import org.opendaylight.controller.sal.core.ComponentActivatorAbstractBase;
import org.opendaylight.controller.sal.core.IContainer;
import org.opendaylight.controller.sal.core.IContainerListener;
+import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerListener;
import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerService;
import org.opendaylight.controller.sal.utils.GlobalConstants;
import org.opendaylight.controller.switchmanager.IInventoryListener;
IForwardingRulesManager.class.getName(),
IInventoryListener.class.getName(),
ICacheUpdateAware.class.getName(),
- IConfigurationContainerAware.class.getName() };
+ IConfigurationContainerAware.class.getName(),
+ IFlowProgrammerListener.class.getName()};
} else {
interfaces = new String[] {
ISwitchManagerAware.class.getName(),
IForwardingRulesManager.class.getName(),
IInventoryListener.class.getName(),
ICacheUpdateAware.class.getName(),
- IConfigurationContainerAware.class.getName() };
+ IConfigurationContainerAware.class.getName(),
+ IFlowProgrammerListener.class.getName()};
}
c.setInterface(interfaces, props);
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import org.opendaylight.controller.sal.core.Property;
import org.opendaylight.controller.sal.core.UpdateType;
import org.opendaylight.controller.sal.flowprogrammer.Flow;
+import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerListener;
import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerService;
import org.opendaylight.controller.sal.match.Match;
import org.opendaylight.controller.sal.match.MatchType;
public class ForwardingRulesManagerImpl implements IForwardingRulesManager,
PortGroupChangeListener, IContainerListener, ISwitchManagerAware,
IConfigurationContainerAware, IInventoryListener, IObjectReader,
- ICacheUpdateAware<Long, String>, CommandProvider {
+ ICacheUpdateAware<Long, String>, CommandProvider,
+ IFlowProgrammerListener {
private static final String SAVE = "Save";
private static final String NODEDOWN = "Node is Down";
private static final Logger log = LoggerFactory
private boolean inContainerMode; // being used by default instance only
/*
* Flow database. It's the software view of what was installed on the
- * switch. It is indexed by node. For convenience a version indexed
- * by group name is also maintained. The core element is a class which
- * contains the flow entry pushed by the functional modules and the
- * respective container flow merged version. In absence of container
- * flows, the two flow entries are the same.
+ * switch. It is indexed by node. For convenience a version indexed by group
+ * name is also maintained. The core element is a class which contains the
+ * flow entry pushed by the functional modules and the respective container
+ * flow merged version. In absence of container flows, the two flow entries
+ * are the same.
*/
private ConcurrentMap<Node, Set<FlowEntryInstall>> nodeFlows;
private ConcurrentMap<String, Set<FlowEntryInstall>> groupFlows;
/*
- * Inactive flow list. This is for the global instance of FRM
- * It will contain all the flow entries which were installed on the
- * global container when the first container is created.
+ * Inactive flow list. This is for the global instance of FRM It will
+ * contain all the flow entries which were installed on the global container
+ * when the first container is created.
*/
private List<FlowEntry> inactiveFlows;
private ISwitchManager switchManager;
/**
- * Adds a flow entry onto the network node
- * It runs various validity checks and derive the final container flows
- * merged entries that will be attempted to be installed
- *
- * @param flowEntry the original flow entry application requested to add
+ * Adds a flow entry onto the network node It runs various validity checks
+ * and derive the final container flows merged entries that will be
+ * attempted to be installed
+ *
+ * @param flowEntry
+ * the original flow entry application requested to add
* @return
*/
private Status addEntry(FlowEntry flowEntry) {
// Sanity Check
if (flowEntry == null || flowEntry.getNode() == null) {
- String msg = "Invalid FlowEntry";
+ String msg = "Invalid FlowEntry";
log.warn(msg + ": " + flowEntry);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
/*
- * Derive the container flow merged entries to install
- * In presence of N container flows, we may end up with
- * N different entries to install...
+ * Derive the container flow merged entries to install In presence of N
+ * container flows, we may end up with N different entries to install...
*/
- List<FlowEntryInstall> toInstallList = deriveInstallEntries(flowEntry
- .clone(), container.getContainerFlows());
+ List<FlowEntryInstall> toInstallList = deriveInstallEntries(
+ flowEntry.clone(), container.getContainerFlows());
// Container Flow conflict Check
if (toInstallList.isEmpty()) {
- String msg = "Flow Entry conflicts with all Container Flows";
- log.warn(msg);
+ String msg = "Flow Entry conflicts with all Container Flows";
+ log.warn(msg);
return new Status(StatusCode.CONFLICT, msg);
}
// Derive the list of entries good to be installed
List<FlowEntryInstall> toInstallSafe = new ArrayList<FlowEntryInstall>();
for (FlowEntryInstall entry : toInstallList) {
- // Conflict Check: Verify new entry would not overwrite existing ones
+ // Conflict Check: Verify new entry would not overwrite existing
+ // ones
if (findMatch(entry.getInstall(), false) != null) {
- log.warn("Operation Rejected: A flow with same match " +
- "and priority exists on the target node");
+ log.warn("Operation Rejected: A flow with same match "
+ + "and priority exists on the target node");
log.trace("Aborting to install " + entry);
continue;
}
toInstallSafe.add(entry);
}
- // Declare failure if all the container flow merged entries clash with existing entries
+ // Declare failure if all the container flow merged entries clash with
+ // existing entries
if (toInstallSafe.size() == 0) {
- String msg = "A flow with same match and priority exists " +
- "on the target node";
- log.warn(msg);
+ String msg = "A flow with same match and priority exists "
+ + "on the target node";
+ log.warn(msg);
return new Status(StatusCode.CONFLICT, msg);
}
for (FlowEntryInstall installEntry : toInstallList) {
// Install and update database
- Status ret = addEntriesInternal(installEntry);
+ Status ret = addEntriesInternal(installEntry);
if (ret.isSuccess()) {
oneSucceded = true;
* of container flow merged flow entries good to be installed on this
* container. If the list of container flows is null or empty, the install
* entry list will contain only one entry, the original flow entry. If the
- * flow entry is congruent with all the N container flows, then the output
+ * flow entry is congruent with all the N container flows, then the output
* install entry list will contain N entries. If the output list is empty,
* it means the passed flow entry conflicts with all the container flows.
- *
- * @param cFlowList The list of container flows
- * @return the list of container flow merged entries good to be installed on this container
+ *
+ * @param cFlowList
+ * The list of container flows
+ * @return the list of container flow merged entries good to be installed on
+ * this container
*/
private List<FlowEntryInstall> deriveInstallEntries(FlowEntry request,
List<ContainerFlow> cFlowList) {
toInstallList.add(new FlowEntryInstall(request.clone(), null));
} else {
// Create the list of entries to be installed. If the flow entry is
- // not congruent with any container flow, no install entries will be created
+ // not congruent with any container flow, no install entries will be
+ // created
for (ContainerFlow cFlow : container.getContainerFlows()) {
if (cFlow.allowsFlow(request.getFlow())) {
toInstallList.add(new FlowEntryInstall(request.clone(),
}
/**
- * Modify a flow entry with a new one
- * It runs various validity check and derive the final container flows
- * merged flow entries to work with
- *
+ * Modify a flow entry with a new one It runs various validity check and
+ * derive the final container flows merged flow entries to work with
+ *
* @param currentFlowEntry
* @param newFlowEntry
* @return Success or error string
*/
private Status modifyEntry(FlowEntry currentFlowEntry,
FlowEntry newFlowEntry) {
- Status retExt;
+ Status retExt;
// Sanity checks
if (currentFlowEntry == null || currentFlowEntry.getNode() == null
|| newFlowEntry == null || newFlowEntry.getNode() == null) {
- String msg ="Modify: Invalid FlowEntry";
+ String msg = "Modify: Invalid FlowEntry";
log.warn(msg + ": {} or {} ", currentFlowEntry, newFlowEntry);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
if (!currentFlowEntry.getNode().equals(newFlowEntry.getNode())
|| !currentFlowEntry.getFlowName().equals(
newFlowEntry.getFlowName())) {
- String msg = "Modify: Incompatible Flow Entries";
- log.warn(msg +": {} and {}", currentFlowEntry, newFlowEntry);
+ String msg = "Modify: Incompatible Flow Entries";
+ log.warn(msg + ": {} and {}", currentFlowEntry, newFlowEntry);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
// Equality Check
if (currentFlowEntry.equals(newFlowEntry)) {
- String msg = "Modify skipped as flows are the same";
+ String msg = "Modify skipped as flows are the same";
log.debug(msg + ": " + currentFlowEntry + " and " + newFlowEntry);
return new Status(StatusCode.SUCCESS, msg);
}
- // Conflict Check: Verify the new entry would not conflict with another existing one
- // This is a loose check on the previous original flow entry requests. No check
+ // Conflict Check: Verify the new entry would not conflict with another
+ // existing one
+ // This is a loose check on the previous original flow entry requests.
+ // No check
// on the container flow merged flow entries (if any) yet
FlowEntryInstall sameMatchOriginalEntry = findMatch(newFlowEntry, true);
if (sameMatchOriginalEntry != null
&& !sameMatchOriginalEntry.getOriginal().equals(
currentFlowEntry)) {
- String msg = "Operation Rejected: Another flow with same match " +
- "and priority exists on the target node";
+ String msg = "Operation Rejected: Another flow with same match "
+ + "and priority exists on the target node";
log.warn(msg);
return new Status(StatusCode.CONFLICT, msg);
}
newFlowEntry.clone(), container.getContainerFlows());
if (toInstallList.isEmpty()) {
- String msg = "Modify Operation Rejected: The new entry " +
- "conflicts with all the container flows";
+ String msg = "Modify Operation Rejected: The new entry "
+ + "conflicts with all the container flows";
log.warn(msg);
return new Status(StatusCode.CONFLICT, msg);
}
* This is only possible when the new entry and current entry have
* different match. In this scenario the modification would ultimately
* be handled as a remove and add operations in the protocol plugin.
- *
+ *
* Also, if any of the new flow entries would clash with an existing
* one, we cannot proceed with the modify operation, because it would
* fail for some entries and leave stale entries on the network node.
- * Modify path can be taken only if it can be performed completely,
- * for all entries.
- *
- * So, for the above two cases, to simplify, let's decouple the modify in:
- * 1) remove current entries
- * 2) install new entries
+ * Modify path can be taken only if it can be performed completely, for
+ * all entries.
+ *
+ * So, for the above two cases, to simplify, let's decouple the modify
+ * in: 1) remove current entries 2) install new entries
*/
boolean decouple = false;
if (installedList.size() != toInstallList.size()) {
- log.info("Modify: New flow entry does not satisfy the same " +
- "number of container flows as the original entry does");
+ log.info("Modify: New flow entry does not satisfy the same "
+ + "number of container flows as the original entry does");
decouple = true;
}
List<FlowEntryInstall> toInstallSafe = new ArrayList<FlowEntryInstall>();
for (FlowEntryInstall installEntry : toInstallList) {
- // Conflict Check: Verify the new entry would not overwrite another existing one
- FlowEntryInstall sameMatchEntry = findMatch(installEntry
- .getInstall(), false);
+ // Conflict Check: Verify the new entry would not overwrite another
+ // existing one
+ FlowEntryInstall sameMatchEntry = findMatch(
+ installEntry.getInstall(), false);
if (sameMatchEntry != null
&& !sameMatchEntry.getOriginal().equals(currentFlowEntry)) {
- log.info("Modify: new container flow merged flow entry " +
- "clashes with existing flow");
+ log.info("Modify: new container flow merged flow entry "
+ + "clashes with existing flow");
decouple = true;
} else {
toInstallSafe.add(installEntry);
/*
* The two list have the same size and the entries to install do not
* clash with any existing flow on the network node. We assume here
- * (and might be wrong) that the same container flows that were satisfied
- * by the current entries are the same that are satisfied by the new
- * entries. Let's take the risk for now.
- *
+ * (and might be wrong) that the same container flows that were
+ * satisfied by the current entries are the same that are satisfied
+ * by the new entries. Let's take the risk for now.
+ *
* Note: modification has to be complete. If any entry modification
- * fails, we need to stop, restore the already modified entries,
- * and declare failure.
+ * fails, we need to stop, restore the already modified entries, and
+ * declare failure.
*/
Status retModify;
int i = 0;
}
// Check if uncompleted modify
if (i < size) {
- log.warn("Unable to perform a complete modify for all " +
- "the container flows merged entries");
+ log.warn("Unable to perform a complete modify for all "
+ + "the container flows merged entries");
// Restore original entries
int j = 0;
while (j < i) {
}
// Fatal error, recovery failed
if (j < i) {
- String msg = "Flow recovery failed ! Unrecoverable Error";
+ String msg = "Flow recovery failed ! Unrecoverable Error";
log.error(msg);
return new Status(StatusCode.INTERNALERROR, msg);
}
/**
* This is the function that modifies the final container flows merged
- * entries on the network node and update the database. It expects that
- * all the validity checks are passed
- *
+ * entries on the network node and update the database. It expects that all
+ * the validity checks are passed
+ *
* @param currentEntries
* @param newEntries
* @return
.getFlow());
if (!status.isSuccess()) {
- log.warn("SDN Plugin failed to program the flow: " + status.getDescription());
+ log.warn("SDN Plugin failed to program the flow: "
+ + status.getDescription());
return status;
}
- log.trace("Modified {} => {}", currentEntries.getInstall(), newEntries
- .getInstall());
+ log.trace("Modified {} => {}", currentEntries.getInstall(),
+ newEntries.getInstall());
// Update DB
updateLocalDatabase(currentEntries, false);
/**
* Remove a flow entry. If the entry is not present in the software view
* (entry or node not present), it return successfully
- *
+ *
* @param flowEntry
* @return
*/
private Status removeEntry(FlowEntry flowEntry) {
Status error = new Status(null, null);
-
+
// Sanity Check
if (flowEntry == null || flowEntry.getNode() == null) {
- String msg = "Invalid FlowEntry";
+ String msg = "Invalid FlowEntry";
log.warn(msg + ": " + flowEntry);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
// Derive the container flows merged installed entries
- List<FlowEntryInstall> installedList = deriveInstallEntries(flowEntry
- .clone(), container.getContainerFlows());
+ List<FlowEntryInstall> installedList = deriveInstallEntries(
+ flowEntry.clone(), container.getContainerFlows());
Set<FlowEntryInstall> flowsOnNode = nodeFlows.get(flowEntry.getNode());
boolean atLeastOneRemoved = false;
for (FlowEntryInstall entry : installedList) {
if (flowsOnNode == null) {
- String msg = "Removal skipped (Node down)";
+ String msg = "Removal skipped (Node down or Flow not on Node)";
log.debug(msg + " for flow entry " + flowEntry);
return new Status(StatusCode.SUCCESS, msg);
}
}
/*
- * No worries if full removal failed. Consistency checker will
- * take care of removing the stale entries later, or adjusting
- * the software database if not in sync with hardware
+ * No worries if full removal failed. Consistency checker will take care
+ * of removing the stale entries later, or adjusting the software
+ * database if not in sync with hardware
*/
- return (atLeastOneRemoved) ?
- new Status(StatusCode.SUCCESS, null) : error;
+ return (atLeastOneRemoved) ? new Status(StatusCode.SUCCESS, null)
+ : error;
}
/**
* This is the function that removes the final container flows merged entry
* from the network node and update the database. It expects that all the
* validity checks are passed
- *
- * @param entry the FlowEntryInstall
+ *
+ * @param entry
+ * the FlowEntryInstall
* @return "Success" or error string
*/
private Status removeEntryInternal(FlowEntryInstall entry) {
entry.toBeDeleted();
// Remove from node
- Status status =
- programmer.removeFlow(entry.getNode(),
- entry.getInstall().getFlow());
+ Status status = programmer.removeFlow(entry.getNode(), entry
+ .getInstall().getFlow());
if (!status.isSuccess()) {
- log.warn("SDN Plugin failed to remove the flow: " +
- status.getDescription());
+ log.warn("SDN Plugin failed to remove the flow: "
+ + status.getDescription());
return status;
}
log.trace("Removed {}", entry.getInstall());
* on the network node and updates the database. It expects that all the
* validity and conflict checks are passed. That means it does not check
* whether this flow would conflict or overwrite an existing one.
- *
- * @param entry the FlowEntryInstall
+ *
+ * @param entry
+ * the FlowEntryInstall
* @return "Success" or error string
*/
private Status addEntriesInternal(FlowEntryInstall entry) {
// Install the flow on the network node
- Status status = programmer.addFlow(entry.getNode(),
- entry.getInstall().getFlow());
+ Status status = programmer.addFlow(entry.getNode(), entry.getInstall()
+ .getFlow());
if (!status.isSuccess()) {
- log.warn("SDN Plugin failed to program the flow: " +
- status.getDescription());
+ log.warn("SDN Plugin failed to program the flow: "
+ + status.getDescription());
return status;
}
}
/**
- * Returns true if the flow conflicts with all the container's flows.
- * This means that if the function returns true, the passed flow entry
- * is congruent with at least one container flow, hence it is good to
- * be installed on this container.
- *
+ * Returns true if the flow conflicts with all the container's flows. This
+ * means that if the function returns true, the passed flow entry is
+ * congruent with at least one container flow, hence it is good to be
+ * installed on this container.
+ *
* @param flowEntry
- * @return true if flow conflicts with all the container flows, false otherwise
+ * @return true if flow conflicts with all the container flows, false
+ * otherwise
*/
private boolean entryConflictsWithContainerFlows(FlowEntry flowEntry) {
List<ContainerFlow> cFlowList = container.getContainerFlows();
// Validity check and avoid unnecessary computation
- // Also takes care of default container where no container flows are present
+ // Also takes care of default container where no container flows are
+ // present
if (cFlowList == null || cFlowList.isEmpty()) {
return false;
}
@Override
public Status installFlowEntry(FlowEntry flowEntry) {
- Status status;
+ Status status;
if (inContainerMode) {
- String msg = "Controller in container mode: Install Refused";
+ String msg = "Controller in container mode: Install Refused";
status = new Status(StatusCode.NOTACCEPTABLE, msg);
log.warn(msg);
} else {
@Override
public Status uninstallFlowEntry(FlowEntry entry) {
- Status status;
+ Status status;
if (inContainerMode) {
- String msg = "Controller in container mode: Uninstall Refused";
+ String msg = "Controller in container mode: Uninstall Refused";
status = new Status(StatusCode.NOTACCEPTABLE, msg);
log.warn(msg);
} else {
- status = removeEntry(entry);
+ status = removeEntry(entry);
}
return status;
}
@Override
public Status modifyFlowEntry(FlowEntry currentFlowEntry,
FlowEntry newFlowEntry) {
- Status status = null;
+ Status status = null;
if (inContainerMode) {
String msg = "Controller in container mode: Modify Refused";
status = new Status(StatusCode.NOTACCEPTABLE, msg);
/*
* Run a loose check on the installed entries to decide whether to go
* with a add or modify method. A loose check means only check against
- * the original flow entry requests and not against the installed
- * flow entries which are the result of the original entry merged with
- * the container flow(s) (if any). The modifyFlowEntry method in
- * presence of conflicts with the Container flows (if any) would revert
- * back to a delete + add pattern
+ * the original flow entry requests and not against the installed flow
+ * entries which are the result of the original entry merged with the
+ * container flow(s) (if any). The modifyFlowEntry method in presence of
+ * conflicts with the Container flows (if any) would revert back to a
+ * delete + add pattern
*/
FlowEntryInstall currentFlowEntries = findMatch(newFlowEntry, true);
}
/**
- * Try to find in the database if a Flow with the same Match and priority
- * of the passed one already exists for the specified network node.
- * Flow, priority and network node are all specified in the FlowEntry
- * If found, the respective FlowEntryInstall Object is returned
- *
- * @param flowEntry the FlowEntry to be tested against the ones installed
- * @param looseCheck if true, the function will run the check against the
- * original flow entry portion of the installed entries
+ * Try to find in the database if a Flow with the same Match and priority of
+ * the passed one already exists for the specified network node. Flow,
+ * priority and network node are all specified in the FlowEntry If found,
+ * the respective FlowEntryInstall Object is returned
+ *
+ * @param flowEntry
+ * the FlowEntry to be tested against the ones installed
+ * @param looseCheck
+ * if true, the function will run the check against the original
+ * flow entry portion of the installed entries
* @return null if not found, otherwise the FlowEntryInstall which contains
- * the existing flow entry
+ * the existing flow entry
*/
private FlowEntryInstall findMatch(FlowEntry flowEntry, boolean looseCheck) {
Flow flow = flowEntry.getFlow();
}
/**
- * Updates all installed flows because the container flow got updated
- * This is obtained in two phases on per node basis:
- * 1) Uninstall of all flows
- * 2) Reinstall of all flows
- * This is needed because a new container flows merged flow may conflict with an existing
- * old container flows merged flow on the network node
+ * Updates all installed flows because the container flow got updated This
+ * is obtained in two phases on per node basis: 1) Uninstall of all flows 2)
+ * Reinstall of all flows This is needed because a new container flows
+ * merged flow may conflict with an existing old container flows merged flow
+ * on the network node
*/
private void updateFlowsContainerFlow() {
List<FlowEntryInstall> oldCouples = new ArrayList<FlowEntryInstall>();
if (entry.getValue() == null) {
continue;
}
- // Create a set of old entries and one of original entries to be reinstalled
+ // Create a set of old entries and one of original entries to be
+ // reinstalled
for (FlowEntryInstall oldCouple : entry.getValue()) {
oldCouples.add(oldCouple);
toReinstall.add(oldCouple.getOriginal());
}
- // Remove the old couples. No validity checks to be run, use the internal remove
+ // Remove the old couples. No validity checks to be run, use the
+ // internal remove
for (FlowEntryInstall oldCouple : oldCouples) {
this.removeEntryInternal(oldCouple);
}
- // Reinstall the original flow entries, via the regular path: new cFlow merge + validations
+ // Reinstall the original flow entries, via the regular path: new
+ // cFlow merge + validations
for (FlowEntry flowEntry : toReinstall) {
this.installFlowEntry(flowEntry);
}
flowName);
} else {
log.warn("Failed to add ports {} to Flow entry {}: "
- + error.getDescription(), portList,
+ + error.getDescription(), portList,
currentFlowEntry.toString());
}
return;
flowName);
} else {
log.warn("Failed to remove ports {} from Flow entry {}: "
- + status.getDescription(), portList,
+ + status.getDescription(), portList,
currentFlowEntry.toString());
}
return;
}
}
- log
- .warn(
- "Failed to remove ports from Flow {} on Node {}: Entry Not Found",
- flowName, node);
+ log.warn(
+ "Failed to remove ports from Flow {} on Node {}: Entry Not Found",
+ flowName, node);
}
/*
}
}
if (currentFlowEntry == null) {
- log
- .warn(
- "Failed to replace output port for flow {} on node {}: Entry Not Found",
- flowName, node);
+ log.warn(
+ "Failed to replace output port for flow {} on node {}: Entry Not Found",
+ flowName, node);
return;
}
@SuppressWarnings("deprecation")
private void allocateCaches() {
if (this.clusterContainerService == null) {
- log
- .warn("Un-initialized clusterContainerService, can't create cache");
+ log.warn("Un-initialized clusterContainerService, can't create cache");
return;
}
log.debug("FRM allocateCaches for Container {}", container);
try {
- clusterContainerService.createCache("frm.nodeFlows", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ clusterContainerService.createCache("frm.nodeFlows",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
- clusterContainerService.createCache("frm.groupFlows", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ clusterContainerService.createCache("frm.groupFlows",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
- clusterContainerService.createCache("frm.staticFlows", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ clusterContainerService.createCache("frm.staticFlows",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
- clusterContainerService.createCache("frm.flowsSaveEvent", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ clusterContainerService.createCache("frm.flowsSaveEvent",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
clusterContainerService.createCache("frm.staticFlowsOrdinal",
EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
- clusterContainerService.createCache("frm.portGroupConfigs", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ clusterContainerService.createCache("frm.portGroupConfigs",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
- clusterContainerService.createCache("frm.portGroupData", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ clusterContainerService.createCache("frm.portGroupData",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
- clusterContainerService.createCache("frm.TSPolicies", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ clusterContainerService.createCache("frm.TSPolicies",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
} catch (CacheConfigException cce) {
log.error("FRM CacheConfigException");
}
}
- @SuppressWarnings( { "unchecked", "deprecation" })
+ @SuppressWarnings({ "unchecked", "deprecation" })
private void retrieveCaches() {
ConcurrentMap<?, ?> map;
if (this.clusterContainerService == null) {
- log
- .warn("un-initialized clusterContainerService, can't retrieve cache");
+ log.warn("un-initialized clusterContainerService, can't retrieve cache");
return;
}
if (map != null) {
nodeFlows = (ConcurrentMap<Node, Set<FlowEntryInstall>>) map;
} else {
- log
- .error(
- "FRM Cache frm.nodeFlows allocation failed for Container {}",
- container);
+ log.error(
+ "FRM Cache frm.nodeFlows allocation failed for Container {}",
+ container);
}
map = clusterContainerService.getCache("frm.groupFlows");
if (map != null) {
groupFlows = (ConcurrentMap<String, Set<FlowEntryInstall>>) map;
} else {
- log
- .error(
- "FRM Cache frm.groupFlows allocation failed for Container {}",
- container);
+ log.error(
+ "FRM Cache frm.groupFlows allocation failed for Container {}",
+ container);
}
map = clusterContainerService.getCache("frm.staticFlows");
if (map != null) {
staticFlows = (ConcurrentMap<Integer, FlowConfig>) map;
} else {
- log
- .error(
- "FRM Cache frm.staticFlows allocation failed for Container {}",
- container);
+ log.error(
+ "FRM Cache frm.staticFlows allocation failed for Container {}",
+ container);
}
map = clusterContainerService.getCache("frm.flowsSaveEvent");
if (map != null) {
flowsSaveEvent = (ConcurrentMap<Long, String>) map;
} else {
- log
- .error(
- "FRM Cache frm.flowsSaveEvent allocation failed for Container {}",
- container);
+ log.error(
+ "FRM Cache frm.flowsSaveEvent allocation failed for Container {}",
+ container);
}
map = clusterContainerService.getCache("frm.staticFlowsOrdinal");
if (map != null) {
staticFlowsOrdinal = (ConcurrentMap<Integer, Integer>) map;
} else {
- log
- .error(
- "FRM Cache frm.staticFlowsOrdinal allocation failed for Container {}",
- container);
+ log.error(
+ "FRM Cache frm.staticFlowsOrdinal allocation failed for Container {}",
+ container);
}
map = clusterContainerService.getCache("frm.portGroupConfigs");
if (map != null) {
portGroupConfigs = (ConcurrentMap<String, PortGroupConfig>) map;
} else {
- log
- .error(
- "FRM Cache frm.portGroupConfigs allocation failed for Container {}",
- container);
+ log.error(
+ "FRM Cache frm.portGroupConfigs allocation failed for Container {}",
+ container);
}
map = clusterContainerService.getCache("frm.portGroupData");
if (map != null) {
portGroupData = (ConcurrentMap<PortGroupConfig, Map<Node, PortGroup>>) map;
} else {
- log
- .error(
- "FRM Cache frm.portGroupData allocation failed for Container {}",
- container);
+ log.error(
+ "FRM Cache frm.portGroupData allocation failed for Container {}",
+ container);
}
map = clusterContainerService.getCache("frm.TSPolicies");
if (map != null) {
TSPolicies = (ConcurrentMap<String, Object>) map;
} else {
- log
- .error(
- "FRM Cache frm.TSPolicies allocation failed for Container {}",
- container);
+ log.error(
+ "FRM Cache frm.TSPolicies allocation failed for Container {}",
+ container);
}
}
@SuppressWarnings("deprecation")
- private void destroyCaches() {
+ private void destroyCaches() {
if (this.clusterContainerService == null) {
- log
- .warn("Un-initialized clusterContainerService, can't destroy cache");
+ log.warn("Un-initialized clusterContainerService, can't destroy cache");
return;
}
* multiple entry configuration (PortGroup) and hardware installation is
* NOT done directly on this event. 3. The User prefers to retain the
* configuration in Controller and skip hardware installation.
- *
+ *
* Hence it is safe to update the StaticFlow DB at this point.
- *
+ *
* Note : For the case of PortGrouping, it is essential to have this DB
* populated before the PortGroupListeners can query for the DB
* triggered using portGroupChanged event...
if (config.getNode().equals(node)) {
if (config.installInHw()
&& !config.getStatus().equals(
- StatusCode.SUCCESS.toString())) {
+ StatusCode.SUCCESS.toString())) {
Status status = this.addEntry(config.getFlowEntry());
config.setStatus(status.getDescription());
}
log.trace("Updating Static Flow configs on node down: " + node);
List<Integer> toRemove = new ArrayList<Integer>();
- for (Entry<Integer,FlowConfig> entry : staticFlows.entrySet()) {
+ for (Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
- FlowConfig config = entry.getValue();
+ FlowConfig config = entry.getValue();
if (config.isPortGroupEnabled()) {
continue;
}
-
+
if (config.installInHw() && config.getNode().equals(node)) {
- if (config.isInternalFlow()) {
- // Take note of this controller generated static flow
- toRemove.add(entry.getKey());
- } else {
- config.setStatus(NODEDOWN);
- }
+ if (config.isInternalFlow()) {
+ // Take note of this controller generated static flow
+ toRemove.add(entry.getKey());
+ } else {
+ config.setStatus(NODEDOWN);
+ }
}
}
- // Remove controller generated static flows for this node
+ // Remove controller generated static flows for this node
for (Integer index : toRemove) {
- staticFlows.remove(index);
- }
+ staticFlows.remove(index);
+ }
}
private void updateStaticFlowConfigsOnContainerModeChange(UpdateType update) {
if (config.installInHw()) {
switch (update) {
case ADDED:
- config
- .setStatus("Removed from node because in container mode");
+ config.setStatus("Removed from node because in container mode");
break;
case REMOVED:
config.setStatus(StatusCode.SUCCESS.toString());
public Status removeStaticFlow(FlowConfig config) {
/*
* No config.isInternal() check as NB does not take this path and GUI
- * cannot issue a delete on an internal generated flow. We need this path
- * to be accessible when switch mode is changed from proactive to
+ * cannot issue a delete on an internal generated flow. We need this
+ * path to be accessible when switch mode is changed from proactive to
* reactive, so that we can remove the internal generated LLDP and ARP
* punt flows
*/
for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
if (entry.getValue().isByNameAndNodeIdEqual(config)) {
// Program the network node
- Status status = this.removeEntry(config.getFlowEntry());
+ Status status = this.removeEntry(config.getFlowEntry());
// Update configuration database if programming was successful
if (status.isSuccess()) {
staticFlows.remove(entry.getKey());
public Status removeStaticFlow(String name, Node node) {
for (Map.Entry<Integer, FlowConfig> mapEntry : staticFlows.entrySet()) {
FlowConfig entry = mapEntry.getValue();
- Status status = new Status(null,null);
+ Status status = new Status(null, null);
if (entry.isByNameAndNodeIdEqual(name, node)) {
// Validity check for api3 entry point
if (entry.isInternalFlow()) {
- String msg = "Invalid operation: Controller generated " +
- "flow cannot be deleted";
- log.warn(msg);
- return new Status(StatusCode.NOTACCEPTABLE, msg);
+ String msg = "Invalid operation: Controller generated "
+ + "flow cannot be deleted";
+ log.warn(msg);
+ return new Status(StatusCode.NOTACCEPTABLE, msg);
}
if (!entry.isPortGroupEnabled()) {
// Program the network node
public Status modifyStaticFlow(FlowConfig newFlowConfig) {
// Validity check for api3 entry point
if (newFlowConfig.isInternalFlow()) {
- String msg = "Invalid operation: Controller generated flow " +
- "cannot be modified";
- log.warn(msg);
+ String msg = "Invalid operation: Controller generated flow "
+ + "cannot be modified";
+ log.warn(msg);
return new Status(StatusCode.NOTACCEPTABLE, msg);
}
// Validity Check
StringBuffer resultStr = new StringBuffer();
if (!newFlowConfig.isValid(container, resultStr)) {
- String msg = "Invalid Configuration (" + resultStr.toString()
- + ")";
+ String msg = "Invalid Configuration (" + resultStr.toString() + ")";
newFlowConfig.setStatus(msg);
log.warn(msg);
return new Status(StatusCode.BADREQUEST, msg);
}
if (oldFlowConfig == null) {
- String msg = "Attempt to modify a non existing static flow";
- log.warn(msg);
- return new Status(StatusCode.NOTFOUND, msg);
+ String msg = "Attempt to modify a non existing static flow";
+ log.warn(msg);
+ return new Status(StatusCode.NOTFOUND, msg);
}
// Do not attempt to reinstall the flow, warn user
if (newFlowConfig.equals(oldFlowConfig)) {
- String msg = "No modification detected";
- log.info("Static flow modification skipped: " + msg);
+ String msg = "No modification detected";
+ log.info("Static flow modification skipped: " + msg);
return new Status(StatusCode.SUCCESS, msg);
}
// If flow is installed, program the network node
Status status = new Status(StatusCode.SUCCESS, "Saved in config");
if (oldFlowConfig.installInHw()) {
- status = this.modifyEntry(oldFlowConfig.getFlowEntry(),
+ status = this.modifyEntry(oldFlowConfig.getFlowEntry(),
newFlowConfig.getFlowEntry());
}
return status;
}
+ @Override
+ public Status toggleStaticFlowStatus(String name, Node node) {
+ return toggleStaticFlowStatus(getStaticFlow(name, node));
+ }
-
- @Override
- public Status toggleStaticFlowStatus(String name, Node node) {
- return toggleStaticFlowStatus(getStaticFlow(name, node));
- }
-
@Override
public Status toggleStaticFlowStatus(FlowConfig config) {
- if (config == null) {
- String msg = "Invalid request: null flow config";
- log.warn(msg);
- return new Status(StatusCode.BADREQUEST, msg);
- }
+ if (config == null) {
+ String msg = "Invalid request: null flow config";
+ log.warn(msg);
+ return new Status(StatusCode.BADREQUEST, msg);
+ }
// Validity check for api3 entry point
if (config.isInternalFlow()) {
- String msg = "Invalid operation: Controller generated flow " +
- "cannot be modified";
- log.warn(msg);
- return new Status(StatusCode.NOTACCEPTABLE, msg);
+ String msg = "Invalid operation: Controller generated flow "
+ + "cannot be modified";
+ log.warn(msg);
+ return new Status(StatusCode.NOTACCEPTABLE, msg);
}
for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
}
}
return new Status(StatusCode.NOTFOUND,
- "Unable to locate the entry. Failed to toggle status");
+ "Unable to locate the entry. Failed to toggle status");
}
/**
- * Uninstall all the Flow Entries present in the software view
- * A copy of each entry is stored in the inactive list so
- * that it can be re-applied when needed
- * This function is called on the default container instance of FRM only
- * when the first container is created
+ * Uninstall all the Flow Entries present in the software view A copy of
+ * each entry is stored in the inactive list so that it can be re-applied
+ * when needed This function is called on the default container instance of
+ * FRM only when the first container is created
*/
private void uninstallAllFlowEntries() {
log.info("Uninstalling all flows");
for (FlowEntry flowEntry : inactiveFlows) {
Status status = this.removeEntry(flowEntry);
if (!status.isSuccess()) {
- log.warn("Failed to remove entry: {}: " +
- status.getDescription(), flowEntry);
+ log.warn(
+ "Failed to remove entry: {}: "
+ + status.getDescription(), flowEntry);
}
}
}
/**
- * Re-install all the Flow Entries present in the inactive list
- * The inactive list will be empty at the end of this call
- * This function is called on the default container instance of FRM only
- * when the last container is deleted
+ * Re-install all the Flow Entries present in the inactive list The inactive
+ * list will be empty at the end of this call This function is called on the
+ * default container instance of FRM only when the last container is deleted
*/
private void reinstallAllFlowEntries() {
log.info("Reinstalling all inactive flows");
for (FlowEntry flowEntry : this.inactiveFlows) {
- Status status = this.addEntry(flowEntry);
+ Status status = this.addEntry(flowEntry);
if (!status.isSuccess()) {
- log.warn("Failed to install entry: {}: " +
- status.getDescription(), flowEntry);
+ log.warn(
+ "Failed to install entry: {}: "
+ + status.getDescription(), flowEntry);
}
}
allowLLDP.setName("**Punt LLDP");
allowLLDP.setPriority("1");
allowLLDP.setNode(node);
- allowLLDP.setEtherType("0x"
- + Integer.toHexString(EtherTypes.LLDP.intValue())
+ allowLLDP
+ .setEtherType("0x"
+ + Integer.toHexString(EtherTypes.LLDP.intValue())
.toUpperCase());
allowLLDP.setActions(puntAction);
defaultConfigs.add(allowLLDP);
/**
* Remove from the databases all the flows installed on the node
- *
+ *
* @param node
*/
private synchronized void cleanDatabaseForNode(Node node) {
public void notifyNode(Node node, UpdateType type,
Map<String, Property> propMap) {
switch (type) {
- case ADDED:
+ case ADDED:
addStaticFlowsToSwitch(node);
break;
- case REMOVED:
+ case REMOVED:
cleanDatabaseForNode(node);
updateStaticFlowConfigsOnNodeDown(node);
break;
@Override
public void notifyNodeConnector(NodeConnector nodeConnector,
- UpdateType type, Map<String, Property> propMap) {
+ UpdateType type, Map<String, Property> propMap) {
}
private FlowConfig getDerivedFlowConfig(FlowConfig original,
@Override
public void portGroupChanged(PortGroupConfig config,
Map<Node, PortGroup> data, boolean add) {
- log.info("PortGroup Changed for :" + config + " Data: "
- + portGroupData);
+ log.info("PortGroup Changed for :" + config + " Data: " + portGroupData);
Map<Node, PortGroup> existingData = portGroupData.get(config);
if (existingData != null) {
for (Map.Entry<Node, PortGroup> entry : data.entrySet()) {
if (existingPortGroup == null) {
if (add) {
existingData.put(entry.getKey(), entry.getValue());
- addPortGroupFlows(config, entry.getKey(), entry
- .getValue());
+ addPortGroupFlows(config, entry.getKey(),
+ entry.getValue());
}
} else {
if (add) {
existingPortGroup.getPorts().addAll(
entry.getValue().getPorts());
- addPortGroupFlows(config, entry.getKey(), entry
- .getValue());
+ addPortGroupFlows(config, entry.getKey(),
+ entry.getValue());
} else {
existingPortGroup.getPorts().removeAll(
entry.getValue().getPorts());
- removePortGroupFlows(config, entry.getKey(), entry
- .getValue());
+ removePortGroupFlows(config, entry.getKey(),
+ entry.getValue());
}
}
}
/**
* Function called by the dependency manager when all the required
* dependencies are satisfied
- *
+ *
*/
void init() {
frmAware = Collections
.synchronizedSet(new HashSet<IForwardingRulesManagerAware>());
- frmFileName = GlobalConstants.STARTUPHOME.toString() + "frm_staticflows_"
- + this.getContainerName() + ".conf";
- portGroupFileName = GlobalConstants.STARTUPHOME.toString() + "portgroup_"
- + this.getContainerName() + ".conf";
+ frmFileName = GlobalConstants.STARTUPHOME.toString()
+ + "frm_staticflows_" + this.getContainerName() + ".conf";
+ portGroupFileName = GlobalConstants.STARTUPHOME.toString()
+ + "portgroup_" + this.getContainerName() + ".conf";
inContainerMode = false;
* Function called by the dependency manager when at least one dependency
* become unsatisfied or when the component is shutting down because for
* example bundle is being stopped.
- *
+ *
*/
void destroy() {
destroyCaches();
/**
* Function called by dependency manager after "init ()" is called and after
* the services provided by the class are registered in the service registry
- *
+ *
*/
void start() {
/*
* Function called by the dependency manager before the services exported by
* the component are unregistered, this will be followed by a "destroy ()"
* calls
- *
+ *
*/
void stop() {
}
public void containerFlowUpdated(String containerName,
ContainerFlow previousFlow, ContainerFlow currentFlow, UpdateType t) {
/*
- * Whether it is an addition or removal, we have to recompute the
- * merged flows entries taking into account all the current container flows
+ * Whether it is an addition or removal, we have to recompute the merged
+ * flows entries taking into account all the current container flows
* because flow merging is not an injective function
*/
updateFlowsContainerFlow();
@Override
public Status saveConfiguration() {
- return saveConfig();
+ return saveConfig();
}
public void _frmNodeFlows(CommandInterpreter ci) {
}
}
+ @Override
+ public void flowRemoved(Node node, Flow flow) {
+ log.trace("Received flow removed notification on {} for {}", node, flow);
+ // For flow entry identification, only match and priority matter
+ FlowEntry toFind = new FlowEntry("any", "any", flow, node);
+ FlowEntryInstall installedEntry = this.findMatch(toFind, false);
+ if (installedEntry == null) {
+ log.trace("Entry is not know to us");
+ return;
+ }
+
+ // Update Static flow status
+ for (Map.Entry<Integer, FlowConfig> entry : staticFlows.entrySet()) {
+ FlowConfig conf = entry.getValue();
+ if (conf.isByNameAndNodeIdEqual(installedEntry.getFlowName(), node)) {
+ // Update Configuration database
+ conf.toggleStatus();
+ break;
+ }
+ }
+ // Update software views
+ this.updateLocalDatabase(installedEntry, false);
+ }
+
}
logger.debug("Retrieving cache for HostTrackerIH");
inactiveStaticHosts = (ConcurrentMap<NodeConnector, HostNodeConnector>) this.clusterContainerService
.getCache("hostTrackerIH");
- if (hostsDB == null) {
+ if (inactiveStaticHosts == null) {
logger.error("Cache couldn't be retrieved for HostTrackerIH");
}
logger.debug("Cache was successfully retrieved for HostTrackerIH");
import java.util.List;
import java.util.Set;
+import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
--- /dev/null
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.protocol_plugin.openflow;
+
+import org.opendaylight.controller.sal.flowprogrammer.IPluginOutFlowProgrammerService;
+
+/**
+ * Interface which defines the methods exposed by the Flow Programmer Notifier.
+ * Their implementation relays the asynchronous messages received from the
+ * network nodes to the the SAL Flow Programmer Notifier Service on the proper
+ * container.
+ */
+public interface IFlowProgrammerNotifier extends
+ IPluginOutFlowProgrammerService {
+
+}
package org.opendaylight.controller.protocol_plugin.openflow.core.internal;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
}
}
+ public void _controllerShowConnConfig(CommandInterpreter ci) {
+ String str = System.getProperty("secureChannelEnabled");
+ if ((str != null) && (str.trim().equalsIgnoreCase("true"))) {
+ ci.print("The Controller and Switch should communicate through TLS connetion.\n");
+
+ String keyStoreFile = System.getProperty("controllerKeyStore");
+ String trustStoreFile = System.getProperty("controllerTrustStore");
+ if ((keyStoreFile == null) || keyStoreFile.trim().isEmpty()) {
+ ci.print("controllerKeyStore not specified in ./configuration/config.ini\n");
+ } else {
+ ci.print("controllerKeyStore=" + keyStoreFile + "\n");
+ }
+ if ((trustStoreFile == null) || trustStoreFile.trim().isEmpty()) {
+ ci.print("controllerTrustStore not specified in ./configuration/config.ini\n");
+ } else {
+ ci.print("controllerTrustStore=" + trustStoreFile + "\n");
+ }
+ } else {
+ ci.print("The Controller and Switch should communicate through TCP connetion.\n");
+ }
+ }
+
private void registerWithOSGIConsole() {
BundleContext bundleContext = FrameworkUtil.getBundle(this.getClass())
.getBundleContext();
help.append("--Open Flow Controller --\n");
help.append("\tcontrollerShowSwitches\n");
help.append("\tcontrollerReset\n");
+ help.append("\tcontrollerShowConnConfig\n");
return help.toString();
}
}
newBuffer.put(outBuffer);
outBuffer = newBuffer;
}
+ }
+ synchronized (outBuffer) {
msg.writeTo(outBuffer);
if (!socket.isOpen()) {
package org.opendaylight.controller.protocol_plugin.openflow.core.internal;
import java.io.FileInputStream;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.AsynchronousCloseException;
* @throws Exception
*/
private void createSecureChannel(SocketChannel socket) throws Exception {
- String keyStoreFile = System.getProperty("controllerKeyStore").trim();
- String keyStorePassword = System.getProperty("controllerKeyStorePassword").trim();
- String trustStoreFile = System.getProperty("controllerTrustStore").trim();
- String trustStorePassword = System.getProperty("controllerTrustStorePassword").trim();
-
+ String keyStoreFile = System.getProperty("controllerKeyStore");
+ String keyStorePassword = System.getProperty("controllerKeyStorePassword");
+ String trustStoreFile = System.getProperty("controllerTrustStore");
+ String trustStorePassword = System.getProperty("controllerTrustStorePassword");
+
+ if (keyStoreFile != null) {
+ keyStoreFile = keyStoreFile.trim();
+ }
+ if ((keyStoreFile == null) || keyStoreFile.isEmpty()) {
+ throw new FileNotFoundException("controllerKeyStore not specified in ./configuration/config.ini");
+ }
+ if (keyStorePassword != null) {
+ keyStorePassword = keyStorePassword.trim();
+ }
+ if ((keyStorePassword == null) || keyStorePassword.isEmpty()) {
+ throw new FileNotFoundException("controllerKeyStorePassword not specified in ./configuration/config.ini");
+ }
+ if (trustStoreFile != null) {
+ trustStoreFile = trustStoreFile.trim();
+ }
+ if ((trustStoreFile == null) || trustStoreFile.isEmpty()) {
+ throw new FileNotFoundException("controllerTrustStore not specified in ./configuration/config.ini");
+ }
+ if (trustStorePassword != null) {
+ trustStorePassword = trustStorePassword.trim();
+ }
+ if ((trustStorePassword == null) || trustStorePassword.isEmpty()) {
+ throw new FileNotFoundException("controllerTrustStorePassword not specified in ./configuration/config.ini");
+ }
+
KeyStore ks = KeyStore.getInstance("JKS");
KeyStore ts = KeyStore.getInstance("JKS");
KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
newBuffer.put(myAppData);
myAppData = newBuffer;
}
+ }
+ synchronized (myAppData) {
msg.writeTo(myAppData);
myAppData.flip();
sslEngineResult = sslEngine.wrap(myAppData, myNetData);
package org.opendaylight.controller.protocol_plugin.openflow.core.internal;
-import java.io.IOException;
+import java.net.SocketException;
import java.nio.channels.AsynchronousCloseException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
@Override
public Integer asyncSend(OFMessage msg, int xid) {
msg.setXid(xid);
- transmitQ.add(new PriorityMessage(msg, 0));
+ if (transmitQ != null) {
+ transmitQ.add(new PriorityMessage(msg, 0));
+ }
return xid;
}
@Override
public Integer asyncFastSend(OFMessage msg, int xid) {
msg.setXid(xid);
- transmitQ.add(new PriorityMessage(msg, 1));
+ if (transmitQ != null) {
+ transmitQ.add(new PriorityMessage(msg, 1));
+ }
return xid;
}
public void resumeSend() {
try {
- msgReadWriteService.resumeSend();
+ if (msgReadWriteService != null) {
+ msgReadWriteService.resumeSend();
+ }
} catch (Exception e) {
reportError(e);
}
}
private void reportError(Exception e) {
- if (e instanceof AsynchronousCloseException) {
+ if (e instanceof AsynchronousCloseException ||
+ e instanceof InterruptedException ||
+ e instanceof SocketException) {
logger.debug("Caught exception {}", e.getMessage());
} else {
logger.warn("Caught exception {}", e.getMessage());
logger.trace("Message sent: {}", pmsg.toString());
}
Thread.sleep(10);
+ } catch (InterruptedException ie) {
+ reportError(new InterruptedException("PriorityMessageTransmit thread interrupted"));
} catch (Exception e) {
reportError(e);
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import org.apache.felix.dm.Component;
import org.opendaylight.controller.protocol_plugin.openflow.IDataPacketListen;
import org.opendaylight.controller.protocol_plugin.openflow.IDataPacketMux;
+import org.opendaylight.controller.protocol_plugin.openflow.IFlowProgrammerNotifier;
import org.opendaylight.controller.protocol_plugin.openflow.IInventoryShimExternalListener;
import org.opendaylight.controller.protocol_plugin.openflow.IInventoryShimInternalListener;
import org.opendaylight.controller.protocol_plugin.openflow.IOFStatisticsManager;
import org.opendaylight.controller.protocol_plugin.openflow.IStatisticsListener;
import org.opendaylight.controller.protocol_plugin.openflow.ITopologyServiceShimListener;
import org.opendaylight.controller.protocol_plugin.openflow.core.IController;
+import org.opendaylight.controller.protocol_plugin.openflow.core.IMessageListener;
import org.opendaylight.controller.protocol_plugin.openflow.core.internal.Controller;
import org.opendaylight.controller.sal.core.ComponentActivatorAbstractBase;
import org.opendaylight.controller.sal.core.IContainerListener;
import org.opendaylight.controller.sal.core.Node;
import org.opendaylight.controller.sal.discovery.IDiscoveryService;
import org.opendaylight.controller.sal.flowprogrammer.IPluginInFlowProgrammerService;
+import org.opendaylight.controller.sal.flowprogrammer.IPluginOutFlowProgrammerService;
import org.opendaylight.controller.sal.inventory.IPluginInInventoryService;
import org.opendaylight.controller.sal.inventory.IPluginOutInventoryService;
import org.opendaylight.controller.sal.packet.IPluginInDataPacketService;
/**
* Openflow protocol plugin Activator
- *
- *
+ *
+ *
*/
public class Activator extends ComponentActivatorAbstractBase {
protected static final Logger logger = LoggerFactory
.getLogger(Activator.class);
/**
- * Function called when the activator starts just after some
- * initializations are done by the
- * ComponentActivatorAbstractBase.
- *
+ * Function called when the activator starts just after some initializations
+ * are done by the ComponentActivatorAbstractBase.
+ *
*/
public void init() {
}
/**
- * Function called when the activator stops just before the
- * cleanup done by ComponentActivatorAbstractBase
- *
+ * Function called when the activator stops just before the cleanup done by
+ * ComponentActivatorAbstractBase
+ *
*/
public void destroy() {
}
/**
- * Function that is used to communicate to dependency manager the
- * list of known implementations for services inside a container
- *
- *
+ * Function that is used to communicate to dependency manager the list of
+ * known implementations for services inside a container
+ *
+ *
* @return An array containing all the CLASS objects that will be
- * instantiated in order to get an fully working implementation
- * Object
+ * instantiated in order to get an fully working implementation
+ * Object
*/
public Object[] getImplementations() {
Object[] res = { TopologyServices.class, DataPacketServices.class,
- InventoryService.class, ReadService.class };
+ InventoryService.class, ReadService.class,
+ FlowProgrammerNotifier.class };
return res;
}
/**
- * Function that is called when configuration of the dependencies
- * is required.
- *
- * @param c dependency manager Component object, used for
- * configuring the dependencies exported and imported
- * @param imp Implementation class that is being configured,
- * needed as long as the same routine can configure multiple
- * implementations
- * @param containerName The containerName being configured, this allow
- * also optional per-container different behavior if needed, usually
- * should not be the case though.
+ * Function that is called when configuration of the dependencies is
+ * required.
+ *
+ * @param c
+ * dependency manager Component object, used for configuring the
+ * dependencies exported and imported
+ * @param imp
+ * Implementation class that is being configured, needed as long
+ * as the same routine can configure multiple implementations
+ * @param containerName
+ * The containerName being configured, this allow also optional
+ * per-container different behavior if needed, usually should not
+ * be the case though.
*/
public void configureInstance(Component c, Object imp, String containerName) {
if (imp.equals(TopologyServices.class)) {
// export the service to be used by SAL
- c.setInterface(new String[] {
- IPluginInTopologyService.class.getName(),
- ITopologyServiceShimListener.class.getName() }, null);
+ c.setInterface(
+ new String[] { IPluginInTopologyService.class.getName(),
+ ITopologyServiceShimListener.class.getName() },
+ null);
// Hook the services coming in from SAL, as optional in
// case SAL is not yet there, could happen
- c.add(createContainerServiceDependency(containerName).setService(
- IPluginOutTopologyService.class).setCallbacks(
- "setPluginOutTopologyService",
- "unsetPluginOutTopologyService").setRequired(false));
- c.add(createServiceDependency().setService(
- IRefreshInternalProvider.class).setCallbacks(
- "setRefreshInternalProvider",
- "unsetRefreshInternalProvider").setRequired(false));
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IPluginOutTopologyService.class)
+ .setCallbacks("setPluginOutTopologyService",
+ "unsetPluginOutTopologyService").setRequired(false));
+ c.add(createServiceDependency()
+ .setService(IRefreshInternalProvider.class)
+ .setCallbacks("setRefreshInternalProvider",
+ "unsetRefreshInternalProvider").setRequired(false));
}
if (imp.equals(InventoryService.class)) {
// export the service
- c.setInterface(new String[] {
- IPluginInInventoryService.class.getName(),
- IStatisticsListener.class.getName(),
- IInventoryShimInternalListener.class.getName() }, null);
+ c.setInterface(
+ new String[] { IPluginInInventoryService.class.getName(),
+ IStatisticsListener.class.getName(),
+ IInventoryShimInternalListener.class.getName() },
+ null);
// Now lets add a service dependency to make sure the
// provider of service exists
- c.add(createServiceDependency().setService(IController.class,
- "(name=Controller)").setCallbacks("setController",
- "unsetController").setRequired(true));
- c.add(createContainerServiceDependency(containerName).setService(
- IPluginOutInventoryService.class).setCallbacks(
- "setPluginOutInventoryServices",
- "unsetPluginOutInventoryServices").setRequired(false));
+ c.add(createServiceDependency()
+ .setService(IController.class, "(name=Controller)")
+ .setCallbacks("setController", "unsetController")
+ .setRequired(true));
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IPluginOutInventoryService.class)
+ .setCallbacks("setPluginOutInventoryServices",
+ "unsetPluginOutInventoryServices")
+ .setRequired(false));
}
if (imp.equals(DataPacketServices.class)) {
c.setInterface(IPluginInDataPacketService.class.getName(), props);
// Hook the services coming in from SAL, as optional in
// case SAL is not yet there, could happen
- c.add(createServiceDependency().setService(IController.class,
- "(name=Controller)").setCallbacks("setController",
- "unsetController").setRequired(true));
+ c.add(createServiceDependency()
+ .setService(IController.class, "(name=Controller)")
+ .setCallbacks("setController", "unsetController")
+ .setRequired(true));
// This is required for the transmission to happen properly
c.add(createServiceDependency().setService(IDataPacketMux.class)
.setCallbacks("setIDataPacketMux", "unsetIDataPacketMux")
.setRequired(true));
- c.add(createContainerServiceDependency(containerName).setService(
- IPluginOutDataPacketService.class).setCallbacks(
- "setPluginOutDataPacketService",
- "unsetPluginOutDataPacketService").setRequired(false));
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IPluginOutDataPacketService.class)
+ .setCallbacks("setPluginOutDataPacketService",
+ "unsetPluginOutDataPacketService")
+ .setRequired(false));
}
if (imp.equals(ReadService.class)) {
// by SAL
props.put("protocolPluginType", Node.NodeIDType.OPENFLOW);
c.setInterface(IPluginInReadService.class.getName(), props);
- c.add(createServiceDependency().setService(
- IPluginReadServiceFilter.class).setCallbacks("setService",
- "unsetService").setRequired(true));
+ c.add(createServiceDependency()
+ .setService(IPluginReadServiceFilter.class)
+ .setCallbacks("setService", "unsetService")
+ .setRequired(true));
+ }
+
+ if (imp.equals(FlowProgrammerNotifier.class)) {
+ // export the service to be used by SAL
+ Dictionary<String, Object> props = new Hashtable<String, Object>();
+ // Set the protocolPluginType property which will be used
+ // by SAL
+ props.put("protocolPluginType", Node.NodeIDType.OPENFLOW);
+ c.setInterface(IFlowProgrammerNotifier.class.getName(), props);
+
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IPluginOutFlowProgrammerService.class)
+ .setCallbacks("setPluginOutFlowProgrammerService",
+ "unsetPluginOutFlowProgrammerService")
+ .setRequired(true));
}
}
/**
- * Function that is used to communicate to dependency manager the
- * list of known implementations for services that are container
- * independent.
- *
- *
+ * Function that is used to communicate to dependency manager the list of
+ * known implementations for services that are container independent.
+ *
+ *
* @return An array containing all the CLASS objects that will be
- * instantiated in order to get an fully working implementation
- * Object
+ * instantiated in order to get an fully working implementation
+ * Object
*/
public Object[] getGlobalImplementations() {
Object[] res = { Controller.class, OFStatisticsManager.class,
}
/**
- * Function that is called when configuration of the dependencies
- * is required.
- *
- * @param c dependency manager Component object, used for
- * configuring the dependencies exported and imported
- * @param imp Implementation class that is being configured,
- * needed as long as the same routine can configure multiple
- * implementations
+ * Function that is called when configuration of the dependencies is
+ * required.
+ *
+ * @param c
+ * dependency manager Component object, used for configuring the
+ * dependencies exported and imported
+ * @param imp
+ * Implementation class that is being configured, needed as long
+ * as the same routine can configure multiple implementations
*/
public void configureGlobalInstance(Component c, Object imp) {
// Set the protocolPluginType property which will be used
// by SAL
props.put("protocolPluginType", Node.NodeIDType.OPENFLOW);
- c.setInterface(IPluginInFlowProgrammerService.class
- .getName(), props);
+ c.setInterface(
+ new String[] {
+ IPluginInFlowProgrammerService.class.getName(),
+ IMessageListener.class.getName(),
+ IContainerListener.class.getName() }, props);
+
+ c.add(createServiceDependency()
+ .setService(IController.class, "(name=Controller)")
+ .setCallbacks("setController", "unsetController")
+ .setRequired(true));
+
+ c.add(createServiceDependency()
+ .setService(IFlowProgrammerNotifier.class)
+ .setCallbacks("setFlowProgrammerNotifier",
+ "unsetsetFlowProgrammerNotifier")
+ .setRequired(false));
- c.add(createServiceDependency().setService(IController.class,
- "(name=Controller)").setCallbacks("setController",
- "unsetController").setRequired(true));
}
if (imp.equals(ReadServiceFilter.class)) {
- c.setInterface(new String[] {
- IPluginReadServiceFilter.class.getName(),
- IContainerListener.class.getName() }, null);
+ c.setInterface(
+ new String[] { IPluginReadServiceFilter.class.getName(),
+ IContainerListener.class.getName() }, null);
- c.add(createServiceDependency().setService(IController.class,
- "(name=Controller)").setCallbacks("setController",
- "unsetController").setRequired(true));
- c.add(createServiceDependency().setService(
- IOFStatisticsManager.class).setCallbacks("setService",
- "unsetService").setRequired(true));
+ c.add(createServiceDependency()
+ .setService(IController.class, "(name=Controller)")
+ .setCallbacks("setController", "unsetController")
+ .setRequired(true));
+ c.add(createServiceDependency()
+ .setService(IOFStatisticsManager.class)
+ .setCallbacks("setService", "unsetService")
+ .setRequired(true));
}
if (imp.equals(OFStatisticsManager.class)) {
c.setInterface(new String[] { IOFStatisticsManager.class.getName(),
IInventoryShimExternalListener.class.getName() }, null);
- c.add(createServiceDependency().setService(IController.class,
- "(name=Controller)").setCallbacks("setController",
- "unsetController").setRequired(true));
- c.add(createServiceDependency().setService(
- IStatisticsListener.class)
- .setCallbacks("setStatisticsListener",
- "unsetStatisticsListener").setRequired(false));
+ c.add(createServiceDependency()
+ .setService(IController.class, "(name=Controller)")
+ .setCallbacks("setController", "unsetController")
+ .setRequired(true));
+ c.add(createServiceDependency()
+ .setService(IStatisticsListener.class)
+ .setCallbacks("setStatisticsListener",
+ "unsetStatisticsListener").setRequired(false));
}
if (imp.equals(DiscoveryService.class)) {
// export the service
- c.setInterface(new String[] {
- IInventoryShimExternalListener.class.getName(),
- IDataPacketListen.class.getName(),
- IContainerListener.class.getName() }, null);
-
- c.add(createServiceDependency().setService(IController.class,
- "(name=Controller)").setCallbacks("setController",
- "unsetController").setRequired(true));
+ c.setInterface(
+ new String[] {
+ IInventoryShimExternalListener.class.getName(),
+ IDataPacketListen.class.getName(),
+ IContainerListener.class.getName() }, null);
+
+ c.add(createServiceDependency()
+ .setService(IController.class, "(name=Controller)")
+ .setCallbacks("setController", "unsetController")
+ .setRequired(true));
c.add(createContainerServiceDependency(
- GlobalConstants.DEFAULT.toString()).setService(
- IPluginInInventoryService.class).setCallbacks(
- "setPluginInInventoryService",
- "unsetPluginInInventoryService").setRequired(true));
+ GlobalConstants.DEFAULT.toString())
+ .setService(IPluginInInventoryService.class)
+ .setCallbacks("setPluginInInventoryService",
+ "unsetPluginInInventoryService").setRequired(true));
c.add(createServiceDependency().setService(IDataPacketMux.class)
.setCallbacks("setIDataPacketMux", "unsetIDataPacketMux")
.setRequired(true));
- c.add(createServiceDependency().setService(IDiscoveryService.class)
+ c.add(createServiceDependency()
+ .setService(IDiscoveryService.class)
.setCallbacks("setDiscoveryService",
"unsetDiscoveryService").setRequired(true));
}
IContainerListener.class.getName(),
IInventoryShimExternalListener.class.getName() }, null);
- c.add(createServiceDependency().setService(IController.class,
- "(name=Controller)").setCallbacks("setController",
- "unsetController").setRequired(true));
- c.add(createServiceDependency().setService(
- IPluginOutDataPacketService.class).setCallbacks(
- "setPluginOutDataPacketService",
- "unsetPluginOutDataPacketService").setRequired(false));
+ c.add(createServiceDependency()
+ .setService(IController.class, "(name=Controller)")
+ .setCallbacks("setController", "unsetController")
+ .setRequired(true));
+ c.add(createServiceDependency()
+ .setService(IPluginOutDataPacketService.class)
+ .setCallbacks("setPluginOutDataPacketService",
+ "unsetPluginOutDataPacketService")
+ .setRequired(false));
// See if there is any local packet dispatcher
- c.add(createServiceDependency().setService(IDataPacketListen.class)
+ c.add(createServiceDependency()
+ .setService(IDataPacketListen.class)
.setCallbacks("setIDataPacketListen",
"unsetIDataPacketListen").setRequired(false));
}
c.setInterface(new String[] { IContainerListener.class.getName() },
null);
- c.add(createServiceDependency().setService(IController.class,
- "(name=Controller)").setCallbacks("setController",
- "unsetController").setRequired(true));
- c.add(createServiceDependency().setService(
- IInventoryShimInternalListener.class).setCallbacks(
- "setInventoryShimInternalListener",
- "unsetInventoryShimInternalListener").setRequired(true));
- c.add(createServiceDependency().setService(
- IInventoryShimExternalListener.class).setCallbacks(
- "setInventoryShimExternalListener",
- "unsetInventoryShimExternalListener").setRequired(false));
+ c.add(createServiceDependency()
+ .setService(IController.class, "(name=Controller)")
+ .setCallbacks("setController", "unsetController")
+ .setRequired(true));
+ c.add(createServiceDependency()
+ .setService(IInventoryShimInternalListener.class)
+ .setCallbacks("setInventoryShimInternalListener",
+ "unsetInventoryShimInternalListener")
+ .setRequired(true));
+ c.add(createServiceDependency()
+ .setService(IInventoryShimExternalListener.class)
+ .setCallbacks("setInventoryShimExternalListener",
+ "unsetInventoryShimExternalListener")
+ .setRequired(false));
}
if (imp.equals(TopologyServiceShim.class)) {
c.setInterface(new String[] { IDiscoveryService.class.getName(),
IContainerListener.class.getName(),
IRefreshInternalProvider.class.getName() }, null);
- c.add(createServiceDependency().setService(
- ITopologyServiceShimListener.class).setCallbacks(
- "setTopologyServiceShimListener",
- "unsetTopologyServiceShimListener").setRequired(true));
- c.add(createServiceDependency().setService(
- IOFStatisticsManager.class).setCallbacks(
- "setStatisticsManager", "unsetStatisticsManager")
- .setRequired(false));
+ c.add(createServiceDependency()
+ .setService(ITopologyServiceShimListener.class)
+ .setCallbacks("setTopologyServiceShimListener",
+ "unsetTopologyServiceShimListener")
+ .setRequired(true));
+ c.add(createServiceDependency()
+ .setService(IOFStatisticsManager.class)
+ .setCallbacks("setStatisticsManager",
+ "unsetStatisticsManager").setRequired(false));
}
}
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
/**
* Utility class for converting a SAL Flow into the OF flow and vice-versa
- *
- *
- *
*/
public class FlowConverter {
private Flow flow; // SAL Flow
}
/**
- * Returns the match in OF 1.0 (OFMatch) form or OF 1.0 + IPv6 extensions form (V6Match)
- *
+ * Returns the match in OF 1.0 (OFMatch) form or OF 1.0 + IPv6 extensions
+ * form (V6Match)
+ *
* @return
*/
public OFMatch getOFMatch() {
}
if (match.isPresent(MatchType.NW_TOS)) {
/*
- * OF 1.0 switch expects the TOS as the 6 msb in the byte.
- * it is actually the DSCP field followed by a zero ECN
+ * OF 1.0 switch expects the TOS as the 6 msb in the byte. it is
+ * actually the DSCP field followed by a zero ECN
*/
byte tos = (Byte) match.getField(MatchType.NW_TOS).getValue();
- byte dscp = (byte)((int)tos << 2);
+ byte dscp = (byte) ((int) tos << 2);
if (!isIPv6) {
ofMatch.setNetworkTypeOfService(dscp);
wildcards &= ~OFMatch.OFPFW_NW_TOS;
/**
* Returns the list of actions in OF 1.0 form
+ *
* @return
*/
public List<OFAction> getOFActions() {
if (action.getType() == ActionType.CONTROLLER) {
OFActionOutput ofAction = new OFActionOutput();
ofAction.setPort(OFPort.OFPP_CONTROLLER.getValue());
- // We want the whole frame hitting the match be sent to the controller
+ // We want the whole frame hitting the match be sent to the
+ // controller
ofAction.setMaxLength((short) 0xffff);
actionsList.add(ofAction);
actionsLength += OFActionOutput.MINIMUM_LENGTH;
continue;
}
if (action.getType() == ActionType.SET_NEXT_HOP) {
- //TODO
+ // TODO
continue;
}
}
}
/**
- * Utility to convert a SAL flow to an OF 1.0 (OFFlowMod) or
- * to an OF 1.0 + IPv6 extension (V6FlowMod) Flow modifier Message
- *
+ * Utility to convert a SAL flow to an OF 1.0 (OFFlowMod) or to an OF 1.0 +
+ * IPv6 extension (V6FlowMod) Flow modifier Message
+ *
* @param sw
* @param command
* @param port
if (port != null) {
((OFFlowMod) fm).setOutPort(port);
}
+ if (command == OFFlowMod.OFPFC_ADD
+ || command == OFFlowMod.OFPFC_MODIFY
+ || command == OFFlowMod.OFPFC_MODIFY_STRICT) {
+ if (flow.getIdleTimeout() != 0 || flow.getHardTimeout() != 0) {
+ // Instruct switch to let controller know when flow expires
+ ((OFFlowMod) fm).setFlags((short) 1);
+ }
+ }
} else {
((V6FlowMod) fm).setVendor();
((V6FlowMod) fm).setMatch((V6Match) ofMatch);
if (port != null) {
((V6FlowMod) fm).setOutPort(port);
}
+ if (command == OFFlowMod.OFPFC_ADD
+ || command == OFFlowMod.OFPFC_MODIFY
+ || command == OFFlowMod.OFPFC_MODIFY_STRICT) {
+ if (flow.getIdleTimeout() != 0 || flow.getHardTimeout() != 0) {
+ // Instruct switch to let controller know when flow expires
+ ((V6FlowMod) fm).setFlags((short) 1);
+ }
+ }
}
return fm;
}
Match salMatch = new Match();
/*
- * Installed flow may not have a Match defined
- * like in case of a drop all flow
+ * Installed flow may not have a Match defined like in case of a
+ * drop all flow
*/
if (ofMatch != null) {
if (!isIPv6) {
if (ofMatch.getNetworkSource() != 0) {
salMatch.setField(MatchType.NW_SRC, NetUtils
.getInetAddress(ofMatch.getNetworkSource()),
- NetUtils.getInetNetworkMask(ofMatch
- .getNetworkSourceMaskLen(), false));
+ NetUtils.getInetNetworkMask(
+ ofMatch.getNetworkSourceMaskLen(),
+ false));
}
if (ofMatch.getNetworkDestination() != 0) {
- salMatch
- .setField(
- MatchType.NW_DST,
- NetUtils.getInetAddress(ofMatch
- .getNetworkDestination()),
- NetUtils
- .getInetNetworkMask(
- ofMatch
- .getNetworkDestinationMaskLen(),
- false));
+ salMatch.setField(MatchType.NW_DST,
+ NetUtils.getInetAddress(ofMatch
+ .getNetworkDestination()),
+ NetUtils.getInetNetworkMask(
+ ofMatch.getNetworkDestinationMaskLen(),
+ false));
}
if (ofMatch.getNetworkTypeOfService() != 0) {
- int dscp = NetUtils.getUnsignedByte(
- ofMatch.getNetworkTypeOfService());
- byte tos = (byte)(dscp >> 2);
+ int dscp = NetUtils.getUnsignedByte(ofMatch
+ .getNetworkTypeOfService());
+ byte tos = (byte) (dscp >> 2);
salMatch.setField(MatchType.NW_TOS, tos);
}
if (ofMatch.getNetworkProtocol() != 0) {
- salMatch.setField(MatchType.NW_PROTO, ofMatch
- .getNetworkProtocol());
+ salMatch.setField(MatchType.NW_PROTO,
+ ofMatch.getNetworkProtocol());
}
if (ofMatch.getTransportSource() != 0) {
- salMatch.setField(MatchType.TP_SRC, ((Short) ofMatch
- .getTransportSource()));
+ salMatch.setField(MatchType.TP_SRC,
+ ((Short) ofMatch.getTransportSource()));
}
if (ofMatch.getTransportDestination() != 0) {
- salMatch.setField(MatchType.TP_DST, ((Short) ofMatch
- .getTransportDestination()));
+ salMatch.setField(MatchType.TP_DST,
+ ((Short) ofMatch.getTransportDestination()));
}
} else {
// Compute OF1.0 + IPv6 extensions Match
.getDataLayerVirtualLanPriorityCodePoint());
}
if (v6Match.getNetworkSrc() != null) {
- salMatch.setField(MatchType.NW_SRC, v6Match
- .getNetworkSrc(), v6Match
- .getNetworkSourceMask());
+ salMatch.setField(MatchType.NW_SRC,
+ v6Match.getNetworkSrc(),
+ v6Match.getNetworkSourceMask());
}
if (v6Match.getNetworkDest() != null) {
- salMatch.setField(MatchType.NW_DST, v6Match
- .getNetworkDest(), v6Match
- .getNetworkDestinationMask());
+ salMatch.setField(MatchType.NW_DST,
+ v6Match.getNetworkDest(),
+ v6Match.getNetworkDestinationMask());
}
if (v6Match.getNetworkTypeOfService() != 0) {
- int dscp = NetUtils.getUnsignedByte(
- v6Match.getNetworkTypeOfService());
- byte tos = (byte) (dscp >> 2);
+ int dscp = NetUtils.getUnsignedByte(v6Match
+ .getNetworkTypeOfService());
+ byte tos = (byte) (dscp >> 2);
salMatch.setField(MatchType.NW_TOS, tos);
}
if (v6Match.getNetworkProtocol() != 0) {
- salMatch.setField(MatchType.NW_PROTO, v6Match
- .getNetworkProtocol());
+ salMatch.setField(MatchType.NW_PROTO,
+ v6Match.getNetworkProtocol());
}
if (v6Match.getTransportSource() != 0) {
- salMatch.setField(MatchType.TP_SRC, ((Short) v6Match
- .getTransportSource()));
+ salMatch.setField(MatchType.TP_SRC,
+ ((Short) v6Match.getTransportSource()));
}
if (v6Match.getTransportDestination() != 0) {
- salMatch.setField(MatchType.TP_DST, ((Short) v6Match
- .getTransportDestination()));
+ salMatch.setField(MatchType.TP_DST,
+ ((Short) v6Match.getTransportDestination()));
}
}
}
} else if (ofPort == OFPort.OFPP_NORMAL.getValue()) {
salAction = new HwPath();
} else if (ofPort == OFPort.OFPP_TABLE.getValue()) {
- salAction = new HwPath(); //TODO: we do not handle table in sal for now
+ salAction = new HwPath(); // TODO: we do not handle
+ // table in sal for now
} else {
- salAction = new Output(NodeConnectorCreator
- .createOFNodeConnector(ofPort, node));
+ salAction = new Output(
+ NodeConnectorCreator.createOFNodeConnector(
+ ofPort, node));
}
} else if (ofAction instanceof OFActionVirtualLanIdentifier) {
salAction = new SetVlanId(
--- /dev/null
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.protocol_plugin.openflow.internal;
+
+import org.apache.felix.dm.Component;
+import org.opendaylight.controller.protocol_plugin.openflow.IFlowProgrammerNotifier;
+import org.opendaylight.controller.sal.core.Node;
+import org.opendaylight.controller.sal.flowprogrammer.Flow;
+import org.opendaylight.controller.sal.flowprogrammer.IPluginOutFlowProgrammerService;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Flow Programmer Notifier class for relaying asynchronous messages received
+ * from the network node to the listeners on the proper container
+ */
+public class FlowProgrammerNotifier implements IFlowProgrammerNotifier {
+ protected static final Logger logger = LoggerFactory
+ .getLogger(FlowProgrammerNotifier.class);
+ private IPluginOutFlowProgrammerService salNotifier;
+
+ public FlowProgrammerNotifier() {
+ salNotifier = null;
+ }
+
+ void init(Component c) {
+ logger.debug("INIT called!");
+ }
+
+ /**
+ * Function called by the dependency manager when at least one dependency
+ * become unsatisfied or when the component is shutting down because for
+ * example bundle is being stopped.
+ *
+ */
+ void destroy() {
+ logger.debug("DESTROY called!");
+ }
+
+ /**
+ * Function called by dependency manager after "init ()" is called and after
+ * the services provided by the class are registered in the service registry
+ *
+ */
+ void start() {
+ logger.debug("START called!");
+ }
+
+ /**
+ * Function called by the dependency manager before the services exported by
+ * the component are unregistered, this will be followed by a "destroy ()"
+ * calls
+ *
+ */
+ void stop() {
+ logger.debug("STOP called!");
+ }
+
+ public void setPluginOutFlowProgrammerService(
+ IPluginOutFlowProgrammerService s) {
+ this.salNotifier = s;
+ }
+
+ public void unsetPluginOutFlowProgrammerService(
+ IPluginOutFlowProgrammerService s) {
+ if (this.salNotifier == s) {
+ this.salNotifier = null;
+ }
+ }
+
+ @Override
+ public void flowRemoved(Node node, Flow flow) {
+ if (salNotifier != null) {
+ salNotifier.flowRemoved(node, flow);
+ } else {
+ logger.warn("Unable to relay switch message to upper layer");
+ }
+ }
+
+}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
package org.opendaylight.controller.protocol_plugin.openflow.internal;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import org.opendaylight.controller.protocol_plugin.openflow.IFlowProgrammerNotifier;
import org.opendaylight.controller.protocol_plugin.openflow.core.IController;
+import org.opendaylight.controller.protocol_plugin.openflow.core.IMessageListener;
import org.opendaylight.controller.protocol_plugin.openflow.core.ISwitch;
+import org.opendaylight.controller.protocol_plugin.openflow.vendorextension.v6extension.V6Error;
import org.openflow.protocol.OFError;
import org.openflow.protocol.OFFlowMod;
+import org.openflow.protocol.OFFlowRemoved;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPort;
+import org.openflow.protocol.OFType;
+import org.openflow.protocol.action.OFAction;
+import org.opendaylight.controller.sal.core.ContainerFlow;
+import org.opendaylight.controller.sal.core.IContainerListener;
import org.opendaylight.controller.sal.core.Node;
import org.opendaylight.controller.sal.core.Node.NodeIDType;
+import org.opendaylight.controller.sal.core.NodeConnector;
+import org.opendaylight.controller.sal.core.UpdateType;
import org.opendaylight.controller.sal.flowprogrammer.Flow;
import org.opendaylight.controller.sal.flowprogrammer.IPluginInFlowProgrammerService;
+import org.opendaylight.controller.sal.match.Match;
+import org.opendaylight.controller.sal.match.MatchType;
+import org.opendaylight.controller.sal.utils.GlobalConstants;
+import org.opendaylight.controller.sal.utils.NodeCreator;
import org.opendaylight.controller.sal.utils.StatusCode;
import org.opendaylight.controller.sal.utils.Status;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Represents the openflow plugin component in charge of programming the flows
- * on the switch. It servers the install requests coming from the SAL layer.
- *
- *
- *
+ * the flow programming and relay them to functional modules above SAL.
*/
-public class FlowProgrammerService implements IPluginInFlowProgrammerService {
+public class FlowProgrammerService implements IPluginInFlowProgrammerService,
+ IMessageListener, IContainerListener {
+ private static final Logger log = LoggerFactory
+ .getLogger(FlowProgrammerService.class);
private IController controller;
+ private ConcurrentMap<String, IFlowProgrammerNotifier> flowProgrammerNotifiers;
+ private Map<String, Set<NodeConnector>> containerToNc;
public FlowProgrammerService() {
controller = null;
+ flowProgrammerNotifiers = new ConcurrentHashMap<String, IFlowProgrammerNotifier>();
}
public void setController(IController core) {
}
}
+ public void setFlowProgrammerNotifier(Map<String, ?> props,
+ IFlowProgrammerNotifier s) {
+ if (props == null || props.get("containerName") == null) {
+ log.error("Didn't receive the service correct properties");
+ return;
+ }
+ String containerName = (String) props.get("containerName");
+ this.flowProgrammerNotifiers.put(containerName, s);
+ }
+
+ public void unsetFlowProgrammerNotifier(Map<String, ?> props,
+ IFlowProgrammerNotifier s) {
+ if (props == null || props.get("containerName") == null) {
+ log.error("Didn't receive the service correct properties");
+ return;
+ }
+ String containerName = (String) props.get("containerName");
+ if (this.flowProgrammerNotifiers != null
+ && this.flowProgrammerNotifiers.containsKey(containerName)
+ && this.flowProgrammerNotifiers.get(containerName) == s) {
+ this.flowProgrammerNotifiers.remove(containerName);
+ }
+ }
+
/**
* Function called by the dependency manager when all the required
* dependencies are satisfied
- *
+ *
*/
void init() {
+ this.controller.addMessageListener(OFType.FLOW_REMOVED, this);
}
/**
- * Function called by the dependency manager when at least one
- * dependency become unsatisfied or when the component is shutting
- * down because for example bundle is being stopped.
- *
+ * Function called by the dependency manager when at least one dependency
+ * become unsatisfied or when the component is shutting down because for
+ * example bundle is being stopped.
+ *
*/
void destroy() {
}
/**
- * Function called by dependency manager after "init ()" is called
- * and after the services provided by the class are registered in
- * the service registry
- *
+ * Function called by dependency manager after "init ()" is called and after
+ * the services provided by the class are registered in the service registry
+ *
*/
void start() {
}
/**
- * Function called by the dependency manager before the services
- * exported by the component are unregistered, this will be
- * followed by a "destroy ()" calls
- *
+ * Function called by the dependency manager before the services exported by
+ * the component are unregistered, this will be followed by a "destroy ()"
+ * calls
+ *
*/
void stop() {
}
public Status addFlow(Node node, Flow flow) {
String action = "add";
if (!node.getType().equals(NodeIDType.OPENFLOW)) {
- return new Status(StatusCode.NOTACCEPTABLE,
- errorString("send", action, "Invalid node type"));
+ return new Status(StatusCode.NOTACCEPTABLE, errorString("send",
+ action, "Invalid node type"));
}
if (controller != null) {
*/
Object result = sw.syncSend(msg);
if (result instanceof Boolean) {
- return ((Boolean) result == Boolean.TRUE) ?
- new Status(StatusCode.SUCCESS, null)
- : new Status(StatusCode.TIMEOUT,
- errorString(null, action,
- "Request Timed Out"));
+ return ((Boolean) result == Boolean.TRUE) ? new Status(
+ StatusCode.SUCCESS, null) : new Status(
+ StatusCode.TIMEOUT, errorString(null, action,
+ "Request Timed Out"));
} else if (result instanceof OFError) {
- return new Status(StatusCode.INTERNALERROR,
- errorString("program", action, Utils
- .getOFErrorString((OFError) result)));
+ OFError res = (OFError) result;
+ if (res.getErrorType() == V6Error.NICIRA_VENDOR_ERRORTYPE) {
+ V6Error er = new V6Error(res);
+ byte[] b = res.getError();
+ ByteBuffer bb = ByteBuffer.allocate(b.length);
+ bb.put(b);
+ bb.rewind();
+ er.readFrom(bb);
+ return new Status(StatusCode.INTERNALERROR,
+ errorString("program", action,
+ "Vendor Extension Internal Error"));
+ }
+ return new Status(StatusCode.INTERNALERROR, errorString(
+ "program", action, Utils.getOFErrorString(res)));
} else {
- return new Status(StatusCode.INTERNALERROR,
- errorString("send", action, "Internal Error"));
+ return new Status(StatusCode.INTERNALERROR, errorString(
+ "send", action, "Internal Error"));
}
} else {
return new Status(StatusCode.GONE, errorString("send", action,
- "Switch is not available"));
+ "Switch is not available"));
}
}
- return new Status(StatusCode.INTERNALERROR,
- errorString("send", action, "Internal plugin error"));
+ return new Status(StatusCode.INTERNALERROR, errorString("send", action,
+ "Internal plugin error"));
}
@Override
public Status modifyFlow(Node node, Flow oldFlow, Flow newFlow) {
String action = "modify";
if (!node.getType().equals(NodeIDType.OPENFLOW)) {
- return new Status(StatusCode.NOTACCEPTABLE,
- errorString("send", action, "Invalid node type"));
+ return new Status(StatusCode.NOTACCEPTABLE, errorString("send",
+ action, "Invalid node type"));
}
if (controller != null) {
ISwitch sw = controller.getSwitch((Long) node.getID());
if (sw != null) {
OFMessage msg1 = null, msg2 = null;
- // If priority and match portion are the same, send a modification message
+ // If priority and match portion are the same, send a
+ // modification message
if (oldFlow.getPriority() != newFlow.getPriority()
|| !oldFlow.getMatch().equals(newFlow.getMatch())) {
msg1 = new FlowConverter(oldFlow).getOFFlowMod(
Object result = sw.syncSend(msg1);
if (result instanceof Boolean) {
if ((Boolean) result == Boolean.FALSE) {
- return new Status(StatusCode.TIMEOUT,
- errorString(null, action,
- "Request Timed Out"));
+ return new Status(StatusCode.TIMEOUT, errorString(null,
+ action, "Request Timed Out"));
} else if (msg2 == null) {
return new Status(StatusCode.SUCCESS, null);
}
} else if (result instanceof OFError) {
- return new Status(StatusCode.INTERNALERROR,
- errorString("program", action, Utils
- .getOFErrorString((OFError) result)));
+ return new Status(StatusCode.INTERNALERROR, errorString(
+ "program", action,
+ Utils.getOFErrorString((OFError) result)));
} else {
- return new Status(StatusCode.INTERNALERROR,
- errorString("send", action, "Internal Error"));
+ return new Status(StatusCode.INTERNALERROR, errorString(
+ "send", action, "Internal Error"));
}
if (msg2 != null) {
action = "add";
result = sw.syncSend(msg2);
if (result instanceof Boolean) {
- return ((Boolean) result == Boolean.TRUE) ?
- new Status(StatusCode.SUCCESS, null)
- : new Status(StatusCode.TIMEOUT,
- errorString(null, action,
- "Request Timed Out"));
+ return ((Boolean) result == Boolean.TRUE) ? new Status(
+ StatusCode.SUCCESS, null) : new Status(
+ StatusCode.TIMEOUT, errorString(null, action,
+ "Request Timed Out"));
} else if (result instanceof OFError) {
return new Status(StatusCode.INTERNALERROR,
errorString("program", action, Utils
- .getOFErrorString((OFError) result)));
+ .getOFErrorString((OFError) result)));
} else {
return new Status(StatusCode.INTERNALERROR,
errorString("send", action, "Internal Error"));
"Switch is not available"));
}
}
- return new Status(StatusCode.INTERNALERROR,
- errorString("send", action, "Internal plugin error"));
+ return new Status(StatusCode.INTERNALERROR, errorString("send", action,
+ "Internal plugin error"));
}
@Override
public Status removeFlow(Node node, Flow flow) {
String action = "remove";
if (!node.getType().equals(NodeIDType.OPENFLOW)) {
- return new Status(StatusCode.NOTACCEPTABLE,
- errorString("send", action, "Invalid node type"));
+ return new Status(StatusCode.NOTACCEPTABLE, errorString("send",
+ action, "Invalid node type"));
}
if (controller != null) {
ISwitch sw = controller.getSwitch((Long) node.getID());
OFFlowMod.OFPFC_DELETE_STRICT, OFPort.OFPP_NONE);
Object result = sw.syncSend(msg);
if (result instanceof Boolean) {
- return ((Boolean) result == Boolean.TRUE) ?
- new Status(StatusCode.SUCCESS, null)
- : new Status(StatusCode.TIMEOUT,
- errorString(null, action,
- "Request Timed Out"));
+ return ((Boolean) result == Boolean.TRUE) ? new Status(
+ StatusCode.SUCCESS, null) : new Status(
+ StatusCode.TIMEOUT, errorString(null, action,
+ "Request Timed Out"));
} else if (result instanceof OFError) {
- return new Status(StatusCode.INTERNALERROR,
- errorString("program", action, Utils
- .getOFErrorString((OFError) result)));
+ return new Status(StatusCode.INTERNALERROR, errorString(
+ "program", action,
+ Utils.getOFErrorString((OFError) result)));
} else {
- return new Status(StatusCode.INTERNALERROR,
- errorString("send", action, "Internal Error"));
+ return new Status(StatusCode.INTERNALERROR, errorString(
+ "send", action, "Internal Error"));
}
} else {
- return new Status(StatusCode.GONE, errorString("send", action,
+ return new Status(StatusCode.GONE, errorString("send", action,
"Switch is not available"));
}
}
- return new Status(StatusCode.INTERNALERROR,
- errorString("send", action, "Internal plugin error"));
+ return new Status(StatusCode.INTERNALERROR, errorString("send", action,
+ "Internal plugin error"));
}
@Override
+ " flow message: " : action + " the flow: ") + cause;
}
+ @Override
+ public void receive(ISwitch sw, OFMessage msg) {
+ if (msg instanceof OFFlowRemoved) {
+ handleFlowRemovedMessage(sw, (OFFlowRemoved) msg);
+ }
+ }
+
+ private void handleFlowRemovedMessage(ISwitch sw, OFFlowRemoved msg) {
+ Node node = NodeCreator.createOFNode(sw.getId());
+ Flow flow = new FlowConverter(msg.getMatch(),
+ new ArrayList<OFAction>(0)).getFlow(node);
+ flow.setPriority(msg.getPriority());
+ flow.setIdleTimeout(msg.getIdleTimeout());
+ flow.setId(msg.getCookie());
+
+ Match match = flow.getMatch();
+ NodeConnector inPort = match.isPresent(MatchType.IN_PORT) ? (NodeConnector) match
+ .getField(MatchType.IN_PORT).getValue() : null;
+
+ for (Map.Entry<String, IFlowProgrammerNotifier> containerNotifier : flowProgrammerNotifiers
+ .entrySet()) {
+ String container = containerNotifier.getKey();
+ IFlowProgrammerNotifier notifier = containerNotifier.getValue();
+ /*
+ * Switch only provide us with the match information. For now let's
+ * try to identify the container membership only from the input port
+ * match field. In any case, upper layer consumers can derive
+ * whether the notification was not for them. More sophisticated
+ * filtering can be added later on.
+ */
+ if (inPort == null
+ || container.equals(GlobalConstants.DEFAULT.toString())
+ || this.containerToNc.get(container).contains(inPort)) {
+ notifier.flowRemoved(node, flow);
+ }
+ }
+ }
+
+ @Override
+ public void tagUpdated(String containerName, Node n, short oldTag,
+ short newTag, UpdateType t) {
+
+ }
+
+ @Override
+ public void containerFlowUpdated(String containerName,
+ ContainerFlow previousFlow, ContainerFlow currentFlow, UpdateType t) {
+ }
+
+ @Override
+ public void nodeConnectorUpdated(String containerName, NodeConnector p,
+ UpdateType type) {
+ Set<NodeConnector> target = null;
+
+ switch (type) {
+ case ADDED:
+ if (!containerToNc.containsKey(containerName)) {
+ containerToNc.put(containerName, new HashSet<NodeConnector>());
+ }
+ containerToNc.get(containerName).add(p);
+ break;
+ case CHANGED:
+ break;
+ case REMOVED:
+ target = containerToNc.get(containerName);
+ if (target != null) {
+ target.remove(p);
+ }
+ break;
+ default:
+ }
+
+ }
+
+ @Override
+ public void containerModeUpdated(UpdateType t) {
+
+ }
}
.getLast());
long timePeriod = (long) (factoredSamples * portStatsPeriod)
/ (long) tickPeriod;
- average = (8 * increment) / timePeriod;
+ average = (8L * increment) / timePeriod;
return average;
}
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import org.opendaylight.controller.sal.utils.GlobalConstants;
/**
- * The class describes a shim layer that relays the topology events from OpenFlow
- * core to various listeners. The notifications are filtered based on container
- * configurations.
+ * The class describes a shim layer that relays the topology events from
+ * OpenFlow core to various listeners. The notifications are filtered based on
+ * container configurations.
*/
public class TopologyServiceShim implements IDiscoveryService,
IContainerListener, CommandProvider, IRefreshInternalProvider {
.getLogger(TopologyServiceShim.class);
private ConcurrentMap<String, ITopologyServiceShimListener> topologyServiceShimListeners = new ConcurrentHashMap<String, ITopologyServiceShimListener>();
private ConcurrentMap<NodeConnector, List<String>> containerMap = new ConcurrentHashMap<NodeConnector, List<String>>();
- private ConcurrentMap<String, Map<NodeConnector, Pair<Edge, Set<Property>>>> edgeMap = new ConcurrentHashMap<String, Map<NodeConnector, Pair<Edge, Set<Property>>>>();
+ private ConcurrentMap<String, ConcurrentMap<NodeConnector, Pair<Edge, Set<Property>>>> edgeMap = new ConcurrentHashMap<String, ConcurrentMap<NodeConnector, Pair<Edge, Set<Property>>>>();
private BlockingQueue<NotifyEntry> notifyQ;
private Thread notifyThread;
private Thread bwUtilNotifyThread;
private BlockingQueue<UtilizationUpdate> bwUtilNotifyQ;
private List<NodeConnector> connectorsOverUtilized;
- private float bwThresholdFactor = (float) 0.8; // Threshold = 80% of link bandwidth
+ private float bwThresholdFactor = (float) 0.8; // Threshold = 80% of link
+ // bandwidth
class NotifyEntry {
String container;
ITopologyServiceShimListener topologServiceShimListener = topologyServiceShimListeners
.get(entry.container);
- topologServiceShimListener.edgeUpdate(entry.edgeProps
- .getLeft(), entry.type, entry.edgeProps.getRight());
+ topologServiceShimListener.edgeUpdate(
+ entry.edgeProps.getLeft(), entry.type,
+ entry.edgeProps.getRight());
entry = null;
} catch (InterruptedException e1) {
try {
UtilizationUpdate update = notifyQ.take();
NodeConnector connector = update.connector;
- Set<String> containerList = edgeMap.keySet();//.get(connector);
+ Set<String> containerList = edgeMap.keySet();
for (String container : containerList) {
Map<NodeConnector, Pair<Edge, Set<Property>>> edgePropsMap = edgeMap
.get(container);
}
}
} catch (InterruptedException e1) {
- logger
- .warn(
- "Edge Bandwidth Utilization Notify Thread interrupted",
- e1.getMessage());
+ logger.warn(
+ "Edge Bandwidth Utilization Notify Thread interrupted",
+ e1.getMessage());
if (shuttingDown) {
return;
}
/**
* Function called by the dependency manager when all the required
* dependencies are satisfied
- *
+ *
*/
void init() {
logger.trace("Init called");
}
/**
- * Continuously polls the transmit bit rate for all the node connectors
- * from statistics manager and trigger the warning notification upward
- * when the transmit rate is above a threshold which is a percentage of
- * the edge bandwidth
+ * Continuously polls the transmit bit rate for all the node connectors from
+ * statistics manager and trigger the warning notification upward when the
+ * transmit rate is above a threshold which is a percentage of the edge
+ * bandwidth
*/
protected void pollTxBitRates() {
Map<NodeConnector, Pair<Edge, Set<Property>>> globalContainerEdges = edgeMap
for (NodeConnector connector : globalContainerEdges.keySet()) {
// Skip if node connector belongs to production switch
- if (connector.getType().equals(NodeConnector.NodeConnectorIDType.PRODUCTION)) {
+ if (connector.getType().equals(
+ NodeConnector.NodeConnectorIDType.PRODUCTION)) {
continue;
}
}
/**
- * Function called by the dependency manager when at least one
- * dependency become unsatisfied or when the component is shutting
- * down because for example bundle is being stopped.
- *
+ * Function called by the dependency manager when at least one dependency
+ * become unsatisfied or when the component is shutting down because for
+ * example bundle is being stopped.
+ *
*/
void destroy() {
logger.trace("DESTROY called!");
}
/**
- * Function called by dependency manager after "init ()" is called
- * and after the services provided by the class are registered in
- * the service registry
- *
+ * Function called by dependency manager after "init ()" is called and after
+ * the services provided by the class are registered in the service registry
+ *
*/
void start() {
logger.trace("START called!");
}
/**
- * Function called by the dependency manager before the services
- * exported by the component are unregistered, this will be
- * followed by a "destroy ()" calls
- *
+ * Function called by the dependency manager before the services exported by
+ * the component are unregistered, this will be followed by a "destroy ()"
+ * calls
+ *
*/
void stop() {
logger.trace("STOP called!");
}
if ((this.topologyServiceShimListeners != null)
&& !this.topologyServiceShimListeners
- .containsKey(containerName)) {
+ .containsKey(containerName)) {
this.topologyServiceShimListeners.put(containerName, s);
logger.trace("Added topologyServiceShimListener for container:"
+ containerName);
return;
}
if ((this.topologyServiceShimListeners != null)
- && this.topologyServiceShimListeners
- .containsKey(containerName)
- && this.topologyServiceShimListeners
- .get(containerName).equals(s)
- ) {
+ && this.topologyServiceShimListeners.containsKey(containerName)
+ && this.topologyServiceShimListeners.get(containerName).equals(
+ s)) {
this.topologyServiceShimListeners.remove(containerName);
logger.trace("Removed topologyServiceShimListener for container: "
+ containerName);
private void notifyEdge(String container, Edge edge, UpdateType type,
Set<Property> props) {
- Map<NodeConnector, Pair<Edge, Set<Property>>> edgePropsMap = edgeMap
+ ConcurrentMap<NodeConnector, Pair<Edge, Set<Property>>> edgePropsMap = edgeMap
.get(container);
NodeConnector src = edge.getTailNodeConnector();
Pair<Edge, Set<Property>> edgeProps = new ImmutablePair<Edge, Set<Property>>(
case ADDED:
case CHANGED:
if (edgePropsMap == null) {
- edgePropsMap = new HashMap<NodeConnector, Pair<Edge, Set<Property>>>();
+ edgePropsMap = new ConcurrentHashMap<NodeConnector, Pair<Edge, Set<Property>>>();
} else {
if (edgePropsMap.containsKey(src)
&& edgePropsMap.get(src).equals(edgeProps)) {
NodeConnector src = edge.getTailNodeConnector(), dst = edge
.getHeadNodeConnector();
- if (!src.getType().equals(
- NodeConnector.NodeConnectorIDType.PRODUCTION)) {
+ if (!src.getType().equals(NodeConnector.NodeConnectorIDType.PRODUCTION)) {
/* Find the common containers for both ends */
List<String> srcContainers = this.containerMap.get(src), dstContainers = this.containerMap
.get(dst), cmnContainers = null;
public String getHelp() {
StringBuffer help = new StringBuffer();
help.append("---Topology Service Shim---\n");
- help
- .append("\t pem [container] - Print edgeMap entries for a given container\n");
+ help.append("\t pem [container] - Print edgeMap entries");
+ help.append(" for a given container\n");
return help.toString();
}
}
/**
- * This method will trigger topology updates to be sent
- * toward SAL. SAL then pushes the updates to ALL the applications
- * that have registered as listeners for this service. SAL has no
- * way of knowing which application requested for the refresh.
- *
- * As an example of this case, is stopping and starting the
- * Topology Manager. When the topology Manager is stopped,
- * and restarted, it will no longer have the latest topology.
- * Hence, a request is sent here.
- *
+ * This method will trigger topology updates to be sent toward SAL. SAL then
+ * pushes the updates to ALL the applications that have registered as
+ * listeners for this service. SAL has no way of knowing which application
+ * requested for the refresh.
+ *
+ * As an example of this case, is stopping and starting the Topology
+ * Manager. When the topology Manager is stopped, and restarted, it will no
+ * longer have the latest topology. Hence, a request is sent here.
+ *
* @param containerName
* @return void
*/
}
/**
- * Reading the current topology database, the method will replay
- * all the edge updates for the ITopologyServiceShimListener instance
- * in the given container, which will in turn publish them toward SAL.
+ * Reading the current topology database, the method will replay all the
+ * edge updates for the ITopologyServiceShimListener instance in the given
+ * container, which will in turn publish them toward SAL.
+ *
* @param containerName
*/
private void TopologyBulkUpdate(String containerName) {
--- /dev/null
+package org.opendaylight.controller.protocol_plugin.openflow.vendorextension.v6extension;
+
+import java.nio.ByteBuffer;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
+import org.openflow.protocol.OFError;
+
+public class V6Error extends OFError {
+ private static final long serialVersionUID = 1L;
+ public static int MINIMUM_LENGTH = 20;//OfHdr(8) + NXET_VENDOR(2) + NXEC_VENDOR_ERROR(2) + struct nx_vendor_error(8)
+ public static final short NICIRA_VENDOR_ERRORTYPE = (short)0xb0c2;
+ protected int V6VendorId;
+ protected short V6VendorErrorType;
+ protected short V6VendorErrorCode;
+ protected byte[] V6ErrorData;
+
+ public V6Error(OFError e) {
+ this.length = (short)e.getLengthU();
+ this.errorType = e.getErrorType();
+ this.errorCode = e.getErrorCode();
+ this.xid = e.getXid();
+ }
+
+ @Override
+ public void readFrom(ByteBuffer data) {
+ this.V6VendorId = data.getInt();
+ this.V6VendorErrorType = data.getShort();
+ this.V6VendorErrorCode = data.getShort();
+ int dataLength = this.getLengthU() - MINIMUM_LENGTH;
+ if (dataLength > 0) {
+ this.V6ErrorData = new byte[dataLength];
+ data.get(this.V6ErrorData);
+ }
+ }
+
+ /**
+ * @return the V6VendorId
+ */
+ public int getVendorId() {
+ return V6VendorId;
+ }
+
+ /**
+ * @return the V6VendorErrorType
+ */
+ public short getVendorErrorType() {
+ return V6VendorErrorType;
+ }
+
+ /**
+ * @return the VendorErrorType
+ */
+ public short getVendorErrorCode() {
+ return V6VendorErrorCode;
+ }
+
+ /**
+ * @return the Error Bytes
+ */
+ public byte[] getError() {
+ return V6ErrorData;
+ }
+
+ @Override
+ public int hashCode() {
+ return HashCodeBuilder.reflectionHashCode(this);
+ }
+
+ @Override
+ public String toString() {
+ return "V6Error[" + ReflectionToStringBuilder.toString(this) + "]";
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ return EqualsBuilder.reflectionEquals(this, obj);
+ }
+}
import java.util.LinkedList;
import java.util.List;
+import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.openflow.protocol.OFPacketOut;
import org.openflow.protocol.OFPort;
import org.openflow.protocol.OFVendor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
-
/**
* This class is used to create IPv6 Vendor Extension messages. Specfically, It
* defines the methods used in creation of Vendor specific IPv6 Flow Mod message.
super.setVendor(V6StatsRequest.NICIRA_VENDOR_ID);
}
+ /**
+ * Get flags
+ * @return
+ */
+ public short getFlags() {
+ return flags;
+ }
+
+ /**
+ * Set flags
+ * @param flags
+ */
+ public void setFlags(short flags) {
+ this.flags = flags;
+ }
+
/**
* This method forms the Vendor extension IPv6 Flow Mod message.It uses the
* fields in V6FlowMod class, and writes the data according to vendor
data.putShort(this.priority);
data.putInt(OFPacketOut.BUFFER_ID_NONE);
data.putShort(outPort); /* output_port */
- data.putShort((short) 0); /* flags */
+ data.putShort(flags); /* flags */
match_len = this.match.getIPv6MatchLen();
data.putShort(match_len);
byte[] pad = new byte[6];
* Interface for applications which maintain an authorization
* database for their resources. Respective application web bundle
* and User Manager make use of this interface to retrieve
- * authorization information at user or and role level
+ * authorization information at user or and role level.
*/
public interface IResourceAuthorization {
public List<String> getResourceGroups();
/**
- * Assign a resource group to a user group (role)
+ * Assign a resource group to a role
*
- * @param groupName the object expressing the resource group name and the access privilege
- * @param role the user group (role) name
+ * @param groupName the name of the resource group
+ * @param privilege the access privilege role will have on the resource group
+ * @param role the role name
* @return the status of the request
*/
+ @Deprecated
public Status assignResourceGroupToRole(String groupName,
Privilege privilege, String role);
+ /**
+ * Assign a resource group to a role. The access privilege on the resources
+ * is inferred by the AppRoleLevel associated to role.
+ *
+ * @param groupName the name of the resource group
+ * @param role the role name
+ * @return the status of the request
+ */
+ public Status assignResourceGroupToRole(String groupName, String role);
+
/**
* Unassign the passed resource group from the specified role
*
--- /dev/null
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.sal.flowprogrammer;
+
+/**
+ * This interface defines the methods the SAL service which relay to the
+ * functional modules the asynchronous messages related to flow programming
+ * coming from the network nodes.
+ */
+public interface IFlowProgrammerListener extends
+ IPluginOutFlowProgrammerService {
+
+}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import org.opendaylight.controller.sal.utils.Status;
/**
- * Interface for installing/removing flows on a network node
- *
- *
- *
+ * Interface that defines the methods available to the functional modules above
+ * SAL for installing/modifying/removing flows on a network node
*/
public interface IFlowProgrammerService {
/**
* Add a flow to the network node
- *
+ *
* @param node
* @param flow
*/
/**
* Modify existing flow on the switch
- *
+ *
* @param node
* @param flow
*/
/**
* Remove the flow from the network node
+ *
* @param node
* @param flow
*/
/**
* Remove all flows present on the network node
+ *
* @param node
*/
Status removeAllFlows(Node node);
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import org.opendaylight.controller.sal.utils.Status;
/**
- * @file IPluginOutFlowProgrammer.java
- *
- * @brief Flow programmer interface to be implemented by protocol plugins
- *
- *
- *
+ * @file IPluginOutFlowProgrammer.java
+ *
+ * @brief Flow programmer interface to be implemented by protocol plugins
*/
public interface IPluginInFlowProgrammerService {
/**
* Add a flow to the network node
- *
+ *
* @param node
* @param flow
*/
- Status addFlow(Node node, Flow flow);
+ Status addFlow(Node node, Flow flow);
/**
* Modify existing flow on the switch
- *
+ *
* @param node
* @param flow
*/
- Status modifyFlow(Node node, Flow oldFlow, Flow newFlow);
+ Status modifyFlow(Node node, Flow oldFlow, Flow newFlow);
/**
* Remove the flow from the network node
+ *
* @param node
* @param flow
*/
- Status removeFlow(Node node, Flow flow);
+ Status removeFlow(Node node, Flow flow);
/**
* Remove all flows present on the network node
+ *
* @param node
*/
- Status removeAllFlows(Node node);
+ Status removeAllFlows(Node node);
}
--- /dev/null
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+
+package org.opendaylight.controller.sal.flowprogrammer;
+
+import org.opendaylight.controller.sal.core.Node;
+
+/**
+ * This interface defines the methods the protocol plugin must implement to
+ * inform the SAL layer about the asynchronous messages related to flow
+ * programming coming from the network nodes.
+ */
+public interface IPluginOutFlowProgrammerService {
+ /**
+ * Inform SAL that the flow on the specified node has been removed Consumer
+ * has to expect this notification only for flows which were installed with
+ * an idle or hard timeout specified.
+ *
+ * @param node
+ * the network node on which the flow got removed
+ * @param flow
+ * the flow that got removed. Note: It may contain only the Match
+ * and flow parameters fields. Actions may not be present.
+ */
+ public void flowRemoved(Node node, Flow flow);
+}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
-import org.apache.commons.lang3.builder.EqualsBuilder;
-import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Represents the generic matching field
- *
+ *
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.NONE)
-
public class MatchField implements Cloneable, Serializable {
- private static final long serialVersionUID = 1L;
- private static final Logger logger = LoggerFactory
+ private static final long serialVersionUID = 1L;
+ private static final Logger logger = LoggerFactory
.getLogger(MatchField.class);
- private MatchType type; // the field we want to match
+ private MatchType type; // the field we want to match
private Object value; // the value of the field we want to match
- private Object mask; // the value of the mask we want to match on the specified field
+ private Object mask; // the value of the mask we want to match on the
+ // specified field
private transient boolean isValid;
// To satisfy JAXB
+ @SuppressWarnings("unused")
private MatchField() {
}
+
/**
* Mask based match constructor
- *
+ *
* @param type
* @param value
- * @param mask has to be of the same class type of value. A null mask means full match
+ * @param mask
+ * has to be of the same class type of value. A null mask means
+ * full match
*/
public MatchField(MatchType type, Object value, Object mask) {
this.type = type;
this.value = value;
this.mask = mask;
- this.isValid = checkValueType() && checkValues(); // Keep this logic, value checked only if type check is fine
+ // Keep this logic, value checked only if type check is fine
+ this.isValid = checkValueType() && checkValues();
}
/**
* Full match constructor
- *
+ *
* @param type
* @param value
*/
this.type = type;
this.value = value;
this.mask = null;
- this.isValid = checkValueType() && checkValues(); // Keep this logic, value checked only if type check is fine
+ // Keep this logic, value checked only if type check is fine
+ this.isValid = checkValueType() && checkValues();
}
/**
* Returns the value set for this match field
- *
+ *
* @return
*/
public Object getValue() {
return value;
}
-
- @XmlElement(name="value")
+
+ @XmlElement(name = "value")
private String getValueString() {
- return type.stringify(value);
+ return type.stringify(value);
}
/**
* Returns the type field this match field object is for
- *
+ *
* @return
*/
public MatchType getType() {
return type;
}
- @XmlElement(name="type")
+ @XmlElement(name = "type")
private String getTypeString() {
- return type.toString();
+ return type.toString();
}
/**
- * Returns the mask value set for this field match
- * A null mask means this is a full match
+ * Returns the mask value set for this field match A null mask means this is
+ * a full match
+ *
* @return
*/
public Object getMask() {
return mask;
}
-
- @XmlElement(name="mask")
+
+ @XmlElement(name = "mask")
private String getMaskString() {
- return type.stringify(mask);
+ return type.stringify(mask);
}
/**
* Returns the bitmask set for this field match
- *
+ *
* @return
*/
public long getBitMask() {
/**
* Returns whether the field match configuration is valid or not
- *
+ *
* @return
*/
public boolean isValid() {
}
@Override
- public int hashCode() {
- return HashCodeBuilder.reflectionHashCode(this);
+ public String toString() {
+ return type + "(" + getValueString() + "," + getMaskString() + ")";
}
@Override
- public boolean equals(Object obj) {
- return EqualsBuilder.reflectionEquals(this, obj);
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((mask == null) ? 0 : mask.hashCode());
+ result = prime * result + ((type == null) ? 0 : type.hashCode());
+ result = prime * result + ((value == null) ? 0 : value.hashCode());
+ return result;
}
@Override
- public String toString() {
- return type + "(" + getValueString() + "," + getMaskString() + ")";
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ MatchField other = (MatchField) obj;
+ if (type != other.type) {
+ return false;
+ }
+ return (type.equalValues(this.value, other.value) && type.equalMasks(
+ this.mask, other.mask));
}
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
package org.opendaylight.controller.sal.match;
import java.net.InetAddress;
+import java.util.Arrays;
import org.opendaylight.controller.sal.core.NodeConnector;
import org.opendaylight.controller.sal.utils.HexEncode;
import org.opendaylight.controller.sal.utils.NetUtils;
/**
- * Represents the binding between the id, the value and mask type and the range values
- * of the elements type that can be matched on the network frame/packet/message
- *
- *
- *
+ * Represents the binding between the id, the value and mask type and the range
+ * values of the elements type that can be matched on the network
+ * frame/packet/message
+ *
+ *
+ *
*/
public enum MatchType {
- IN_PORT("inPort", 1 << 0, NodeConnector.class, 1, 0),
- DL_SRC("dlSrc", 1 << 1, Byte[].class, 0, 0xffffffffffffL),
- DL_DST("dlDst", 1 << 2, Byte[].class, 0, 0xffffffffffffL),
+ IN_PORT("inPort", 1 << 0, NodeConnector.class, 1, 0),
+ DL_SRC("dlSrc", 1 << 1, Byte[].class, 0, 0xffffffffffffL),
+ DL_DST("dlDst", 1 << 2, Byte[].class, 0, 0xffffffffffffL),
DL_VLAN("dlVlan", 1 << 3, Short.class, 1, 0xfff), // 2 bytes
DL_VLAN_PR("dlVlanPriority", 1 << 4, Byte.class, 0, 0x7), // 3 bits
- DL_OUTER_VLAN("dlOuterVlan", 1 << 5, Short.class, 1, 0xfff),
- DL_OUTER_VLAN_PR("dlOuterVlanPriority", 1 << 6, Short.class, 0, 0x7),
+ DL_OUTER_VLAN("dlOuterVlan", 1 << 5, Short.class, 1, 0xfff),
+ DL_OUTER_VLAN_PR("dlOuterVlanPriority", 1 << 6, Short.class, 0, 0x7),
DL_TYPE("dlType", 1 << 7, Short.class, 0, 0xffff), // 2 bytes
NW_TOS("nwTOS", 1 << 8, Byte.class, 0, 0x3f), // 6 bits (DSCP field)
NW_PROTO("nwProto", 1 << 9, Byte.class, 0, 0xff), // 1 byte
- NW_SRC("nwSrc", 1 << 10, InetAddress.class, 0, 0),
- NW_DST("nwDst", 1 << 11, InetAddress.class, 0, 0),
+ NW_SRC("nwSrc", 1 << 10, InetAddress.class, 0, 0),
+ NW_DST("nwDst", 1 << 11, InetAddress.class, 0, 0),
TP_SRC("tpSrc", 1 << 12, Short.class, 1, 0xffff), // 2 bytes
TP_DST("tpDst", 1 << 13, Short.class, 1, 0xffff); // 2 bytes
}
/**
- * Perform the assignment type validation
+ * Perform the assignment type validation
+ *
* @param value
* @param mask
* @return
Class<?> e = this.dataType();
Class<?> g = value.getClass();
- // This is all what we need, if value type is same of match required type
+ // This is all what we need, if value type is same of match required
+ // type
if (g.equals(e)) {
return true;
}
/**
* Perform the value and mask range validation
+ *
* @param value
* @param mask
* @return
/**
* Return the mask value in 64 bits bitmask form
+ *
* @param mask
* @return
*/
public long getBitMask(Object mask) {
if (this.dataType == InetAddress.class) {
- //TODO handle Inet v4 and v6 v6 will have a second upper mask
+ // TODO handle Inet v4 and v6 v6 will have a second upper mask
return 0;
}
if (this.dataType() == Byte[].class) {
byte mac[] = (byte[]) mask;
long bitmask = 0;
for (short i = 0; i < 6; i++) {
- // bitmask |= (((long)mac[i] & 0xffL) << (long)((5-i)*8));
bitmask |= (((long) mac[i] & 0xffL) << ((5 - i) * 8));
}
return bitmask;
return 0L;
}
- public String stringify(Object value) {
- if (value == null) {
- return null;
- }
-
- switch (this) {
- case DL_DST:
- case DL_SRC:
- return HexEncode.bytesToHexStringFormat((byte[])value);
- case DL_TYPE:
- case DL_VLAN:
- return ((Integer) NetUtils.getUnsignedShort((Short)value))
- .toString();
- case NW_SRC:
- case NW_DST:
- return ((InetAddress)value).getHostAddress();
- case NW_TOS:
- return ((Integer) NetUtils.getUnsignedByte((Byte)value))
- .toString();
- case TP_SRC:
- case TP_DST:
- return ((Integer) NetUtils.getUnsignedShort((Short)value))
- .toString();
- default:
- break;
- }
- return value.toString();
- }
+ public String stringify(Object value) {
+ if (value == null) {
+ return null;
+ }
+
+ switch (this) {
+ case DL_DST:
+ case DL_SRC:
+ return HexEncode.bytesToHexStringFormat((byte[]) value);
+ case DL_TYPE:
+ case DL_VLAN:
+ return ((Integer) NetUtils.getUnsignedShort((Short) value))
+ .toString();
+ case NW_SRC:
+ case NW_DST:
+ return ((InetAddress) value).getHostAddress();
+ case NW_TOS:
+ return ((Integer) NetUtils.getUnsignedByte((Byte) value))
+ .toString();
+ case TP_SRC:
+ case TP_DST:
+ return ((Integer) NetUtils.getUnsignedShort((Short) value))
+ .toString();
+ default:
+ break;
+ }
+ return value.toString();
+ }
+
+ public boolean equalValues(Object a, Object b) {
+ if (a == b) {
+ return true;
+ }
+ if (a == null || b == null) {
+ return false;
+ }
+ switch (this) {
+ case DL_DST:
+ case DL_SRC:
+ return Arrays.equals((byte[]) a, (byte[]) b);
+ default:
+ return a.equals(b);
+ }
+ }
+
+ public boolean equalMasks(Object a, Object b) {
+ if (a == b) {
+ return true;
+ }
+ switch (this) {
+ case NW_SRC:
+ case NW_DST:
+ /*
+ * For network address mask, network node may return full mask for
+ * flows the controller generated with a null mask object
+ */
+ byte maskBytes[] = null;
+ if (a == null) {
+ maskBytes = ((InetAddress) b).getAddress();
+ } else if (b == null) {
+ maskBytes = ((InetAddress) a).getAddress();
+ }
+ if (maskBytes != null) {
+ return (NetUtils.getSubnetMaskLength(maskBytes) == 0);
+ }
+ default:
+ if (a == null) {
+ return false;
+ }
+ return a.equals(b);
+ }
+ }
}
/**
* Called when an Edge utilization is above the safety threshold
* configured on the controller
+ *
* @param edge
*/
public void edgeOverUtilized(Edge edge);
/**
* Represents the return object of the osgi service interfaces function calls.
- * It contains a code {@code StatusCode} representing the result of the call
- * and a string which describes a failure reason (if any) in human readable form.
+ * It contains a code {@code StatusCode} representing the result of the call and
+ * a string which describes a failure reason (if any) in human readable form.
*/
public class Status {
- StatusCode code;
- String description;
-
- /**
- * Generates an instance of the Status class.
- *
- * @param errorCode The status code. If passed as null, code will be
- * stored as {@code StatusCode.UNDEFINED}
- * @param description The human readable description of the status. If passed
- * as null, description will be inferred by the code
- */
- public Status(StatusCode errorCode, String description) {
- this.code = (errorCode != null)? errorCode : StatusCode.UNDEFINED;
- this.description = (description != null)? description : this.code.toString();
- }
-
- /**
- * Returns the status code
- * @return the {@code StatusCode} representing the status code
- */
- public StatusCode getCode() {
- return code;
- }
-
- /**
- * Returns a human readable description of the failure if any
- * @return a string representing the reason of failure
- */
- public String getDescription() {
- return description;
- }
-
- /**
- * Tells whether the status is successful
- * @return true if the Status code is {@code StatusCode.SUCCESS}
- */
- public boolean isSuccess() {
- return code == StatusCode.SUCCESS;
- }
-
- @Override
- public String toString() {
- return code + ": " + description;
- }
+ StatusCode code;
+ String description;
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((code == null) ? 0 : code.hashCode());
- return result;
- }
+ /**
+ * Generates an instance of the Status class. This is used as return code
+ * for internal API2 function calls. This constructor allows to specify,
+ * beside the Status Code, a custom human readable description to add more
+ * information about the status.
+ *
+ * @param errorCode
+ * The status code. If passed as null, code will be stored as
+ * {@code StatusCode.UNDEFINED}
+ * @param description
+ * The human readable description of the status. If passed as
+ * null, description will be inferred by the code
+ */
+ public Status(StatusCode errorCode, String description) {
+ this.code = (errorCode != null) ? errorCode : StatusCode.UNDEFINED;
+ this.description = (description != null) ? description : this.code
+ .toString();
+ }
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- Status other = (Status) obj;
- if (code != other.code)
- return false;
- return true;
- }
+ /**
+ * Generates an instance of the Status class based on the passed StatusCode
+ * only. The description field of the Status object will be inferred by the
+ * status code.
+ *
+ * @param errorCode
+ * The status code. If passed as null, code will be stored as
+ * {@code StatusCode.UNDEFINED}
+ */
+ public Status(StatusCode errorCode) {
+ this.code = (errorCode != null) ? errorCode : StatusCode.UNDEFINED;
+ this.description = (description != null) ? description : this.code
+ .toString();
+ }
+
+ /**
+ * Returns the status code
+ *
+ * @return the {@code StatusCode} representing the status code
+ */
+ public StatusCode getCode() {
+ return code;
+ }
+
+ /**
+ * Returns a human readable description of the failure if any
+ *
+ * @return a string representing the reason of failure
+ */
+ public String getDescription() {
+ return description;
+ }
+
+ /**
+ * Tells whether the status is successful
+ *
+ * @return true if the Status code is {@code StatusCode.SUCCESS}
+ */
+ public boolean isSuccess() {
+ return code == StatusCode.SUCCESS;
+ }
+
+ @Override
+ public String toString() {
+ return code + ": " + description;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((code == null) ? 0 : code.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ Status other = (Status) obj;
+ if (code != other.code)
+ return false;
+ return true;
+ }
}
}
}
+ @Test
+ public void testEqualityNetMask() throws Exception {
+
+ InetAddress srcIP = InetAddress.getByName("1.1.1.1");
+ InetAddress ipMask = InetAddress.getByName("255.255.255.255");
+ InetAddress srcIP2 = InetAddress.getByName("1.1.1.1");
+ InetAddress ipMask2 = null;
+ short ethertype = EtherTypes.IPv4.shortValue();
+ short ethertype2 = EtherTypes.IPv4.shortValue();
+
+ /*
+ * Create a SAL Flow aFlow
+ */
+ Match match1 = new Match();
+ Match match2 = new Match();
+
+ match1.setField(MatchType.DL_TYPE, ethertype);
+ match1.setField(MatchType.NW_SRC, srcIP, ipMask);
+
+ match2.setField(MatchType.DL_TYPE, ethertype2);
+ match2.setField(MatchType.NW_SRC, srcIP2, ipMask2);
+
+ Assert.assertTrue(match1.equals(match2));
+
+ ipMask2 = InetAddress.getByName("255.255.255.255");
+ match2.setField(MatchType.NW_SRC, srcIP2, ipMask2);
+
+ srcIP = InetAddress.getByName("2001:420:281:1004:407a:57f4:4d15:c355");
+ srcIP2 = InetAddress.getByName("2001:420:281:1004:407a:57f4:4d15:c355");
+ ipMask = null;
+ ipMask2 = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
+ ethertype = EtherTypes.IPv6.shortValue();
+ ethertype2 = EtherTypes.IPv6.shortValue();
+
+ match1.setField(MatchType.DL_TYPE, ethertype);
+ match1.setField(MatchType.NW_SRC, srcIP, ipMask);
+
+ match2.setField(MatchType.DL_TYPE, ethertype2);
+ match2.setField(MatchType.NW_SRC, srcIP2, ipMask2);
+
+ Assert.assertTrue(match1.equals(match2));
+ }
+
@Test
public void testCloning() throws Exception {
Node node = NodeCreator.createOFNode(7l);
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
package org.opendaylight.controller.sal.implementation.internal;
import org.opendaylight.controller.sal.core.ComponentActivatorAbstractBase;
+import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerListener;
import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerService;
import org.opendaylight.controller.sal.flowprogrammer.IPluginInFlowProgrammerService;
+import org.opendaylight.controller.sal.flowprogrammer.IPluginOutFlowProgrammerService;
import org.opendaylight.controller.sal.inventory.IInventoryService;
import org.opendaylight.controller.sal.inventory.IListenInventoryUpdates;
import org.opendaylight.controller.sal.inventory.IPluginInInventoryService;
.getLogger(Activator.class);
/**
- * Function called when the activator starts just after some
- * initializations are done by the
- * ComponentActivatorAbstractBase.
- *
+ * Function called when the activator starts just after some initializations
+ * are done by the ComponentActivatorAbstractBase.
+ *
*/
public void init() {
}
/**
- * Function called when the activator stops just before the
- * cleanup done by ComponentActivatorAbstractBase
- *
+ * Function called when the activator stops just before the cleanup done by
+ * ComponentActivatorAbstractBase
+ *
*/
public void destroy() {
}
/**
- * Function that is used to communicate to dependency manager the
- * list of known implementations for services inside a container
- *
- *
+ * Function that is used to communicate to dependency manager the list of
+ * known implementations for services inside a container
+ *
+ *
* @return An array containing all the CLASS objects that will be
- * instantiated in order to get an fully working implementation
- * Object
+ * instantiated in order to get an fully working implementation
+ * Object
*/
public Object[] getImplementations() {
Object[] res = { Topology.class, Inventory.class,
}
/**
- * Function that is called when configuration of the dependencies
- * is required.
- *
- * @param c dependency manager Component object, used for
- * configuring the dependencies exported and imported
- * @param imp Implementation class that is being configured,
- * needed as long as the same routine can configure multiple
- * implementations
- * @param containerName The containerName being configured, this allow
- * also optional per-container different behavior if needed, usually
- * should not be the case though.
+ * Function that is called when configuration of the dependencies is
+ * required.
+ *
+ * @param c
+ * dependency manager Component object, used for configuring the
+ * dependencies exported and imported
+ * @param imp
+ * Implementation class that is being configured, needed as long
+ * as the same routine can configure multiple implementations
+ * @param containerName
+ * The containerName being configured, this allow also optional
+ * per-container different behavior if needed, usually should not
+ * be the case though.
*/
public void configureInstance(Component c, Object imp, String containerName) {
if (imp.equals(Topology.class)) {
// export the service for Apps and Plugins
- c.setInterface(new String[] {
- IPluginOutTopologyService.class.getName(),
- ITopologyService.class.getName() }, null);
+ c.setInterface(
+ new String[] { IPluginOutTopologyService.class.getName(),
+ ITopologyService.class.getName() }, null);
// There can be multiple Topology listeners or there could
// be none, hence the dependency is optional
- c.add(createContainerServiceDependency(containerName).setService(
- IListenTopoUpdates.class).setCallbacks("setUpdateService",
- "unsetUpdateService").setRequired(false));
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IListenTopoUpdates.class)
+ .setCallbacks("setUpdateService", "unsetUpdateService")
+ .setRequired(false));
// There can be multiple southbound plugins or there could
// be none, the dependency is optional
- c.add(createContainerServiceDependency(containerName).setService(
- IPluginInTopologyService.class).setCallbacks(
- "setPluginService", "unsetPluginService")
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IPluginInTopologyService.class)
+ .setCallbacks("setPluginService", "unsetPluginService")
.setRequired(false));
}
if (imp.equals(Inventory.class)) {
// export the service
- c.setInterface(new String[] {
- IPluginOutInventoryService.class.getName(),
- IInventoryService.class.getName() }, null);
+ c.setInterface(
+ new String[] { IPluginOutInventoryService.class.getName(),
+ IInventoryService.class.getName() }, null);
// Now lets add a service dependency to make sure the
// provider of service exists
- c.add(createContainerServiceDependency(containerName).setService(
- IListenInventoryUpdates.class).setCallbacks(
- "setUpdateService", "unsetUpdateService")
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IListenInventoryUpdates.class)
+ .setCallbacks("setUpdateService", "unsetUpdateService")
.setRequired(false));
- c
- .add(createContainerServiceDependency(containerName)
- .setService(IPluginInInventoryService.class)
- .setCallbacks("setPluginService",
- "unsetPluginService").setRequired(true));
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IPluginInInventoryService.class)
+ .setCallbacks("setPluginService", "unsetPluginService")
+ .setRequired(true));
}
if (imp.equals(FlowProgrammerService.class)) {
- // It is the provider of IFlowProgrammerService
- c.setInterface(IFlowProgrammerService.class.getName(), null);
- //It is also the consumer of IPluginInFlowProgrammerService
- c.add(createServiceDependency().setService(
- IPluginInFlowProgrammerService.class).setCallbacks(
- "setService", "unsetService").setRequired(true));
+ c.setInterface(
+ new String[] { IFlowProgrammerService.class.getName(),
+ IPluginOutFlowProgrammerService.class.getName() },
+ null);
+
+ c.add(createServiceDependency()
+ .setService(IPluginInFlowProgrammerService.class)
+ .setCallbacks("setService", "unsetService")
+ .setRequired(false));
+ c.add(createServiceDependency()
+ .setService(IFlowProgrammerListener.class)
+ .setCallbacks("setListener", "unsetListener")
+ .setRequired(false));
}
if (imp.equals(ReadService.class)) {
// It is the provider of IReadService
c.setInterface(IReadService.class.getName(), null);
- //It is also the consumer of IPluginInReadService
- c.add(createContainerServiceDependency(containerName).setService(
- IPluginInReadService.class).setCallbacks("setService",
- "unsetService").setRequired(true));
+ // It is also the consumer of IPluginInReadService
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IPluginInReadService.class)
+ .setCallbacks("setService", "unsetService")
+ .setRequired(true));
}
/************************/
/* DATA PACKET SERVICES */
/************************/
if (imp.equals(DataPacketService.class)) {
- c.setInterface(new String[] {
- IPluginOutDataPacketService.class.getName(),
- IDataPacketService.class.getName() }, null);
+ c.setInterface(
+ new String[] { IPluginOutDataPacketService.class.getName(),
+ IDataPacketService.class.getName() }, null);
// Optionally use PluginInDataService if any southbound
// protocol plugin exists
- c.add(createContainerServiceDependency(containerName).setService(
- IPluginInDataPacketService.class).setCallbacks(
- "setPluginInDataService", "unsetPluginInDataService")
- .setRequired(false));
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IPluginInDataPacketService.class)
+ .setCallbacks("setPluginInDataService",
+ "unsetPluginInDataService").setRequired(false));
// Optionally listed to IListenDataPacket services
- c.add(createContainerServiceDependency(containerName).setService(
- IListenDataPacket.class).setCallbacks(
- "setListenDataPacket", "unsetListenDataPacket")
- .setRequired(false));
+ c.add(createContainerServiceDependency(containerName)
+ .setService(IListenDataPacket.class)
+ .setCallbacks("setListenDataPacket",
+ "unsetListenDataPacket").setRequired(false));
}
}
}
-
/*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
+import java.util.HashSet;
import java.util.List;
+import java.util.Set;
import org.eclipse.osgi.framework.console.CommandInterpreter;
import org.eclipse.osgi.framework.console.CommandProvider;
import org.opendaylight.controller.sal.core.NodeConnector;
import org.opendaylight.controller.sal.core.Node.NodeIDType;
import org.opendaylight.controller.sal.flowprogrammer.Flow;
+import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerListener;
import org.opendaylight.controller.sal.flowprogrammer.IFlowProgrammerService;
import org.opendaylight.controller.sal.flowprogrammer.IPluginInFlowProgrammerService;
+import org.opendaylight.controller.sal.flowprogrammer.IPluginOutFlowProgrammerService;
import org.opendaylight.controller.sal.match.Match;
import org.opendaylight.controller.sal.match.MatchType;
import org.opendaylight.controller.sal.utils.StatusCode;
import org.slf4j.LoggerFactory;
/**
- * The SAL Flow Programmer Service. It dispatches the flow programming
- * requests to the proper SDN protocol plugin
- *
- *
- *
+ * The SAL Flow Programmer Service. It dispatches the flow programming requests
+ * to the proper SDN protocol plugin and it notifies about asynchronous messages
+ * received from the network node related to flow programming.
*/
public class FlowProgrammerService implements IFlowProgrammerService,
- CommandProvider {
+ IPluginOutFlowProgrammerService, CommandProvider {
protected static final Logger logger = LoggerFactory
.getLogger(FlowProgrammerService.class);
- private ConcurrentHashMap<String, IPluginInFlowProgrammerService>
- pluginFlowProgrammer =
- new ConcurrentHashMap<String, IPluginInFlowProgrammerService>();
+ private ConcurrentHashMap<String, IPluginInFlowProgrammerService> pluginFlowProgrammer;
+ private Set<IFlowProgrammerListener> listener;
+
+ public FlowProgrammerService() {
+ pluginFlowProgrammer = new ConcurrentHashMap<String, IPluginInFlowProgrammerService>();
+ listener = new HashSet<IFlowProgrammerListener>();
+ }
/**
* Function called by the dependency manager when all the required
* dependencies are satisfied
- *
+ *
*/
void init() {
logger.debug("INIT called!");
}
/**
- * Function called by the dependency manager when at least one
- * dependency become unsatisfied or when the component is shutting
- * down because for example bundle is being stopped.
- *
+ * Function called by the dependency manager when at least one dependency
+ * become unsatisfied or when the component is shutting down because for
+ * example bundle is being stopped.
+ *
*/
void destroy() {
// Clear previous registration to avoid they are left hanging
}
/**
- * Function called by dependency manager after "init ()" is called
- * and after the services provided by the class are registered in
- * the service registry
- *
+ * Function called by dependency manager after "init ()" is called and after
+ * the services provided by the class are registered in the service registry
+ *
*/
void start() {
logger.debug("START called!");
}
/**
- * Function called by the dependency manager before the services
- * exported by the component are unregistered, this will be
- * followed by a "destroy ()" calls
- *
+ * Function called by the dependency manager before the services exported by
+ * the component are unregistered, this will be followed by a "destroy ()"
+ * calls
+ *
*/
void stop() {
logger.debug("STOP called!");
String type = null;
for (Object e : props.entrySet()) {
Map.Entry entry = (Map.Entry) e;
- logger.trace("Prop key:({}) value:({})",entry.getKey(),
- entry.getValue());
+ logger.trace("Prop key:({}) value:({})", entry.getKey(),
+ entry.getValue());
}
Object value = props.get("protocolPluginType");
+ "protocolPluginType provided");
} else {
this.pluginFlowProgrammer.put(type, s);
- logger.debug("Stored the pluginFlowProgrammer for type: {}",type);
+ logger.debug("Stored the pluginFlowProgrammer for type: {}", type);
}
}
- public void unsetService(Map props,
- IPluginInFlowProgrammerService s) {
+ public void unsetService(Map props, IPluginInFlowProgrammerService s) {
if (this.pluginFlowProgrammer == null) {
logger.error("pluginFlowProgrammer store null");
return;
logger.debug("Received unsetpluginFlowProgrammer request");
for (Object e : props.entrySet()) {
Map.Entry entry = (Map.Entry) e;
- logger.trace("Prop key:({}) value:({})",entry.getKey(),
- entry.getValue());
+ logger.trace("Prop key:({}) value:({})", entry.getKey(),
+ entry.getValue());
}
Object value = props.get("protocoloPluginType");
}
}
+ public void setListener(IFlowProgrammerListener s) {
+ this.listener.add(s);
+ }
+
+ public void unsetListener(IFlowProgrammerListener s) {
+ this.listener.remove(s);
+ }
+
@Override
public Status addFlow(Node node, Flow flow) {
if (pluginFlowProgrammer != null) {
if (this.pluginFlowProgrammer.get(node.getType()) != null) {
- return this.pluginFlowProgrammer.get(node.getType())
- .addFlow(node, flow);
+ return this.pluginFlowProgrammer.get(node.getType()).addFlow(
+ node, flow);
}
}
return new Status(StatusCode.NOSERVICE, "Plugin unuvailable");
if (pluginFlowProgrammer != null) {
if (this.pluginFlowProgrammer.get(node.getType()) != null) {
return this.pluginFlowProgrammer.get(node.getType())
- .removeFlow(node, flow);
+ .removeFlow(node, flow);
}
}
return new Status(StatusCode.NOSERVICE, "Plugin unuvailable");
if (pluginFlowProgrammer != null) {
if (this.pluginFlowProgrammer.get(node.getType()) != null) {
return this.pluginFlowProgrammer.get(node.getType())
- .removeAllFlows(node);
+ .removeAllFlows(node);
}
}
return new Status(StatusCode.NOSERVICE, "Plugin unuvailable");
if (pluginFlowProgrammer != null) {
if (this.pluginFlowProgrammer.get(node.getType()) != null) {
return this.pluginFlowProgrammer.get(node.getType())
- .modifyFlow(node, oldFlow, newFlow);
+ .modifyFlow(node, oldFlow, newFlow);
}
}
return new Status(StatusCode.NOSERVICE, "Plugin unuvailable");
}
+ @Override
+ public void flowRemoved(Node node, Flow flow) {
+ for (IFlowProgrammerListener l : listener) {
+ l.flowRemoved(node, flow);
+ }
+ }
+
// ---------------- OSGI TEST CODE ------------------------------//
private void registerWithOSGIConsole() {
public String getHelp() {
StringBuffer help = new StringBuffer();
help.append("---SAL Flow Programmer testing commands---\n");
- help
- .append("\t addflow <sid> - Add a sample flow to the openflow switch <sid>\n");
- help
- .append("\t removeflow <sid> - Remove the sample flow from the openflow switch <sid>\n");
+ help.append("\t addflow <sid> - Add a sample flow to the openflow switch <sid>\n");
+ help.append("\t removeflow <sid> - Remove the sample flow from the openflow switch <sid>\n");
return help.toString();
}
Flow flowA = getSampleFlow(node);
Flow flowB = getSampleFlow(node);
Match matchB = flowB.getMatch();
- matchB.setField(MatchType.NW_DST, InetAddress
- .getByName("190.190.190.190"));
+ matchB.setField(MatchType.NW_DST,
+ InetAddress.getByName("190.190.190.190"));
flowB.setMatch(matchB);
ci.println(this.modifyFlow(node, flowA, flowB));
}
.getByName("2001:420:281:1004:407a:57f4:4d15:c355");
InetAddress dstIP = InetAddress
.getByName("2001:420:281:1004:e123:e688:d655:a1b0");
- InetAddress ipMask = null; //InetAddress.getByName("ffff:ffff:ffff:ffff:0:0:0:0"); V6Match implementation assumes no mask is specified
- InetAddress ipMask2 = null; //InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
+ InetAddress ipMask = null; // InetAddress.getByName("ffff:ffff:ffff:ffff:0:0:0:0");
+ // V6Match implementation assumes no mask is
+ // specified
+ InetAddress ipMask2 = null; // InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:0");
short ethertype = EtherTypes.IPv6.shortValue();
short vlan = (short) 27;
byte vlanPr = (byte) 3;
Byte tos = 4;
byte proto = IPProtocols.UDP.byteValue();
short src = (short) 5500;
- //short dst = 80;
+ // short dst = 80;
/*
* Create a SAL Flow aFlow
match.setField(MatchType.DL_DST, dstMac);
match.setField(MatchType.DL_TYPE, ethertype);
match.setField(MatchType.DL_VLAN, vlan);
- match.setField(MatchType.DL_VLAN_PR, vlanPr); //V6Match does not handle this properly...
+ match.setField(MatchType.DL_VLAN_PR, vlanPr); // V6Match does not handle
+ // this properly...
match.setField(MatchType.NW_SRC, srcIP, ipMask);
match.setField(MatchType.NW_DST, dstIP, ipMask2);
match.setField(MatchType.NW_TOS, tos);
match.setField(MatchType.NW_PROTO, proto);
- match.setField(MatchType.TP_SRC, src); //V6Match does not handle this properly...
- //match.setField(MatchType.TP_DST, dst); V6Match does not handle this properly...
+ match.setField(MatchType.TP_SRC, src); // V6Match does not handle this
+ // properly...
+ // match.setField(MatchType.TP_DST, dst); V6Match does not handle this
+ // properly...
List<Action> actions = new ArrayList<Action>();
actions.add(new Output(oport));
return flow;
}
+
}
*/\r
package org.opendaylight.controller.sal.binding.yang.types.test;\r
\r
-import static org.junit.Assert.assertEquals;\r
-import static org.junit.Assert.assertTrue;\r
+import static org.junit.Assert.*;\r
\r
import java.util.List;\r
import java.util.Set;\r
\r
+import org.junit.Ignore;\r
import org.junit.Test;\r
import org.opendaylight.controller.sal.binding.generator.api.BindingGenerator;\r
import org.opendaylight.controller.sal.binding.generator.impl.BindingGeneratorImpl;\r
assertTrue(genTypes != null);\r
assertEquals(11, genTypes.size());\r
}\r
- \r
+\r
+ @Ignore\r
@Test\r
public void testLeafrefResolving() {\r
final String topologyPath = getClass().getResource(\r
topologyPath, interfacesPath, inetTypesPath, yangTypesPath);\r
assertTrue(context != null);\r
assertEquals(4, context.getModules().size());\r
- \r
+\r
final BindingGenerator bindingGen = new BindingGeneratorImpl();\r
final List<Type> genTypes = bindingGen.generateTypes(context);\r
- \r
+\r
assertEquals(21, genTypes.size());\r
assertTrue(genTypes != null);\r
- \r
+\r
for (final Type genType : genTypes) {\r
if (genType.getName().equals("Interface") && genType instanceof GeneratedType) {\r
// System.out.println(((GeneratedType)genType).getMethodDefinitions().toString());\r
} else if (genType.getName().equals("NetworkLink") && genType instanceof GeneratedType) {\r
// System.out.println(((GeneratedType)genType).getMethodDefinitions().toString());\r
- } \r
+ }\r
}\r
}\r
\r
<dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
- <artifactId>yang-to-sources</artifactId>
+ <artifactId>maven-yang</artifactId>
<version>1.0</version>
<type>test-jar</type>
</dependency>
<dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
- <artifactId>yang-to-sources</artifactId>
+ <artifactId>maven-yang</artifactId>
<version>1.0</version>
<type>test-jar</type>
</dependency>
<dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
- <artifactId>yang-to-sources</artifactId>
+ <artifactId>maven-yang</artifactId>
<version>1.0</version>
<type>test-jar</type>
</dependency>
<dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
- <artifactId>yang-to-sources</artifactId>
+ <artifactId>maven-yang</artifactId>
<version>1.0</version>
<type>test-jar</type>
</dependency>
<dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
- <artifactId>yang-to-sources</artifactId>
+ <artifactId>maven-yang</artifactId>
<version>1.0</version>
<type>test-jar</type>
</dependency>
<dependencies>
<dependency>
<groupId>org.opendaylight.controller</groupId>
- <artifactId>yang-to-sources</artifactId>
+ <artifactId>maven-yang</artifactId>
<version>1.0</version>
<type>test-jar</type>
</dependency>
<artifactId>maven-yang-plugin</artifactId>
<packaging>maven-plugin</packaging>
+ <description>
+ This plugin is a wrapper for "yang to source code" generation.
+ It can be configured by a set of third-party code generators and resource providers.
+ For further info see available goals.
+ Sample usage:
+
+ TODO: add sample usage when finished
+ </description>
<dependencies>
<dependency>
</dependency>
</dependencies>
+ <reporting>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-plugin-plugin</artifactId>
+ <version>3.2</version>
+ </plugin>
+ </plugins>
+ </reporting>
+
<build>
<plugins>
<plugin>
import com.google.common.base.Preconditions;
/**
- * Complex configuration arguments
+ * Base complex configuration arguments
*/
public abstract class ConfigArg {
public abstract void check();
+ /**
+ * Configuration argument for resource generator class and output directory.
+ */
public static final class ResourceProviderArg extends ConfigArg {
private String resourceProviderClass;
}
/**
- * Transfer object for code generator class and output directory.
+ * Configuration argument for code generator class and output directory.
*/
public static final class CodeGeneratorArg extends ConfigArg {
private String codeGeneratorClass;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Maps;
+/**
+ * Generate resources from yang files using user provided set of
+ * {@link ResourceGenerator}s. Can be used to copy yang files that served as
+ * blueprint for code generation into resources directory. Steps of this
+ * process:
+ * <ol>
+ * <li>List yang files from {@link #yangFilesRootDir} (If this goal is in the
+ * same execution as generate-sources, the same cached list will be used and the
+ * root folder will not be searched for yang files twice)</li>
+ * <li>For each {@link ResourceGenerator} from {@link #resourceProviders}:</li>
+ * <ol>
+ * <li>Instantiate using default constructor</li>
+ * <li>Call {@link ResourceGenerator#generateResourceFiles(Collection, File)}</li>
+ * </ol>
+ * </ol>
+ */
@Mojo(name = "generate-resources", defaultPhase = LifecyclePhase.GENERATE_RESOURCES)
public final class YangToResourcesMojo extends AbstractMojo {
private static final String LOG_PREFIX = "yang-to-resources:";
+ /**
+ * Classes implementing {@link ResourceGenerator} interface. An instance
+ * will be created out of every class using default constructor. Method
+ * {@link ResourceGenerator#generateResourceFiles(Collection, File)} will be
+ * called on every instance.
+ */
@Parameter(required = true)
private ResourceProviderArg[] resourceProviders;
+ /**
+ * Source directory that will be recursively searched for yang files (ending
+ * with .yang suffix).
+ */
@Parameter(required = true)
private String yangFilesRootDir;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Maps;
+/**
+ * Generate sources from yang files using user provided set of
+ * {@link CodeGenerator}s. Steps of this process:
+ * <ol>
+ * <li>List yang files from {@link #yangFilesRootDir}</li>
+ * <li>Process yang files using {@link YangModelParserImpl}</li>
+ * <li>For each {@link CodeGenerator} from {@link #codeGenerators}:</li>
+ * <ol>
+ * <li>Instantiate using default constructor</li>
+ * <li>Call {@link CodeGenerator#generateSources(SchemaContext, File)}</li>
+ * </ol>
+ * </ol>
+ */
@Mojo(name = "generate-sources", defaultPhase = LifecyclePhase.GENERATE_SOURCES)
public final class YangToSourcesMojo extends AbstractMojo {
private static final String LOG_PREFIX = "yang-to-sources:";
+ /**
+ * Classes implementing {@link CodeGenerator} interface. An instance will be
+ * created out of every class using default constructor. Method
+ * {@link CodeGenerator#generateSources(SchemaContext, File)} will be called
+ * on every instance.
+ */
@Parameter(required = true)
private CodeGeneratorArg[] codeGenerators;
+ /**
+ * Source directory that will be recursively searched for yang files (ending
+ * with .yang suffix).
+ */
@Parameter(required = true)
private String yangFilesRootDir;
import org.opendaylight.controller.yang.model.api.SchemaContext;
+/**
+ * Classes implementing this interface can be submitted to maven-yang-plugin's
+ * generate-sources goal.
+ */
public interface CodeGenerator {
+ /**
+ * Generate sources from provided {@link SchemaContext}
+ *
+ * @param context
+ * parsed from yang files
+ * @param outputBaseDir
+ * expected output directory for generated sources configured by
+ * user
+ * @return collection of files that were generated from schema context
+ */
Collection<File> generateSources(SchemaContext context, File outputBaseDir);
}
import java.io.File;
import java.util.Collection;
+/**
+ * Classes implementing this interface can be submitted to maven-yang-plugin's
+ * generate-resources goal.
+ */
public interface ResourceGenerator {
+ /**
+ * Generate resources (e.g. copy files into resources folder) from provided
+ * list of yang files
+ *
+ * @param resources
+ * list of parsed yang files
+ * @param outputBaseDir
+ * expected output directory for resources configured by user
+ */
void generateResourceFiles(Collection<File> resources, File outputBaseDir);
}
--- /dev/null
+package org.opendaylight.controller.yang.model.parser.builder.api;
+
+import org.opendaylight.controller.yang.model.api.TypeDefinition;
+
+public class AbstractTypeAwareBuilder implements TypeAwareBuilder {
+
+ protected TypeDefinition<?> type;
+ protected TypeDefinitionBuilder typedef;
+
+ @Override
+ public TypeDefinition<?> getType() {
+ return type;
+ }
+
+ @Override
+ public TypeDefinitionBuilder getTypedef() {
+ return typedef;
+ }
+
+ @Override
+ public void setType(TypeDefinition<?> type) {
+ this.type = type;
+ this.typedef = null;
+ }
+
+ @Override
+ public void setType(TypeDefinitionBuilder typedef) {
+ this.typedef = typedef;
+ this.type = null;
+ }
+
+}
*/
public interface TypeAwareBuilder {
- TypeDefinition<?> getType();
- void setType(TypeDefinition<?> type);
+ TypeDefinition<?> getType();
+
+ TypeDefinitionBuilder getTypedef();
+
+ void setType(TypeDefinition<?> type);
+
+ void setType(TypeDefinitionBuilder typedef);
}
*/\r
package org.opendaylight.controller.yang.model.parser.builder.api;\r
\r
+import java.util.List;\r
+\r
import org.opendaylight.controller.yang.common.QName;\r
+import org.opendaylight.controller.yang.model.api.SchemaPath;\r
+import org.opendaylight.controller.yang.model.api.Status;\r
import org.opendaylight.controller.yang.model.api.TypeDefinition;\r
+import org.opendaylight.controller.yang.model.api.type.LengthConstraint;\r
+import org.opendaylight.controller.yang.model.api.type.PatternConstraint;\r
+import org.opendaylight.controller.yang.model.api.type.RangeConstraint;\r
+import org.opendaylight.controller.yang.model.parser.builder.impl.UnknownSchemaNodeBuilder;\r
\r
/**\r
* Interface for builders of 'typedef' statement.\r
*/\r
-public interface TypeDefinitionBuilder {\r
+public interface TypeDefinitionBuilder extends TypeAwareBuilder,\r
+ SchemaNodeBuilder {\r
+\r
+ TypeDefinition<?> build();\r
\r
QName getQName();\r
\r
- TypeDefinition<?> getBaseType();\r
+ SchemaPath getPath();\r
\r
- TypeDefinition<?> build();\r
+ String getDescription();\r
\r
- void setUnits(String units);\r
+ String getReference();\r
+\r
+ Status getStatus();\r
+\r
+ List<RangeConstraint> getRanges();\r
+\r
+ void setRanges(List<RangeConstraint> ranges);\r
+\r
+ List<LengthConstraint> getLengths();\r
+\r
+ void setLengths(List<LengthConstraint> lengths);\r
+\r
+ List<PatternConstraint> getPatterns();\r
+\r
+ void setPatterns(List<PatternConstraint> patterns);\r
+\r
+ Integer getFractionDigits();\r
+\r
+ void setFractionDigits(Integer fractionDigits);\r
+\r
+ List<UnknownSchemaNodeBuilder> getUnknownNodes();\r
+\r
+ Object getDefaultValue();\r
\r
void setDefaultValue(Object defaultValue);\r
\r
+ String getUnits();\r
+\r
+ void setUnits(String units);\r
+\r
}\r
@Override
public ContainerSchemaNode build() {
// CHILD NODES
- Map<QName, DataSchemaNode> childs = new HashMap<QName, DataSchemaNode>();
+ final Map<QName, DataSchemaNode> childs = new HashMap<QName, DataSchemaNode>();
for (DataSchemaNodeBuilder node : childNodes) {
childs.put(node.getQName(), node.build());
}
instance.setChildNodes(childs);
// GROUPINGS
- Set<GroupingDefinition> groupingDefinitions = new HashSet<GroupingDefinition>();
+ final Set<GroupingDefinition> groupingDefinitions = new HashSet<GroupingDefinition>();
for (GroupingBuilder builder : groupings) {
groupingDefinitions.add(builder.build());
}
instance.setGroupings(groupingDefinitions);
// TYPEDEFS
- Set<TypeDefinition<?>> typedefs = new HashSet<TypeDefinition<?>>();
+ final Set<TypeDefinition<?>> typedefs = new HashSet<TypeDefinition<?>>();
for (TypeDefinitionBuilder entry : addedTypedefs) {
typedefs.add(entry.build());
}
instance.setTypeDefinitions(typedefs);
// USES
- Set<UsesNode> uses = new HashSet<UsesNode>();
+ final Set<UsesNode> uses = new HashSet<UsesNode>();
for (UsesNodeBuilder builder : addedUsesNodes) {
uses.add(builder.build());
}
// UNKNOWN NODES
final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
- for(UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
unknownNodes.add(b.build());
}
instance.setUnknownSchemaNodes(unknownNodes);
private Set<GroupingDefinition> groupings = Collections.emptySet();
private Set<TypeDefinition<?>> typeDefinitions = Collections.emptySet();
private Set<UsesNode> uses = Collections.emptySet();
- private List<UnknownSchemaNode> unknownSchemaNodes = Collections.emptyList();
+ private List<UnknownSchemaNode> unknownNodes = Collections.emptyList();
private boolean presence;
private ContainerSchemaNodeImpl(QName qname) {
@Override
public List<UnknownSchemaNode> getUnknownSchemaNodes() {
- return unknownSchemaNodes;
+ return unknownNodes;
}
- private void setUnknownSchemaNodes(List<UnknownSchemaNode> unknownSchemaNodes) {
- if(unknownSchemaNodes != null) {
- this.unknownSchemaNodes = unknownSchemaNodes;
+ private void setUnknownSchemaNodes(
+ List<UnknownSchemaNode> unknownSchemaNodes) {
+ if (unknownSchemaNodes != null) {
+ this.unknownNodes = unknownSchemaNodes;
}
}
import org.opendaylight.controller.yang.model.api.Status;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
+import org.opendaylight.controller.yang.model.parser.builder.api.AbstractTypeAwareBuilder;
import org.opendaylight.controller.yang.model.parser.builder.api.DataSchemaNodeBuilder;
import org.opendaylight.controller.yang.model.parser.builder.api.SchemaNodeBuilder;
-import org.opendaylight.controller.yang.model.parser.builder.api.TypeAwareBuilder;
-public class LeafListSchemaNodeBuilder implements SchemaNodeBuilder,
- TypeAwareBuilder, DataSchemaNodeBuilder {
+public class LeafListSchemaNodeBuilder extends AbstractTypeAwareBuilder implements SchemaNodeBuilder,
+ DataSchemaNodeBuilder {
private final LeafListSchemaNodeImpl instance;
private final QName qname;
- private final ConstraintsBuilder constraintsBuilder = new ConstraintsBuilder();
+ private final ConstraintsBuilder constraints = new ConstraintsBuilder();
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
- private TypeDefinition<?> type;
LeafListSchemaNodeBuilder(QName qname) {
this.qname = qname;
@Override
public LeafListSchemaNode build() {
- instance.setConstraints(constraintsBuilder.build());
+ instance.setConstraints(constraints.build());
+
+ if(type == null) {
+ instance.setType(typedef.build());
+ } else {
+ instance.setType(type);
+ }
+
+ // UNKNOWN NODES
+ final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
+ }
+ instance.setUnknownSchemaNodes(unknownNodes);
+
return instance;
}
}
}
- @Override
- public TypeDefinition<?> getType() {
- return type;
- }
-
- @Override
- public void setType(TypeDefinition<?> type) {
- this.type = type;
- instance.setType(type);
- }
-
@Override
public void setAugmenting(boolean augmenting) {
instance.setAugmenting(augmenting);
@Override
public ConstraintsBuilder getConstraintsBuilder() {
- return constraintsBuilder;
+ return constraints;
}
public void setUserOrdered(boolean userOrdered) {
private Status status = Status.CURRENT;
private boolean augmenting;
private boolean configuration;
- private ConstraintDefinition constraints;
+ private ConstraintDefinition constraintsDef;
private TypeDefinition<?> type;
private boolean userOrdered;
- private List<UnknownSchemaNode> unknownSchemaNodes = Collections.emptyList();
+ private List<UnknownSchemaNode> unknownNodes = Collections
+ .emptyList();
private LeafListSchemaNodeImpl(QName qname) {
this.qname = qname;
@Override
public ConstraintDefinition getConstraints() {
- return constraints;
+ return constraintsDef;
}
- private void setConstraints(ConstraintDefinition constraints) {
- this.constraints = constraints;
+ private void setConstraints(ConstraintDefinition constraintsDef) {
+ this.constraintsDef = constraintsDef;
}
@Override
@Override
public List<UnknownSchemaNode> getUnknownSchemaNodes() {
- return unknownSchemaNodes;
+ return unknownNodes;
}
- private void setUnknownSchemaNodes(List<UnknownSchemaNode> unknownSchemaNodes) {
- if(unknownSchemaNodes != null) {
- this.unknownSchemaNodes = unknownSchemaNodes;
+ private void setUnknownSchemaNodes(
+ List<UnknownSchemaNode> unknownNodes) {
+ if (unknownNodes != null) {
+ this.unknownNodes = unknownNodes;
}
}
sb.append(", status=" + status);
sb.append(", augmenting=" + augmenting);
sb.append(", configuration=" + configuration);
- sb.append(", constraints=" + constraints);
+ sb.append(", constraints=" + constraintsDef);
sb.append(", type=" + type);
sb.append(", userOrdered=" + userOrdered);
sb.append("]");
import org.opendaylight.controller.yang.model.api.Status;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
+import org.opendaylight.controller.yang.model.parser.builder.api.AbstractTypeAwareBuilder;
import org.opendaylight.controller.yang.model.parser.builder.api.DataSchemaNodeBuilder;
import org.opendaylight.controller.yang.model.parser.builder.api.SchemaNodeBuilder;
-import org.opendaylight.controller.yang.model.parser.builder.api.TypeAwareBuilder;
-
-public class LeafSchemaNodeBuilder implements DataSchemaNodeBuilder,
- SchemaNodeBuilder, TypeAwareBuilder {
+public class LeafSchemaNodeBuilder extends AbstractTypeAwareBuilder implements DataSchemaNodeBuilder,
+ SchemaNodeBuilder {
private final QName qname;
private final LeafSchemaNodeImpl instance;
- private final ConstraintsBuilder constraintsBuilder = new ConstraintsBuilder();
+ private final ConstraintsBuilder constraints = new ConstraintsBuilder();
private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
- private TypeDefinition<?> type;
LeafSchemaNodeBuilder(QName qname) {
this.qname = qname;
@Override
public LeafSchemaNode build() {
+ if(type == null) {
+ instance.setType(typedef.build());
+ } else {
+ instance.setType(type);
+ }
+
// UNKNOWN NODES
final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
for(UnknownSchemaNodeBuilder b : addedUnknownNodes) {
}
instance.setUnknownSchemaNodes(unknownNodes);
- instance.setConstraints(constraintsBuilder.build());
+ instance.setConstraints(constraints.build());
return instance;
}
@Override
public ConstraintsBuilder getConstraintsBuilder() {
- return constraintsBuilder;
- }
-
- @Override
- public TypeDefinition<?> getType() {
- return type;
- }
-
- @Override
- public void setType(TypeDefinition<?> type) {
- this.type = type;
- instance.setType(type);
+ return constraints;
}
@Override
- public void addUnknownSchemaNode(UnknownSchemaNodeBuilder unknownSchemaNodeBuilder) {
- addedUnknownNodes.add(unknownSchemaNodeBuilder);
+ public void addUnknownSchemaNode(UnknownSchemaNodeBuilder unknownNode) {
+ addedUnknownNodes.add(unknownNode);
}
private class LeafSchemaNodeImpl implements LeafSchemaNode {
private Status status = Status.CURRENT;
private boolean augmenting;
private boolean configuration;
- private ConstraintDefinition constraints;
+ private ConstraintDefinition constraintsDef;
private TypeDefinition<?> type;
- private List<UnknownSchemaNode> unknownSchemaNodes = Collections.emptyList();
+ private List<UnknownSchemaNode> unknownNodes = Collections.emptyList();
private LeafSchemaNodeImpl(QName qname) {
this.qname = qname;
@Override
public ConstraintDefinition getConstraints() {
- return constraints;
+ return constraintsDef;
}
- private void setConstraints(ConstraintDefinition constraints) {
- this.constraints = constraints;
+ private void setConstraints(ConstraintDefinition constraintsDef) {
+ this.constraintsDef = constraintsDef;
}
@Override
@Override
public List<UnknownSchemaNode> getUnknownSchemaNodes() {
- return unknownSchemaNodes;
+ return unknownNodes;
}
- private void setUnknownSchemaNodes(List<UnknownSchemaNode> unknownSchemaNodes) {
- if(unknownSchemaNodes != null) {
- this.unknownSchemaNodes = unknownSchemaNodes;
+ private void setUnknownSchemaNodes(List<UnknownSchemaNode> unknownNodes) {
+ if(unknownNodes != null) {
+ this.unknownNodes = unknownNodes;
}
}
sb.append(", status=" + status);
sb.append(", augmenting=" + augmenting);
sb.append(", configuration=" + configuration);
- sb.append(", constraints=" + constraints);
+ sb.append(", constraints=" + constraintsDef);
sb.append(", type=" + type);
- sb.append(", constraints=" + constraints);
+ sb.append(", constraints=" + constraintsDef);
sb.append("]");
return sb.toString();
}
-/*\r
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.\r
- *\r
- * This program and the accompanying materials are made available under the\r
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,\r
- * and is available at http://www.eclipse.org/legal/epl-v10.html\r
- */\r
-package org.opendaylight.controller.yang.model.parser.builder.impl;\r
-\r
-import java.net.URI;\r
-import java.util.ArrayList;\r
-import java.util.Collections;\r
-import java.util.Date;\r
-import java.util.HashMap;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Set;\r
-\r
-import org.opendaylight.controller.yang.common.QName;\r
-import org.opendaylight.controller.yang.model.api.AugmentationSchema;\r
-import org.opendaylight.controller.yang.model.api.DataSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.Deviation;\r
-import org.opendaylight.controller.yang.model.api.ExtensionDefinition;\r
-import org.opendaylight.controller.yang.model.api.FeatureDefinition;\r
-import org.opendaylight.controller.yang.model.api.GroupingDefinition;\r
-import org.opendaylight.controller.yang.model.api.IdentitySchemaNode;\r
-import org.opendaylight.controller.yang.model.api.Module;\r
-import org.opendaylight.controller.yang.model.api.ModuleImport;\r
-import org.opendaylight.controller.yang.model.api.NotificationDefinition;\r
-import org.opendaylight.controller.yang.model.api.RpcDefinition;\r
-import org.opendaylight.controller.yang.model.api.TypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.UsesNode;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.AugmentationSchemaBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.Builder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.ChildNodeBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.DataSchemaNodeBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.GroupingBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.TypeAwareBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.TypeDefinitionAwareBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.TypeDefinitionBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.UsesNodeBuilder;\r
-\r
-/**\r
- * This builder builds Module object. If this module is dependent on external\r
- * module/modules, these dependencies must be resolved before module is built,\r
- * otherwise result may not be valid.\r
- */\r
-public class ModuleBuilder implements Builder {\r
-\r
- private final ModuleImpl instance;\r
- private final String name;\r
- private String prefix;\r
- private Date revision;\r
-\r
- private final Set<ModuleImport> imports = new HashSet<ModuleImport>();\r
- private Set<AugmentationSchema> augmentations;\r
-\r
- /**\r
- * All nodes, that can contain other nodes\r
- */\r
- private final Map<List<String>, Builder> moduleNodes = new HashMap<List<String>, Builder>();\r
-\r
- /**\r
- * Holds all child (DataSchemaNode) nodes: anyxml, choice, case, container,\r
- * list, leaf, leaf-list.\r
- */\r
- private final Map<List<String>, DataSchemaNodeBuilder> addedChilds = new HashMap<List<String>, DataSchemaNodeBuilder>();\r
-\r
- private final Map<List<String>, GroupingBuilder> addedGroupings = new HashMap<List<String>, GroupingBuilder>();\r
- private final Set<AugmentationSchemaBuilder> addedAugments = new HashSet<AugmentationSchemaBuilder>();\r
- private final Map<List<String>, UsesNodeBuilder> addedUsesNodes = new HashMap<List<String>, UsesNodeBuilder>();\r
- private final Map<List<String>, RpcDefinitionBuilder> addedRpcs = new HashMap<List<String>, RpcDefinitionBuilder>();\r
- private final Set<NotificationBuilder> addedNotifications = new HashSet<NotificationBuilder>();\r
- private final Set<IdentitySchemaNodeBuilder> addedIdentities = new HashSet<IdentitySchemaNodeBuilder>();\r
- private final Map<List<String>, FeatureBuilder> addedFeatures = new HashMap<List<String>, FeatureBuilder>();\r
- private final Map<String, DeviationBuilder> addedDeviations = new HashMap<String, DeviationBuilder>();\r
- private final Map<List<String>, TypeDefinitionBuilder> addedTypedefs = new HashMap<List<String>, TypeDefinitionBuilder>();\r
- private final List<ExtensionBuilder> addedExtensions = new ArrayList<ExtensionBuilder>();\r
-\r
- private final Map<List<String>, TypeAwareBuilder> dirtyNodes = new HashMap<List<String>, TypeAwareBuilder>();\r
- private final Map<List<String>, UnionTypeBuilder> unionTypes = new HashMap<List<String>, UnionTypeBuilder>();\r
-\r
- public ModuleBuilder(String name) {\r
- this.name = name;\r
- instance = new ModuleImpl(name);\r
- }\r
-\r
-\r
- /**\r
- * Build new Module object based on this builder.\r
- */\r
- @Override\r
- public Module build() {\r
- instance.setImports(imports);\r
-\r
- // TYPEDEFS\r
- final Set<TypeDefinition<?>> typedefs = buildModuleTypedefs(addedTypedefs);\r
- instance.setTypeDefinitions(typedefs);\r
-\r
- // CHILD NODES\r
- final Map<QName, DataSchemaNode> childNodes = buildModuleChildNodes(addedChilds);\r
- instance.setChildNodes(childNodes);\r
-\r
- // GROUPINGS\r
- final Set<GroupingDefinition> groupings = buildModuleGroupings(addedGroupings);\r
- instance.setGroupings(groupings);\r
-\r
- // USES\r
- final Set<UsesNode> usesNodeDefinitions = buildUsesNodes(addedUsesNodes);\r
- instance.setUses(usesNodeDefinitions);\r
-\r
- // FEATURES\r
- final Set<FeatureDefinition> features = buildModuleFeatures(addedFeatures);\r
- instance.setFeatures(features);\r
-\r
- // NOTIFICATIONS\r
- final Set<NotificationDefinition> notifications = new HashSet<NotificationDefinition>();\r
- for (NotificationBuilder entry : addedNotifications) {\r
- notifications.add((NotificationDefinition) entry.build());\r
- }\r
- instance.setNotifications(notifications);\r
-\r
- // AUGMENTATIONS\r
- instance.setAugmentations(augmentations);\r
-\r
- // RPCs\r
- final Set<RpcDefinition> rpcs = buildModuleRpcs(addedRpcs);\r
- instance.setRpcs(rpcs);\r
-\r
- // DEVIATIONS\r
- final Set<Deviation> deviations = new HashSet<Deviation>();\r
- for (Map.Entry<String, DeviationBuilder> entry : addedDeviations\r
- .entrySet()) {\r
- deviations.add(entry.getValue().build());\r
- }\r
- instance.setDeviations(deviations);\r
-\r
- // EXTENSIONS\r
- final List<ExtensionDefinition> extensions = new ArrayList<ExtensionDefinition>();\r
- for(ExtensionBuilder b : addedExtensions) {\r
- extensions.add(b.build());\r
- }\r
- instance.setExtensionSchemaNodes(extensions);\r
-\r
- // IDENTITIES\r
- final Set<IdentitySchemaNode> identities = new HashSet<IdentitySchemaNode>();\r
- for(IdentitySchemaNodeBuilder idBuilder : addedIdentities) {\r
- identities.add(idBuilder.build());\r
- }\r
- instance.setIdentities(identities);\r
-\r
- return instance;\r
- }\r
-\r
- public Builder getNode(List<String> path) {\r
- return moduleNodes.get(path);\r
- }\r
-\r
- public Map<List<String>, TypeAwareBuilder> getDirtyNodes() {\r
- return dirtyNodes;\r
- }\r
-\r
- public Set<AugmentationSchemaBuilder> getAddedAugments() {\r
- return addedAugments;\r
- }\r
-\r
- public Set<IdentitySchemaNodeBuilder> getAddedIdentities() {\r
- return addedIdentities;\r
- }\r
-\r
- public String getName() {\r
- return name;\r
- }\r
-\r
- public String getPrefix() {\r
- return prefix;\r
- }\r
-\r
- public Date getRevision() {\r
- return revision;\r
- }\r
-\r
- public void addDirtyNode(List<String> path) {\r
- List<String> dirtyNodePath = new ArrayList<String>(path);\r
- TypeAwareBuilder nodeBuilder = (TypeAwareBuilder) moduleNodes\r
- .get(dirtyNodePath);\r
- dirtyNodes.put(dirtyNodePath, nodeBuilder);\r
- }\r
-\r
- public void setNamespace(URI namespace) {\r
- instance.setNamespace(namespace);\r
- }\r
-\r
- public void setRevision(Date revision) {\r
- this.revision = revision;\r
- instance.setRevision(revision);\r
- }\r
-\r
- public void setPrefix(String prefix) {\r
- this.prefix = prefix;\r
- instance.setPrefix(prefix);\r
- }\r
-\r
- public void setYangVersion(String yangVersion) {\r
- instance.setYangVersion(yangVersion);\r
- }\r
-\r
- public void setDescription(String description) {\r
- instance.setDescription(description);\r
- }\r
-\r
- public void setReference(String reference) {\r
- instance.setReference(reference);\r
- }\r
-\r
- public void setOrganization(String organization) {\r
- instance.setOrganization(organization);\r
- }\r
-\r
- public void setContact(String contact) {\r
- instance.setContact(contact);\r
- }\r
-\r
- public void setAugmentations(Set<AugmentationSchema> augmentations) {\r
- this.augmentations = augmentations;\r
- }\r
-\r
- public boolean addModuleImport(final String moduleName,\r
- final Date revision, final String prefix) {\r
- ModuleImport moduleImport = createModuleImport(moduleName, revision,\r
- prefix);\r
- return imports.add(moduleImport);\r
- }\r
-\r
- public Set<ModuleImport> getModuleImports() {\r
- return imports;\r
- }\r
-\r
- public ExtensionBuilder addExtension(QName qname) {\r
- ExtensionBuilder builder = new ExtensionBuilder(qname);\r
- return builder;\r
- }\r
-\r
- public ContainerSchemaNodeBuilder addContainerNode(QName containerName,\r
- List<String> parentPath) {\r
- List<String> pathToNode = new ArrayList<String>(parentPath);\r
-\r
- ContainerSchemaNodeBuilder containerBuilder = new ContainerSchemaNodeBuilder(\r
- containerName);\r
-\r
- ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes\r
- .get(pathToNode);\r
- if (parent != null) {\r
- if(parent instanceof AugmentationSchemaBuilder) {\r
- containerBuilder.setAugmenting(true);\r
- }\r
- parent.addChildNode(containerBuilder);\r
- }\r
-\r
- pathToNode.add(containerName.getLocalName());\r
- moduleNodes.put(pathToNode, containerBuilder);\r
- addedChilds.put(pathToNode, containerBuilder);\r
-\r
- return containerBuilder;\r
- }\r
-\r
- public ListSchemaNodeBuilder addListNode(QName listName,\r
- List<String> parentPath) {\r
- List<String> pathToNode = new ArrayList<String>(parentPath);\r
-\r
- ListSchemaNodeBuilder listBuilder = new ListSchemaNodeBuilder(listName);\r
-\r
- ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes\r
- .get(pathToNode);\r
- if (parent != null) {\r
- if(parent instanceof AugmentationSchemaBuilder) {\r
- listBuilder.setAugmenting(true);\r
- }\r
- parent.addChildNode(listBuilder);\r
- }\r
-\r
- pathToNode.add(listName.getLocalName());\r
- moduleNodes.put(pathToNode, listBuilder);\r
- addedChilds.put(pathToNode, listBuilder);\r
-\r
- return listBuilder;\r
- }\r
-\r
- public LeafSchemaNodeBuilder addLeafNode(QName leafName,\r
- List<String> parentPath) {\r
- List<String> pathToNode = new ArrayList<String>(parentPath);\r
-\r
- LeafSchemaNodeBuilder leafBuilder = new LeafSchemaNodeBuilder(leafName);\r
-\r
- ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes.get(pathToNode);\r
- if (parent != null) {\r
- if(parent instanceof AugmentationSchemaBuilder) {\r
- leafBuilder.setAugmenting(true);\r
- }\r
- parent.addChildNode(leafBuilder);\r
- }\r
-\r
- pathToNode.add(leafName.getLocalName());\r
- addedChilds.put(pathToNode, leafBuilder);\r
- moduleNodes.put(pathToNode, leafBuilder);\r
-\r
- return leafBuilder;\r
- }\r
-\r
- public LeafListSchemaNodeBuilder addLeafListNode(QName leafListName,\r
- List<String> parentPath) {\r
- List<String> pathToNode = new ArrayList<String>(parentPath);\r
-\r
- LeafListSchemaNodeBuilder leafListBuilder = new LeafListSchemaNodeBuilder(\r
- leafListName);\r
- ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes.get(pathToNode);\r
- if (parent != null) {\r
- if(parent instanceof AugmentationSchemaBuilder) {\r
- leafListBuilder.setAugmenting(true);\r
- }\r
- parent.addChildNode(leafListBuilder);\r
- }\r
-\r
- pathToNode.add(leafListName.getLocalName());\r
- addedChilds.put(pathToNode, leafListBuilder);\r
- moduleNodes.put(pathToNode, leafListBuilder);\r
-\r
- return leafListBuilder;\r
- }\r
-\r
- public GroupingBuilder addGrouping(QName qname, List<String> parentPath) {\r
- List<String> pathToGroup = new ArrayList<String>(parentPath);\r
-\r
- GroupingBuilder builder = new GroupingBuilderImpl(qname);\r
- ChildNodeBuilder parentNodeBuilder = (ChildNodeBuilder) moduleNodes.get(pathToGroup);\r
- if (parentNodeBuilder != null) {\r
- parentNodeBuilder.addGrouping(builder);\r
- }\r
-\r
- pathToGroup.add(qname.getLocalName());\r
- moduleNodes.put(pathToGroup, builder);\r
- addedGroupings.put(pathToGroup, builder);\r
-\r
- return builder;\r
- }\r
-\r
- public AugmentationSchemaBuilder addAugment(String name,\r
- List<String> parentPath) {\r
- List<String> pathToAugment = new ArrayList<String>(parentPath);\r
-\r
- AugmentationSchemaBuilder builder = new AugmentationSchemaBuilderImpl(name);\r
-\r
- // augment can only be in 'module' or 'uses' statement\r
- UsesNodeBuilder parent = addedUsesNodes.get(pathToAugment);\r
- if (parent != null) {\r
- parent.addAugment(builder);\r
- }\r
-\r
- pathToAugment.add(name);\r
- moduleNodes.put(pathToAugment, builder);\r
- addedAugments.add(builder);\r
-\r
- return builder;\r
- }\r
-\r
- public UsesNodeBuilder addUsesNode(String groupingPathStr,\r
- List<String> parentPath) {\r
- List<String> pathToUses = new ArrayList<String>(parentPath);\r
-\r
- UsesNodeBuilder usesBuilder = new UsesNodeBuilderImpl(groupingPathStr);\r
-\r
- ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes.get(pathToUses);\r
- if (parent != null) {\r
- if(parent instanceof AugmentationSchemaBuilder) {\r
- usesBuilder.setAugmenting(true);\r
- }\r
- parent.addUsesNode(usesBuilder);\r
- }\r
-\r
- pathToUses.add(groupingPathStr);\r
- addedUsesNodes.put(pathToUses, usesBuilder);\r
-\r
- return usesBuilder;\r
- }\r
-\r
- public RpcDefinitionBuilder addRpc(QName qname, List<String> parentPath) {\r
- List<String> pathToRpc = new ArrayList<String>(parentPath);\r
-\r
- RpcDefinitionBuilder rpcBuilder = new RpcDefinitionBuilder(qname);\r
-\r
- pathToRpc.add(qname.getLocalName());\r
- addedRpcs.put(pathToRpc, rpcBuilder);\r
-\r
- QName inputQName = new QName(qname.getNamespace(), qname.getRevision(),\r
- qname.getPrefix(), "input");\r
- ContainerSchemaNodeBuilder inputBuilder = new ContainerSchemaNodeBuilder(inputQName);\r
- List<String> pathToInput = new ArrayList<String>(pathToRpc);\r
- pathToInput.add("input");\r
- moduleNodes.put(pathToInput, inputBuilder);\r
- rpcBuilder.setInput(inputBuilder);\r
-\r
- QName outputQName = new QName(qname.getNamespace(),\r
- qname.getRevision(), qname.getPrefix(), "output");\r
- ContainerSchemaNodeBuilder outputBuilder = new ContainerSchemaNodeBuilder(outputQName);\r
- List<String> pathToOutput = new ArrayList<String>(pathToRpc);\r
- pathToOutput.add("output");\r
- moduleNodes.put(pathToOutput, outputBuilder);\r
- rpcBuilder.setOutput(outputBuilder);\r
-\r
- return rpcBuilder;\r
- }\r
-\r
- public NotificationBuilder addNotification(QName notificationName,\r
- List<String> parentPath) {\r
- List<String> pathToNotification = new ArrayList<String>(parentPath);\r
-\r
- NotificationBuilder notificationBuilder = new NotificationBuilder(\r
- notificationName);\r
-\r
- pathToNotification.add(notificationName.getLocalName());\r
- moduleNodes.put(pathToNotification, notificationBuilder);\r
- addedNotifications.add(notificationBuilder);\r
-\r
- return notificationBuilder;\r
- }\r
-\r
- public FeatureBuilder addFeature(QName featureName, List<String> parentPath) {\r
- List<String> pathToFeature = new ArrayList<String>(parentPath);\r
- pathToFeature.add(featureName.getLocalName());\r
-\r
- FeatureBuilder builder = new FeatureBuilder(featureName);\r
- addedFeatures.put(pathToFeature, builder);\r
- return builder;\r
- }\r
-\r
- public TypedefBuilder addTypedef(QName typeDefName, List<String> parentPath) {\r
- List<String> pathToType = new ArrayList<String>(parentPath);\r
- TypedefBuilder builder = new TypedefBuilder(typeDefName);\r
- TypeDefinitionAwareBuilder parent = (TypeDefinitionAwareBuilder) moduleNodes.get(pathToType);\r
- if (parent != null) {\r
- parent.addTypedef(builder);\r
- }\r
- pathToType.add(typeDefName.getLocalName());\r
- addedTypedefs.put(pathToType, builder);\r
- moduleNodes.put(pathToType, builder);\r
- return builder;\r
- }\r
-\r
- public Set<TypeDefinitionBuilder> getModuleTypedefs() {\r
- Set<TypeDefinitionBuilder> typedefs = new HashSet<TypeDefinitionBuilder>();\r
- for (Map.Entry<List<String>, TypeDefinitionBuilder> entry : addedTypedefs.entrySet()) {\r
- if (entry.getKey().size() == 2) {\r
- typedefs.add(entry.getValue());\r
- }\r
- }\r
- return typedefs;\r
- }\r
-\r
- public void setType(TypeDefinition<?> type, List<String> parentPath) {\r
- TypeAwareBuilder parent = (TypeAwareBuilder) moduleNodes.get(parentPath);\r
- parent.setType(type);\r
- }\r
-\r
- public void addUnionType(List<String> parentPath) {\r
- TypeAwareBuilder parent = (TypeAwareBuilder) moduleNodes.get(parentPath);\r
- UnionTypeBuilder union = new UnionTypeBuilder();\r
- parent.setType(union.build());\r
-\r
- List<String> path = new ArrayList<String>(parentPath);\r
- path.add("union");\r
-\r
- unionTypes.put(path, union);\r
- moduleNodes.put(path, union);\r
- }\r
-\r
- public DeviationBuilder addDeviation(String targetPath) {\r
- DeviationBuilder builder = new DeviationBuilder(targetPath);\r
- addedDeviations.put(targetPath, builder);\r
- return builder;\r
- }\r
-\r
- public IdentitySchemaNodeBuilder addIdentity(QName qname) {\r
- IdentitySchemaNodeBuilder builder = new IdentitySchemaNodeBuilder(qname);\r
- addedIdentities.add(builder);\r
-\r
- return builder;\r
- }\r
-\r
- public void addConfiguration(boolean configuration, List<String> parentPath) {\r
- Builder builder = moduleNodes.get(parentPath);\r
- if (builder instanceof DeviationBuilder) {\r
- // skip\r
- // TODO\r
- } else {\r
- DataSchemaNodeBuilder configBuilder = (DataSchemaNodeBuilder) moduleNodes.get(parentPath);\r
- configBuilder.setConfiguration(configuration);\r
- }\r
- }\r
-\r
- public UnknownSchemaNodeBuilder addUnknownSchemaNode(QName qname, List<String> parentPath) {\r
- UnknownSchemaNodeBuilder builder = new UnknownSchemaNodeBuilder(qname);\r
- return builder;\r
- }\r
-\r
-\r
- private class ModuleImpl implements Module {\r
- private URI namespace;\r
- private final String name;\r
- private Date revision;\r
- private String prefix;\r
- private String yangVersion;\r
- private String description;\r
- private String reference;\r
- private String organization;\r
- private String contact;\r
- private Set<ModuleImport> imports = Collections.emptySet();\r
- private Set<FeatureDefinition> features = Collections.emptySet();\r
- private Set<TypeDefinition<?>> typeDefinitions = Collections.emptySet();\r
- private Set<NotificationDefinition> notifications = Collections.emptySet();\r
- private Set<AugmentationSchema> augmentations = Collections.emptySet();\r
- private Set<RpcDefinition> rpcs = Collections.emptySet();\r
- private Set<Deviation> deviations = Collections.emptySet();\r
- private Map<QName, DataSchemaNode> childNodes = Collections.emptyMap();\r
- private Set<GroupingDefinition> groupings = Collections.emptySet();\r
- private Set<UsesNode> uses = Collections.emptySet();\r
- private List<ExtensionDefinition> extensionSchemaNodes = Collections.emptyList();\r
- private Set<IdentitySchemaNode> identities = Collections.emptySet();\r
-\r
- private ModuleImpl(String name) {\r
- this.name = name;\r
- }\r
-\r
- @Override\r
- public URI getNamespace() {\r
- return namespace;\r
- }\r
-\r
- private void setNamespace(URI namespace) {\r
- this.namespace = namespace;\r
- }\r
-\r
- @Override\r
- public String getName() {\r
- return name;\r
- }\r
-\r
- @Override\r
- public Date getRevision() {\r
- return revision;\r
- }\r
-\r
- private void setRevision(Date revision) {\r
- this.revision = revision;\r
- }\r
-\r
- @Override\r
- public String getPrefix() {\r
- return prefix;\r
- }\r
-\r
- private void setPrefix(String prefix) {\r
- this.prefix = prefix;\r
- }\r
-\r
- @Override\r
- public String getYangVersion() {\r
- return yangVersion;\r
- }\r
-\r
- private void setYangVersion(String yangVersion) {\r
- this.yangVersion = yangVersion;\r
- }\r
-\r
- @Override\r
- public String getDescription() {\r
- return description;\r
- }\r
-\r
- private void setDescription(String description) {\r
- this.description = description;\r
- }\r
-\r
- @Override\r
- public String getReference() {\r
- return reference;\r
- }\r
-\r
- private void setReference(String reference) {\r
- this.reference = reference;\r
- }\r
-\r
- @Override\r
- public String getOrganization() {\r
- return organization;\r
- }\r
-\r
- private void setOrganization(String organization) {\r
- this.organization = organization;\r
- }\r
-\r
- @Override\r
- public String getContact() {\r
- return contact;\r
- }\r
-\r
- private void setContact(String contact) {\r
- this.contact = contact;\r
- }\r
-\r
- @Override\r
- public Set<ModuleImport> getImports() {\r
- return imports;\r
- }\r
-\r
- private void setImports(Set<ModuleImport> imports) {\r
- if(imports != null) {\r
- this.imports = imports;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<FeatureDefinition> getFeatures() {\r
- return features;\r
- }\r
-\r
- private void setFeatures(Set<FeatureDefinition> features) {\r
- if(features != null) {\r
- this.features = features;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<TypeDefinition<?>> getTypeDefinitions() {\r
- return typeDefinitions;\r
- }\r
-\r
- private void setTypeDefinitions(Set<TypeDefinition<?>> typeDefinitions) {\r
- if(typeDefinitions != null) {\r
- this.typeDefinitions = typeDefinitions;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<NotificationDefinition> getNotifications() {\r
- return notifications;\r
- }\r
-\r
- private void setNotifications(Set<NotificationDefinition> notifications) {\r
- if(notifications != null) {\r
- this.notifications = notifications;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<AugmentationSchema> getAugmentations() {\r
- return augmentations;\r
- }\r
-\r
- private void setAugmentations(Set<AugmentationSchema> augmentations) {\r
- if(augmentations != null) {\r
- this.augmentations = augmentations;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<RpcDefinition> getRpcs() {\r
- return rpcs;\r
- }\r
-\r
- private void setRpcs(Set<RpcDefinition> rpcs) {\r
- if(rpcs != null) {\r
- this.rpcs = rpcs;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<Deviation> getDeviations() {\r
- return deviations;\r
- }\r
-\r
- private void setDeviations(Set<Deviation> deviations) {\r
- if(deviations != null) {\r
- this.deviations = deviations;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<DataSchemaNode> getChildNodes() {\r
- return new HashSet<DataSchemaNode>(childNodes.values());\r
- }\r
-\r
- private void setChildNodes(Map<QName, DataSchemaNode> childNodes) {\r
- if(childNodes != null) {\r
- this.childNodes = childNodes;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<GroupingDefinition> getGroupings() {\r
- return groupings;\r
- }\r
-\r
- private void setGroupings(Set<GroupingDefinition> groupings) {\r
- if(groupings != null) {\r
- this.groupings = groupings;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<UsesNode> getUses() {\r
- return uses;\r
- }\r
-\r
- private void setUses(Set<UsesNode> uses) {\r
- if(uses != null) {\r
- this.uses = uses;\r
- }\r
- }\r
-\r
- @Override\r
- public List<ExtensionDefinition> getExtensionSchemaNodes() {\r
- return extensionSchemaNodes;\r
- }\r
-\r
- private void setExtensionSchemaNodes(List<ExtensionDefinition> extensionSchemaNodes) {\r
- if(extensionSchemaNodes != null) {\r
- this.extensionSchemaNodes = extensionSchemaNodes;\r
- }\r
- }\r
-\r
- @Override\r
- public Set<IdentitySchemaNode> getIdentities() {\r
- return identities;\r
- }\r
-\r
- private void setIdentities(Set<IdentitySchemaNode> identities) {\r
- if(identities != null) {\r
- this.identities = identities;\r
- }\r
- }\r
-\r
- @Override\r
- public DataSchemaNode getDataChildByName(QName name) {\r
- return childNodes.get(name);\r
- }\r
-\r
- @Override\r
- public DataSchemaNode getDataChildByName(String name) {\r
- DataSchemaNode result = null;\r
- for (Map.Entry<QName, DataSchemaNode> entry : childNodes.entrySet()) {\r
- if (entry.getKey().getLocalName().equals(name)) {\r
- result = entry.getValue();\r
- break;\r
- }\r
- }\r
- return result;\r
- }\r
-\r
- @Override\r
- public int hashCode() {\r
- final int prime = 31;\r
- int result = 1;\r
- result = prime * result + ((namespace == null) ? 0 : namespace.hashCode());\r
- result = prime * result + ((name == null) ? 0 : name.hashCode());\r
- result = prime * result + ((revision == null) ? 0 : revision.hashCode());\r
- result = prime * result + ((prefix == null) ? 0 : prefix.hashCode());\r
- result = prime * result + ((yangVersion == null) ? 0 : yangVersion.hashCode());\r
- return result;\r
- }\r
-\r
- @Override\r
- public boolean equals(Object obj) {\r
- if (this == obj) {\r
- return true;\r
- }\r
- if (obj == null) {\r
- return false;\r
- }\r
- if (getClass() != obj.getClass()) {\r
- return false;\r
- }\r
- ModuleImpl other = (ModuleImpl) obj;\r
- if (namespace == null) {\r
- if (other.namespace != null) {\r
- return false;\r
- }\r
- } else if (!namespace.equals(other.namespace)) {\r
- return false;\r
- }\r
- if (name == null) {\r
- if (other.name != null) {\r
- return false;\r
- }\r
- } else if (!name.equals(other.name)) {\r
- return false;\r
- }\r
- if (revision == null) {\r
- if (other.revision != null) {\r
- return false;\r
- }\r
- } else if (!revision.equals(other.revision)) {\r
- return false;\r
- }\r
- if (prefix == null) {\r
- if (other.prefix != null) {\r
- return false;\r
- }\r
- } else if (!prefix.equals(other.prefix)) {\r
- return false;\r
- }\r
- if (yangVersion == null) {\r
- if (other.yangVersion != null) {\r
- return false;\r
- }\r
- } else if (!yangVersion.equals(other.yangVersion)) {\r
- return false;\r
- }\r
- return true;\r
- }\r
-\r
- @Override\r
- public String toString() {\r
- StringBuilder sb = new StringBuilder(\r
- ModuleImpl.class.getSimpleName());\r
- sb.append("[\n");\r
- sb.append("name=" + name + ",\n");\r
- sb.append("namespace=" + namespace + ",\n");\r
- sb.append("revision=" + revision + ",\n");\r
- sb.append("prefix=" + prefix + ",\n");\r
- sb.append("yangVersion=" + yangVersion + ",\n");\r
- sb.append("description=" + description + ",\n");\r
- sb.append("reference=" + reference + ",\n");\r
- sb.append("organization=" + organization + ",\n");\r
- sb.append("contact=" + contact + ",\n");\r
- sb.append("childNodes=" + childNodes.values() + ",\n");\r
- sb.append("groupings=" + groupings + ",\n");\r
- sb.append("imports=" + imports + ",\n");\r
- sb.append("features=" + features + ",\n");\r
- sb.append("typeDefinitions=" + typeDefinitions + ",\n");\r
- sb.append("notifications=" + notifications + ",\n");\r
- sb.append("augmentations=" + augmentations + ",\n");\r
- sb.append("rpcs=" + rpcs + ",\n");\r
- sb.append("deviations=" + deviations + "\n");\r
- sb.append("uses=" + uses + "\n");\r
- sb.append("]");\r
- return sb.toString();\r
- }\r
- }\r
-\r
- private ModuleImport createModuleImport(final String moduleName,\r
- final Date revision, final String prefix) {\r
- ModuleImport moduleImport = new ModuleImport() {\r
- @Override\r
- public String getModuleName() {\r
- return moduleName;\r
- }\r
-\r
- @Override\r
- public Date getRevision() {\r
- return revision;\r
- }\r
-\r
- @Override\r
- public String getPrefix() {\r
- return prefix;\r
- }\r
-\r
- @Override\r
- public int hashCode() {\r
- final int prime = 31;\r
- int result = 1;\r
- result = prime * result\r
- + ((moduleName == null) ? 0 : moduleName.hashCode());\r
- result = prime * result\r
- + ((revision == null) ? 0 : revision.hashCode());\r
- result = prime * result\r
- + ((prefix == null) ? 0 : prefix.hashCode());\r
- return result;\r
- }\r
-\r
- @Override\r
- public boolean equals(Object obj) {\r
- if (this == obj) {\r
- return true;\r
- }\r
- if (obj == null) {\r
- return false;\r
- }\r
- if (getClass() != obj.getClass()) {\r
- return false;\r
- }\r
- ModuleImport other = (ModuleImport) obj;\r
- if (getModuleName() == null) {\r
- if (other.getModuleName() != null) {\r
- return false;\r
- }\r
- } else if (!getModuleName().equals(other.getModuleName())) {\r
- return false;\r
- }\r
- if (getRevision() == null) {\r
- if (other.getRevision() != null) {\r
- return false;\r
- }\r
- } else if (!getRevision().equals(other.getRevision())) {\r
- return false;\r
- }\r
- if (getPrefix() == null) {\r
- if (other.getPrefix() != null) {\r
- return false;\r
- }\r
- } else if (!getPrefix().equals(other.getPrefix())) {\r
- return false;\r
- }\r
- return true;\r
- }\r
-\r
- @Override\r
- public String toString() {\r
- return "ModuleImport[moduleName=" + moduleName + ", revision="\r
- + revision + ", prefix=" + prefix + "]";\r
- }\r
- };\r
- return moduleImport;\r
- }\r
-\r
- /**\r
- * Traverse through given addedChilds and add only direct module childs.\r
- * Direct module child path size is 2 (1. module name, 2. child name).\r
- *\r
- * @param addedChilds\r
- * @return map of children, where key is child QName and value is child\r
- * itself\r
- */\r
- private Map<QName, DataSchemaNode> buildModuleChildNodes(\r
- Map<List<String>, DataSchemaNodeBuilder> addedChilds) {\r
- final Map<QName, DataSchemaNode> childNodes = new HashMap<QName, DataSchemaNode>();\r
- for (Map.Entry<List<String>, DataSchemaNodeBuilder> entry : addedChilds\r
- .entrySet()) {\r
- if (entry.getKey().size() == 2) {\r
- DataSchemaNode node = entry.getValue().build();\r
- QName qname = entry.getValue().getQName();\r
- childNodes.put(qname, node);\r
- }\r
- }\r
- return childNodes;\r
- }\r
-\r
- /**\r
- * Traverse through given addedGroupings and add only direct module\r
- * groupings. Direct module grouping path size is 2 (1. module name, 2.\r
- * grouping name).\r
- *\r
- * @param addedGroupings\r
- * @return set of built GroupingDefinition objects\r
- */\r
- private Set<GroupingDefinition> buildModuleGroupings(\r
- Map<List<String>, GroupingBuilder> addedGroupings) {\r
- final Set<GroupingDefinition> groupings = new HashSet<GroupingDefinition>();\r
- for (Map.Entry<List<String>, GroupingBuilder> entry : addedGroupings\r
- .entrySet()) {\r
- if (entry.getKey().size() == 2) {\r
- groupings.add(entry.getValue().build());\r
- }\r
- }\r
- return groupings;\r
- }\r
-\r
- /**\r
- * Traverse through given addedRpcs and build RpcDefinition objects.\r
- *\r
- * @param addedRpcs\r
- * @return set of built RpcDefinition objects\r
- */\r
- private Set<RpcDefinition> buildModuleRpcs(\r
- Map<List<String>, RpcDefinitionBuilder> addedRpcs) {\r
- final Set<RpcDefinition> rpcs = new HashSet<RpcDefinition>();\r
- RpcDefinitionBuilder builder;\r
- for (Map.Entry<List<String>, RpcDefinitionBuilder> entry : addedRpcs\r
- .entrySet()) {\r
- builder = entry.getValue();\r
- RpcDefinition rpc = builder.build();\r
- rpcs.add(rpc);\r
- }\r
- return rpcs;\r
- }\r
-\r
- /**\r
- * Traverse through given addedTypedefs and add only direct module typedef\r
- * statements. Direct module typedef path size is 2 (1. module name, 2.\r
- * typedef name).\r
- *\r
- * @param addedTypedefs\r
- * @return set of built module typedef statements\r
- */\r
- private Set<TypeDefinition<?>> buildModuleTypedefs(\r
- Map<List<String>, TypeDefinitionBuilder> addedTypedefs) {\r
- Set<TypeDefinition<?>> typedefs = new HashSet<TypeDefinition<?>>();\r
- for (Map.Entry<List<String>, TypeDefinitionBuilder> entry : addedTypedefs\r
- .entrySet()) {\r
- if (entry.getKey().size() == 2) {\r
- TypeDefinition<? extends TypeDefinition<?>> node = entry\r
- .getValue().build();\r
- typedefs.add(node);\r
- }\r
- }\r
- return typedefs;\r
- }\r
-\r
- /**\r
- * Traverse through given addedUsesNodes and add only direct module uses\r
- * nodes. Direct module uses node path size is 2 (1. module name, 2. uses\r
- * name).\r
- *\r
- * @param addedUsesNodes\r
- * @return set of built module uses nodes\r
- */\r
- private Set<UsesNode> buildUsesNodes(\r
- Map<List<String>, UsesNodeBuilder> addedUsesNodes) {\r
- final Set<UsesNode> usesNodeDefinitions = new HashSet<UsesNode>();\r
- for (Map.Entry<List<String>, UsesNodeBuilder> entry : addedUsesNodes\r
- .entrySet()) {\r
- if (entry.getKey().size() == 2) {\r
- usesNodeDefinitions.add(entry.getValue().build());\r
- }\r
- }\r
- return usesNodeDefinitions;\r
- }\r
-\r
- /**\r
- * Traverse through given addedFeatures and add only direct module features.\r
- * Direct module feature path size is 2 (1. module name, 2. feature name).\r
- *\r
- * @param addedFeatures\r
- * @return set of built module features\r
- */\r
- private Set<FeatureDefinition> buildModuleFeatures(\r
- Map<List<String>, FeatureBuilder> addedFeatures) {\r
- Set<FeatureDefinition> features = new HashSet<FeatureDefinition>();\r
- for (Map.Entry<List<String>, FeatureBuilder> entry : addedFeatures\r
- .entrySet()) {\r
- if (entry.getKey().size() == 2) {\r
- features.add(entry.getValue().build());\r
- }\r
- }\r
- return features;\r
- }\r
-\r
-}\r
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.yang.model.parser.builder.impl;
+
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.AugmentationSchema;
+import org.opendaylight.controller.yang.model.api.DataSchemaNode;
+import org.opendaylight.controller.yang.model.api.Deviation;
+import org.opendaylight.controller.yang.model.api.ExtensionDefinition;
+import org.opendaylight.controller.yang.model.api.FeatureDefinition;
+import org.opendaylight.controller.yang.model.api.GroupingDefinition;
+import org.opendaylight.controller.yang.model.api.IdentitySchemaNode;
+import org.opendaylight.controller.yang.model.api.Module;
+import org.opendaylight.controller.yang.model.api.ModuleImport;
+import org.opendaylight.controller.yang.model.api.NotificationDefinition;
+import org.opendaylight.controller.yang.model.api.RpcDefinition;
+import org.opendaylight.controller.yang.model.api.TypeDefinition;
+import org.opendaylight.controller.yang.model.api.UsesNode;
+import org.opendaylight.controller.yang.model.parser.builder.api.AugmentationSchemaBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.Builder;
+import org.opendaylight.controller.yang.model.parser.builder.api.ChildNodeBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.DataSchemaNodeBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.GroupingBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.TypeAwareBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.TypeDefinitionAwareBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.TypeDefinitionBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.UsesNodeBuilder;
+import org.opendaylight.controller.yang.model.parser.util.YangParseException;
+
+/**
+ * This builder builds Module object. If this module is dependent on external
+ * module/modules, these dependencies must be resolved before module is built,
+ * otherwise result may not be valid.
+ */
+public class ModuleBuilder implements Builder {
+
+ private final ModuleImpl instance;
+ private final String name;
+ private String prefix;
+ private Date revision;
+
+ private final Set<ModuleImport> imports = new HashSet<ModuleImport>();
+ private Set<AugmentationSchema> augmentations;
+
+ /**
+ * All nodes, that can contain other nodes
+ */
+ private final Map<List<String>, Builder> moduleNodes = new HashMap<List<String>, Builder>();
+
+ /**
+ * Holds all child (DataSchemaNode) nodes: anyxml, choice, case, container,
+ * list, leaf, leaf-list.
+ */
+ private final Map<List<String>, DataSchemaNodeBuilder> addedChilds = new HashMap<List<String>, DataSchemaNodeBuilder>();
+
+ private final Map<List<String>, GroupingBuilder> addedGroupings = new HashMap<List<String>, GroupingBuilder>();
+ private final Set<AugmentationSchemaBuilder> addedAugments = new HashSet<AugmentationSchemaBuilder>();
+ private final Map<List<String>, UsesNodeBuilder> addedUsesNodes = new HashMap<List<String>, UsesNodeBuilder>();
+ private final Map<List<String>, RpcDefinitionBuilder> addedRpcs = new HashMap<List<String>, RpcDefinitionBuilder>();
+ private final Set<NotificationBuilder> addedNotifications = new HashSet<NotificationBuilder>();
+ private final Set<IdentitySchemaNodeBuilder> addedIdentities = new HashSet<IdentitySchemaNodeBuilder>();
+ private final Map<List<String>, FeatureBuilder> addedFeatures = new HashMap<List<String>, FeatureBuilder>();
+ private final Map<String, DeviationBuilder> addedDeviations = new HashMap<String, DeviationBuilder>();
+ private final Map<List<String>, TypeDefinitionBuilder> addedTypedefs = new HashMap<List<String>, TypeDefinitionBuilder>();
+ private final List<ExtensionBuilder> addedExtensions = new ArrayList<ExtensionBuilder>();
+
+ private final Map<List<String>, TypeAwareBuilder> dirtyNodes = new HashMap<List<String>, TypeAwareBuilder>();
+
+ public ModuleBuilder(String name) {
+ this.name = name;
+ instance = new ModuleImpl(name);
+ }
+
+
+ /**
+ * Build new Module object based on this builder.
+ */
+ @Override
+ public Module build() {
+ instance.setImports(imports);
+
+ // TYPEDEFS
+ final Set<TypeDefinition<?>> typedefs = buildModuleTypedefs(addedTypedefs);
+ instance.setTypeDefinitions(typedefs);
+
+ // CHILD NODES
+ final Map<QName, DataSchemaNode> childNodes = buildModuleChildNodes(addedChilds);
+ instance.setChildNodes(childNodes);
+
+ // GROUPINGS
+ final Set<GroupingDefinition> groupings = buildModuleGroupings(addedGroupings);
+ instance.setGroupings(groupings);
+
+ // USES
+ final Set<UsesNode> usesDefinitions = buildUsesNodes(addedUsesNodes);
+ instance.setUses(usesDefinitions);
+
+ // FEATURES
+ final Set<FeatureDefinition> features = buildModuleFeatures(addedFeatures);
+ instance.setFeatures(features);
+
+ // NOTIFICATIONS
+ final Set<NotificationDefinition> notifications = new HashSet<NotificationDefinition>();
+ for (NotificationBuilder entry : addedNotifications) {
+ notifications.add((NotificationDefinition) entry.build());
+ }
+ instance.setNotifications(notifications);
+
+ // AUGMENTATIONS
+ instance.setAugmentations(augmentations);
+
+ // RPCs
+ final Set<RpcDefinition> rpcs = buildModuleRpcs(addedRpcs);
+ instance.setRpcs(rpcs);
+
+ // DEVIATIONS
+ final Set<Deviation> deviations = new HashSet<Deviation>();
+ for (Map.Entry<String, DeviationBuilder> entry : addedDeviations
+ .entrySet()) {
+ deviations.add(entry.getValue().build());
+ }
+ instance.setDeviations(deviations);
+
+ // EXTENSIONS
+ final List<ExtensionDefinition> extensions = new ArrayList<ExtensionDefinition>();
+ for(ExtensionBuilder b : addedExtensions) {
+ extensions.add(b.build());
+ }
+ instance.setExtensionSchemaNodes(extensions);
+
+ // IDENTITIES
+ final Set<IdentitySchemaNode> identities = new HashSet<IdentitySchemaNode>();
+ for(IdentitySchemaNodeBuilder idBuilder : addedIdentities) {
+ identities.add(idBuilder.build());
+ }
+ instance.setIdentities(identities);
+
+ return instance;
+ }
+
+ public Builder getNode(List<String> path) {
+ return moduleNodes.get(path);
+ }
+
+ public Map<List<String>, TypeAwareBuilder> getDirtyNodes() {
+ return dirtyNodes;
+ }
+
+ public Set<AugmentationSchemaBuilder> getAddedAugments() {
+ return addedAugments;
+ }
+
+ public Set<IdentitySchemaNodeBuilder> getAddedIdentities() {
+ return addedIdentities;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public String getPrefix() {
+ return prefix;
+ }
+
+ public Date getRevision() {
+ return revision;
+ }
+
+ public void addDirtyNode(List<String> path) {
+ List<String> dirtyNodePath = new ArrayList<String>(path);
+ TypeAwareBuilder nodeBuilder = (TypeAwareBuilder) moduleNodes
+ .get(dirtyNodePath);
+ dirtyNodes.put(dirtyNodePath, nodeBuilder);
+ }
+
+ public void setNamespace(URI namespace) {
+ instance.setNamespace(namespace);
+ }
+
+ public void setRevision(Date revision) {
+ this.revision = revision;
+ instance.setRevision(revision);
+ }
+
+ public void setPrefix(String prefix) {
+ this.prefix = prefix;
+ instance.setPrefix(prefix);
+ }
+
+ public void setYangVersion(String yangVersion) {
+ instance.setYangVersion(yangVersion);
+ }
+
+ public void setDescription(String description) {
+ instance.setDescription(description);
+ }
+
+ public void setReference(String reference) {
+ instance.setReference(reference);
+ }
+
+ public void setOrganization(String organization) {
+ instance.setOrganization(organization);
+ }
+
+ public void setContact(String contact) {
+ instance.setContact(contact);
+ }
+
+ public void setAugmentations(Set<AugmentationSchema> augmentations) {
+ this.augmentations = augmentations;
+ }
+
+ public boolean addModuleImport(final String moduleName,
+ final Date revision, final String prefix) {
+ ModuleImport moduleImport = createModuleImport(moduleName, revision,
+ prefix);
+ return imports.add(moduleImport);
+ }
+
+ public Set<ModuleImport> getModuleImports() {
+ return imports;
+ }
+
+ public ExtensionBuilder addExtension(QName qname) {
+ return new ExtensionBuilder(qname);
+ }
+
+ public ContainerSchemaNodeBuilder addContainerNode(QName containerName,
+ List<String> parentPath) {
+ List<String> pathToNode = new ArrayList<String>(parentPath);
+
+ ContainerSchemaNodeBuilder containerBuilder = new ContainerSchemaNodeBuilder(
+ containerName);
+
+ ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes
+ .get(pathToNode);
+ if (parent != null) {
+ if(parent instanceof AugmentationSchemaBuilder) {
+ containerBuilder.setAugmenting(true);
+ }
+ parent.addChildNode(containerBuilder);
+ }
+
+ pathToNode.add(containerName.getLocalName());
+ moduleNodes.put(pathToNode, containerBuilder);
+ addedChilds.put(pathToNode, containerBuilder);
+
+ return containerBuilder;
+ }
+
+ public ListSchemaNodeBuilder addListNode(QName listName,
+ List<String> parentPath) {
+ List<String> pathToNode = new ArrayList<String>(parentPath);
+
+ ListSchemaNodeBuilder listBuilder = new ListSchemaNodeBuilder(listName);
+
+ ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes
+ .get(pathToNode);
+ if (parent != null) {
+ if(parent instanceof AugmentationSchemaBuilder) {
+ listBuilder.setAugmenting(true);
+ }
+ parent.addChildNode(listBuilder);
+ }
+
+ pathToNode.add(listName.getLocalName());
+ moduleNodes.put(pathToNode, listBuilder);
+ addedChilds.put(pathToNode, listBuilder);
+
+ return listBuilder;
+ }
+
+ public LeafSchemaNodeBuilder addLeafNode(QName leafName,
+ List<String> parentPath) {
+ List<String> pathToNode = new ArrayList<String>(parentPath);
+
+ LeafSchemaNodeBuilder leafBuilder = new LeafSchemaNodeBuilder(leafName);
+
+ ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes.get(pathToNode);
+ if (parent != null) {
+ if(parent instanceof AugmentationSchemaBuilder) {
+ leafBuilder.setAugmenting(true);
+ }
+ parent.addChildNode(leafBuilder);
+ }
+
+ pathToNode.add(leafName.getLocalName());
+ addedChilds.put(pathToNode, leafBuilder);
+ moduleNodes.put(pathToNode, leafBuilder);
+
+ return leafBuilder;
+ }
+
+ public LeafListSchemaNodeBuilder addLeafListNode(QName leafListName,
+ List<String> parentPath) {
+ List<String> pathToNode = new ArrayList<String>(parentPath);
+
+ LeafListSchemaNodeBuilder leafListBuilder = new LeafListSchemaNodeBuilder(
+ leafListName);
+ ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes.get(pathToNode);
+ if (parent != null) {
+ if(parent instanceof AugmentationSchemaBuilder) {
+ leafListBuilder.setAugmenting(true);
+ }
+ parent.addChildNode(leafListBuilder);
+ }
+
+ pathToNode.add(leafListName.getLocalName());
+ addedChilds.put(pathToNode, leafListBuilder);
+ moduleNodes.put(pathToNode, leafListBuilder);
+
+ return leafListBuilder;
+ }
+
+ public GroupingBuilder addGrouping(QName qname, List<String> parentPath) {
+ List<String> pathToGroup = new ArrayList<String>(parentPath);
+
+ GroupingBuilder builder = new GroupingBuilderImpl(qname);
+ ChildNodeBuilder parentNodeBuilder = (ChildNodeBuilder) moduleNodes.get(pathToGroup);
+ if (parentNodeBuilder != null) {
+ parentNodeBuilder.addGrouping(builder);
+ }
+
+ pathToGroup.add(qname.getLocalName());
+ moduleNodes.put(pathToGroup, builder);
+ addedGroupings.put(pathToGroup, builder);
+
+ return builder;
+ }
+
+ public AugmentationSchemaBuilder addAugment(String name,
+ List<String> parentPath) {
+ List<String> pathToAugment = new ArrayList<String>(parentPath);
+
+ AugmentationSchemaBuilder builder = new AugmentationSchemaBuilderImpl(name);
+
+ // augment can only be in 'module' or 'uses' statement
+ UsesNodeBuilder parent = addedUsesNodes.get(pathToAugment);
+ if (parent != null) {
+ parent.addAugment(builder);
+ }
+
+ pathToAugment.add(name);
+ moduleNodes.put(pathToAugment, builder);
+ addedAugments.add(builder);
+
+ return builder;
+ }
+
+ public UsesNodeBuilder addUsesNode(String groupingPathStr,
+ List<String> parentPath) {
+ List<String> pathToUses = new ArrayList<String>(parentPath);
+
+ UsesNodeBuilder usesBuilder = new UsesNodeBuilderImpl(groupingPathStr);
+
+ ChildNodeBuilder parent = (ChildNodeBuilder) moduleNodes.get(pathToUses);
+ if (parent != null) {
+ parent.addUsesNode(usesBuilder);
+ }
+
+ pathToUses.add(groupingPathStr);
+ addedUsesNodes.put(pathToUses, usesBuilder);
+
+ return usesBuilder;
+ }
+
+ public RpcDefinitionBuilder addRpc(QName qname, List<String> parentPath) {
+ List<String> pathToRpc = new ArrayList<String>(parentPath);
+
+ RpcDefinitionBuilder rpcBuilder = new RpcDefinitionBuilder(qname);
+
+ pathToRpc.add(qname.getLocalName());
+ addedRpcs.put(pathToRpc, rpcBuilder);
+
+ QName inputQName = new QName(qname.getNamespace(), qname.getRevision(),
+ qname.getPrefix(), "input");
+ ContainerSchemaNodeBuilder inputBuilder = new ContainerSchemaNodeBuilder(inputQName);
+ List<String> pathToInput = new ArrayList<String>(pathToRpc);
+ pathToInput.add("input");
+ moduleNodes.put(pathToInput, inputBuilder);
+ rpcBuilder.setInput(inputBuilder);
+
+ QName outputQName = new QName(qname.getNamespace(),
+ qname.getRevision(), qname.getPrefix(), "output");
+ ContainerSchemaNodeBuilder outputBuilder = new ContainerSchemaNodeBuilder(outputQName);
+ List<String> pathToOutput = new ArrayList<String>(pathToRpc);
+ pathToOutput.add("output");
+ moduleNodes.put(pathToOutput, outputBuilder);
+ rpcBuilder.setOutput(outputBuilder);
+
+ return rpcBuilder;
+ }
+
+ public NotificationBuilder addNotification(QName notificationName,
+ List<String> parentPath) {
+ List<String> pathToNotification = new ArrayList<String>(parentPath);
+
+ NotificationBuilder builder = new NotificationBuilder(
+ notificationName);
+
+ pathToNotification.add(notificationName.getLocalName());
+ moduleNodes.put(pathToNotification, builder);
+ addedNotifications.add(builder);
+
+ return builder;
+ }
+
+ public FeatureBuilder addFeature(QName featureName, List<String> parentPath) {
+ List<String> pathToFeature = new ArrayList<String>(parentPath);
+ pathToFeature.add(featureName.getLocalName());
+
+ FeatureBuilder builder = new FeatureBuilder(featureName);
+ addedFeatures.put(pathToFeature, builder);
+ return builder;
+ }
+
+ public TypedefBuilder addTypedef(QName typeDefName, List<String> parentPath) {
+ List<String> pathToType = new ArrayList<String>(parentPath);
+ TypedefBuilder builder = new TypedefBuilder(typeDefName);
+ TypeDefinitionAwareBuilder parent = (TypeDefinitionAwareBuilder) moduleNodes.get(pathToType);
+ if (parent != null) {
+ parent.addTypedef(builder);
+ }
+ pathToType.add(typeDefName.getLocalName());
+ addedTypedefs.put(pathToType, builder);
+ moduleNodes.put(pathToType, builder);
+ return builder;
+ }
+
+ public Set<TypeDefinitionBuilder> getModuleTypedefs() {
+ Set<TypeDefinitionBuilder> typedefs = new HashSet<TypeDefinitionBuilder>();
+ for (Map.Entry<List<String>, TypeDefinitionBuilder> entry : addedTypedefs.entrySet()) {
+ if (entry.getKey().size() == 2) {
+ typedefs.add(entry.getValue());
+ }
+ }
+ return typedefs;
+ }
+
+ public void setType(TypeDefinition<?> type, List<String> parentPath) {
+ TypeAwareBuilder parent = (TypeAwareBuilder) moduleNodes.get(parentPath);
+ if(parent == null) {
+ throw new YangParseException("Failed to set type '"+ type.getQName().getLocalName() +"'. Parent node not found.");
+ }
+ parent.setType(type);
+ }
+
+ public void addUnionType(List<String> parentPath) {
+ TypeAwareBuilder parent = (TypeAwareBuilder) moduleNodes.get(parentPath);
+ UnionTypeBuilder union = new UnionTypeBuilder();
+ parent.setType(union);
+
+ List<String> path = new ArrayList<String>(parentPath);
+ path.add("union");
+
+ moduleNodes.put(path, union);
+ }
+
+ public DeviationBuilder addDeviation(String targetPath) {
+ DeviationBuilder builder = new DeviationBuilder(targetPath);
+ addedDeviations.put(targetPath, builder);
+ return builder;
+ }
+
+ public IdentitySchemaNodeBuilder addIdentity(QName qname) {
+ IdentitySchemaNodeBuilder builder = new IdentitySchemaNodeBuilder(qname);
+ addedIdentities.add(builder);
+ return builder;
+ }
+
+ public void addConfiguration(boolean configuration, List<String> parentPath) {
+ Builder builder = moduleNodes.get(parentPath);
+ if (builder instanceof DeviationBuilder) {
+ // skip
+ // TODO
+ } else {
+ DataSchemaNodeBuilder configBuilder = (DataSchemaNodeBuilder) moduleNodes.get(parentPath);
+ configBuilder.setConfiguration(configuration);
+ }
+ }
+
+ public UnknownSchemaNodeBuilder addUnknownSchemaNode(QName qname, List<String> parentPath) {
+ return new UnknownSchemaNodeBuilder(qname);
+ }
+
+
+ private class ModuleImpl implements Module {
+ private URI namespace;
+ private final String name;
+ private Date revision;
+ private String prefix;
+ private String yangVersion;
+ private String description;
+ private String reference;
+ private String organization;
+ private String contact;
+ private Set<ModuleImport> imports = Collections.emptySet();
+ private Set<FeatureDefinition> features = Collections.emptySet();
+ private Set<TypeDefinition<?>> typeDefinitions = Collections.emptySet();
+ private Set<NotificationDefinition> notifications = Collections.emptySet();
+ private Set<AugmentationSchema> augmentations = Collections.emptySet();
+ private Set<RpcDefinition> rpcs = Collections.emptySet();
+ private Set<Deviation> deviations = Collections.emptySet();
+ private Map<QName, DataSchemaNode> childNodes = Collections.emptyMap();
+ private Set<GroupingDefinition> groupings = Collections.emptySet();
+ private Set<UsesNode> uses = Collections.emptySet();
+ private List<ExtensionDefinition> extensionNodes = Collections.emptyList();
+ private Set<IdentitySchemaNode> identities = Collections.emptySet();
+
+ private ModuleImpl(String name) {
+ this.name = name;
+ }
+
+ @Override
+ public URI getNamespace() {
+ return namespace;
+ }
+
+ private void setNamespace(URI namespace) {
+ this.namespace = namespace;
+ }
+
+ @Override
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public Date getRevision() {
+ return revision;
+ }
+
+ private void setRevision(Date revision) {
+ this.revision = revision;
+ }
+
+ @Override
+ public String getPrefix() {
+ return prefix;
+ }
+
+ private void setPrefix(String prefix) {
+ this.prefix = prefix;
+ }
+
+ @Override
+ public String getYangVersion() {
+ return yangVersion;
+ }
+
+ private void setYangVersion(String yangVersion) {
+ this.yangVersion = yangVersion;
+ }
+
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
+ private void setDescription(String description) {
+ this.description = description;
+ }
+
+ @Override
+ public String getReference() {
+ return reference;
+ }
+
+ private void setReference(String reference) {
+ this.reference = reference;
+ }
+
+ @Override
+ public String getOrganization() {
+ return organization;
+ }
+
+ private void setOrganization(String organization) {
+ this.organization = organization;
+ }
+
+ @Override
+ public String getContact() {
+ return contact;
+ }
+
+ private void setContact(String contact) {
+ this.contact = contact;
+ }
+
+ @Override
+ public Set<ModuleImport> getImports() {
+ return imports;
+ }
+
+ private void setImports(Set<ModuleImport> imports) {
+ if(imports != null) {
+ this.imports = imports;
+ }
+ }
+
+ @Override
+ public Set<FeatureDefinition> getFeatures() {
+ return features;
+ }
+
+ private void setFeatures(Set<FeatureDefinition> features) {
+ if(features != null) {
+ this.features = features;
+ }
+ }
+
+ @Override
+ public Set<TypeDefinition<?>> getTypeDefinitions() {
+ return typeDefinitions;
+ }
+
+ private void setTypeDefinitions(Set<TypeDefinition<?>> typeDefinitions) {
+ if(typeDefinitions != null) {
+ this.typeDefinitions = typeDefinitions;
+ }
+ }
+
+ @Override
+ public Set<NotificationDefinition> getNotifications() {
+ return notifications;
+ }
+
+ private void setNotifications(Set<NotificationDefinition> notifications) {
+ if(notifications != null) {
+ this.notifications = notifications;
+ }
+ }
+
+ @Override
+ public Set<AugmentationSchema> getAugmentations() {
+ return augmentations;
+ }
+
+ private void setAugmentations(Set<AugmentationSchema> augmentations) {
+ if(augmentations != null) {
+ this.augmentations = augmentations;
+ }
+ }
+
+ @Override
+ public Set<RpcDefinition> getRpcs() {
+ return rpcs;
+ }
+
+ private void setRpcs(Set<RpcDefinition> rpcs) {
+ if(rpcs != null) {
+ this.rpcs = rpcs;
+ }
+ }
+
+ @Override
+ public Set<Deviation> getDeviations() {
+ return deviations;
+ }
+
+ private void setDeviations(Set<Deviation> deviations) {
+ if(deviations != null) {
+ this.deviations = deviations;
+ }
+ }
+
+ @Override
+ public Set<DataSchemaNode> getChildNodes() {
+ return new HashSet<DataSchemaNode>(childNodes.values());
+ }
+
+ private void setChildNodes(Map<QName, DataSchemaNode> childNodes) {
+ if(childNodes != null) {
+ this.childNodes = childNodes;
+ }
+ }
+
+ @Override
+ public Set<GroupingDefinition> getGroupings() {
+ return groupings;
+ }
+
+ private void setGroupings(Set<GroupingDefinition> groupings) {
+ if(groupings != null) {
+ this.groupings = groupings;
+ }
+ }
+
+ @Override
+ public Set<UsesNode> getUses() {
+ return uses;
+ }
+
+ private void setUses(Set<UsesNode> uses) {
+ if(uses != null) {
+ this.uses = uses;
+ }
+ }
+
+ @Override
+ public List<ExtensionDefinition> getExtensionSchemaNodes() {
+ return extensionNodes;
+ }
+
+ private void setExtensionSchemaNodes(List<ExtensionDefinition> extensionSchemaNodes) {
+ if(extensionSchemaNodes != null) {
+ this.extensionNodes = extensionSchemaNodes;
+ }
+ }
+
+ @Override
+ public Set<IdentitySchemaNode> getIdentities() {
+ return identities;
+ }
+
+ private void setIdentities(Set<IdentitySchemaNode> identities) {
+ if(identities != null) {
+ this.identities = identities;
+ }
+ }
+
+ @Override
+ public DataSchemaNode getDataChildByName(QName name) {
+ return childNodes.get(name);
+ }
+
+ @Override
+ public DataSchemaNode getDataChildByName(String name) {
+ DataSchemaNode result = null;
+ for (Map.Entry<QName, DataSchemaNode> entry : childNodes.entrySet()) {
+ if (entry.getKey().getLocalName().equals(name)) {
+ result = entry.getValue();
+ break;
+ }
+ }
+ return result;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((namespace == null) ? 0 : namespace.hashCode());
+ result = prime * result + ((name == null) ? 0 : name.hashCode());
+ result = prime * result + ((revision == null) ? 0 : revision.hashCode());
+ result = prime * result + ((prefix == null) ? 0 : prefix.hashCode());
+ result = prime * result + ((yangVersion == null) ? 0 : yangVersion.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ ModuleImpl other = (ModuleImpl) obj;
+ if (namespace == null) {
+ if (other.namespace != null) {
+ return false;
+ }
+ } else if (!namespace.equals(other.namespace)) {
+ return false;
+ }
+ if (name == null) {
+ if (other.name != null) {
+ return false;
+ }
+ } else if (!name.equals(other.name)) {
+ return false;
+ }
+ if (revision == null) {
+ if (other.revision != null) {
+ return false;
+ }
+ } else if (!revision.equals(other.revision)) {
+ return false;
+ }
+ if (prefix == null) {
+ if (other.prefix != null) {
+ return false;
+ }
+ } else if (!prefix.equals(other.prefix)) {
+ return false;
+ }
+ if (yangVersion == null) {
+ if (other.yangVersion != null) {
+ return false;
+ }
+ } else if (!yangVersion.equals(other.yangVersion)) {
+ return false;
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder(
+ ModuleImpl.class.getSimpleName());
+ sb.append("[\n");
+ sb.append("name=" + name + ",\n");
+ sb.append("namespace=" + namespace + ",\n");
+ sb.append("revision=" + revision + ",\n");
+ sb.append("prefix=" + prefix + ",\n");
+ sb.append("yangVersion=" + yangVersion + ",\n");
+ sb.append("description=" + description + ",\n");
+ sb.append("reference=" + reference + ",\n");
+ sb.append("organization=" + organization + ",\n");
+ sb.append("contact=" + contact + ",\n");
+ sb.append("childNodes=" + childNodes.values() + ",\n");
+ sb.append("groupings=" + groupings + ",\n");
+ sb.append("imports=" + imports + ",\n");
+ sb.append("features=" + features + ",\n");
+ sb.append("typeDefinitions=" + typeDefinitions + ",\n");
+ sb.append("notifications=" + notifications + ",\n");
+ sb.append("augmentations=" + augmentations + ",\n");
+ sb.append("rpcs=" + rpcs + ",\n");
+ sb.append("deviations=" + deviations + "\n");
+ sb.append("uses=" + uses + "\n");
+ sb.append("]");
+ return sb.toString();
+ }
+ }
+
+ private ModuleImport createModuleImport(final String moduleName,
+ final Date revision, final String prefix) {
+ ModuleImport moduleImport = new ModuleImport() {
+ @Override
+ public String getModuleName() {
+ return moduleName;
+ }
+
+ @Override
+ public Date getRevision() {
+ return revision;
+ }
+
+ @Override
+ public String getPrefix() {
+ return prefix;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result
+ + ((moduleName == null) ? 0 : moduleName.hashCode());
+ result = prime * result
+ + ((revision == null) ? 0 : revision.hashCode());
+ result = prime * result
+ + ((prefix == null) ? 0 : prefix.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ ModuleImport other = (ModuleImport) obj;
+ if (getModuleName() == null) {
+ if (other.getModuleName() != null) {
+ return false;
+ }
+ } else if (!getModuleName().equals(other.getModuleName())) {
+ return false;
+ }
+ if (getRevision() == null) {
+ if (other.getRevision() != null) {
+ return false;
+ }
+ } else if (!getRevision().equals(other.getRevision())) {
+ return false;
+ }
+ if (getPrefix() == null) {
+ if (other.getPrefix() != null) {
+ return false;
+ }
+ } else if (!getPrefix().equals(other.getPrefix())) {
+ return false;
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return "ModuleImport[moduleName=" + moduleName + ", revision="
+ + revision + ", prefix=" + prefix + "]";
+ }
+ };
+ return moduleImport;
+ }
+
+ /**
+ * Traverse through given addedChilds and add only direct module childs.
+ * Direct module child path size is 2 (1. module name, 2. child name).
+ *
+ * @param addedChilds
+ * @return map of children, where key is child QName and value is child
+ * itself
+ */
+ private Map<QName, DataSchemaNode> buildModuleChildNodes(
+ Map<List<String>, DataSchemaNodeBuilder> addedChilds) {
+ final Map<QName, DataSchemaNode> childNodes = new HashMap<QName, DataSchemaNode>();
+ for (Map.Entry<List<String>, DataSchemaNodeBuilder> entry : addedChilds.entrySet()) {
+ List<String> path = entry.getKey();
+ DataSchemaNodeBuilder child = entry.getValue();
+ if (path.size() == 2) {
+ DataSchemaNode node = child.build();
+ QName qname = node.getQName();
+ childNodes.put(qname, node);
+ }
+ }
+ return childNodes;
+ }
+
+ /**
+ * Traverse through given addedGroupings and add only direct module
+ * groupings. Direct module grouping path size is 2 (1. module name, 2.
+ * grouping name).
+ *
+ * @param addedGroupings
+ * @return set of built GroupingDefinition objects
+ */
+ private Set<GroupingDefinition> buildModuleGroupings(
+ Map<List<String>, GroupingBuilder> addedGroupings) {
+ final Set<GroupingDefinition> groupings = new HashSet<GroupingDefinition>();
+ for (Map.Entry<List<String>, GroupingBuilder> entry : addedGroupings
+ .entrySet()) {
+ if (entry.getKey().size() == 2) {
+ groupings.add(entry.getValue().build());
+ }
+ }
+ return groupings;
+ }
+
+ /**
+ * Traverse through given addedRpcs and build RpcDefinition objects.
+ *
+ * @param addedRpcs
+ * @return set of built RpcDefinition objects
+ */
+ private Set<RpcDefinition> buildModuleRpcs(
+ Map<List<String>, RpcDefinitionBuilder> addedRpcs) {
+ final Set<RpcDefinition> rpcs = new HashSet<RpcDefinition>();
+ RpcDefinitionBuilder builder;
+ for (Map.Entry<List<String>, RpcDefinitionBuilder> entry : addedRpcs
+ .entrySet()) {
+ builder = entry.getValue();
+ RpcDefinition rpc = builder.build();
+ rpcs.add(rpc);
+ }
+ return rpcs;
+ }
+
+ /**
+ * Traverse through given addedTypedefs and add only direct module typedef
+ * statements. Direct module typedef path size is 2 (1. module name, 2.
+ * typedef name).
+ *
+ * @param addedTypedefs
+ * @return set of built module typedef statements
+ */
+ private Set<TypeDefinition<?>> buildModuleTypedefs(
+ Map<List<String>, TypeDefinitionBuilder> addedTypedefs) {
+ Set<TypeDefinition<?>> typedefs = new HashSet<TypeDefinition<?>>();
+ for (Map.Entry<List<String>, TypeDefinitionBuilder> entry : addedTypedefs.entrySet()) {
+ List<String> key = entry.getKey();
+ TypeDefinitionBuilder typedefBuilder = entry.getValue();
+ if (key.size() == 2) {
+ TypeDefinition<? extends TypeDefinition<?>> node = typedefBuilder.build();
+ typedefs.add(node);
+ }
+ }
+ return typedefs;
+ }
+
+ /**
+ * Traverse through given addedUsesNodes and add only direct module uses
+ * nodes. Direct module uses node path size is 2 (1. module name, 2. uses
+ * name).
+ *
+ * @param addedUsesNodes
+ * @return set of built module uses nodes
+ */
+ private Set<UsesNode> buildUsesNodes(
+ Map<List<String>, UsesNodeBuilder> addedUsesNodes) {
+ final Set<UsesNode> usesNodeDefinitions = new HashSet<UsesNode>();
+ for (Map.Entry<List<String>, UsesNodeBuilder> entry : addedUsesNodes
+ .entrySet()) {
+ if (entry.getKey().size() == 2) {
+ usesNodeDefinitions.add(entry.getValue().build());
+ }
+ }
+ return usesNodeDefinitions;
+ }
+
+ /**
+ * Traverse through given addedFeatures and add only direct module features.
+ * Direct module feature path size is 2 (1. module name, 2. feature name).
+ *
+ * @param addedFeatures
+ * @return set of built module features
+ */
+ private Set<FeatureDefinition> buildModuleFeatures(
+ Map<List<String>, FeatureBuilder> addedFeatures) {
+ Set<FeatureDefinition> features = new HashSet<FeatureDefinition>();
+ for (Map.Entry<List<String>, FeatureBuilder> entry : addedFeatures
+ .entrySet()) {
+ if (entry.getKey().size() == 2) {
+ features.add(entry.getValue().build());
+ }
+ }
+ return features;
+ }
+
+}
*/
package org.opendaylight.controller.yang.model.parser.builder.impl;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.opendaylight.controller.yang.model.api.Status;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
-import org.opendaylight.controller.yang.model.parser.builder.api.SchemaNodeBuilder;
-import org.opendaylight.controller.yang.model.parser.builder.api.TypeAwareBuilder;
+import org.opendaylight.controller.yang.model.api.type.LengthConstraint;
+import org.opendaylight.controller.yang.model.api.type.PatternConstraint;
+import org.opendaylight.controller.yang.model.api.type.RangeConstraint;
+import org.opendaylight.controller.yang.model.parser.builder.api.AbstractTypeAwareBuilder;
import org.opendaylight.controller.yang.model.parser.builder.api.TypeDefinitionBuilder;
+import org.opendaylight.controller.yang.model.parser.util.YangParseException;
+import org.opendaylight.controller.yang.model.util.ExtendedType;
import org.opendaylight.controller.yang.model.util.UnknownType;
-import org.opendaylight.controller.yang.model.util.YangTypesConverter;
-
-public class TypedefBuilder implements TypeDefinitionBuilder,
- SchemaNodeBuilder, TypeAwareBuilder {
+public class TypedefBuilder extends AbstractTypeAwareBuilder implements
+ TypeDefinitionBuilder {
private final QName qname;
private SchemaPath schemaPath;
- private TypeDefinition<?> baseType;
+
+ private final List<UnknownSchemaNodeBuilder> addedUnknownNodes = new ArrayList<UnknownSchemaNodeBuilder>();
+ private List<RangeConstraint> ranges = Collections.emptyList();
+ private List<LengthConstraint> lengths = Collections.emptyList();
+ private List<PatternConstraint> patterns = Collections.emptyList();
+ private Integer fractionDigits = null;
private String description;
private String reference;
private String units;
private Object defaultValue;
- TypedefBuilder(QName qname) {
+ public TypedefBuilder(QName qname) {
this.qname = qname;
}
@Override
public TypeDefinition<? extends TypeDefinition<?>> build() {
- final TypeDefinition<?> type = YangTypesConverter
- .javaTypeForBaseYangType(qname);
- if (type != null) {
- return type;
+ TypeDefinition<?> result = null;
+ ExtendedType.Builder typeBuilder = null;
+ if ((type == null || type instanceof UnknownType) && typedef == null) {
+ throw new YangParseException("Unresolved type: '"
+ + qname.getLocalName() + "'.");
+ }
+ if (type == null || type instanceof UnknownType) {
+ typeBuilder = new ExtendedType.Builder(qname, typedef.build(),
+ description, reference);
} else {
- if (baseType != null) {
- // typedef
- TypeDefinitionImpl instance = new TypeDefinitionImpl(qname);
- instance.setDescription(description);
- instance.setReference(reference);
- instance.setStatus(status);
- instance.setPath(schemaPath);
- instance.setBaseType(baseType);
- instance.setUnits(units);
- instance.setDefaultValue(defaultValue);
- return instance;
- } else {
- // type
- final UnknownType.Builder unknownBuilder = new UnknownType.Builder(
- qname, description, reference);
- unknownBuilder.status(status);
- return unknownBuilder.build();
- }
+ typeBuilder = new ExtendedType.Builder(qname, type, description,
+ reference);
+ }
+ typeBuilder.status(status);
+ typeBuilder.units(units);
+ typeBuilder.defaultValue(defaultValue);
+
+ typeBuilder.ranges(ranges);
+ typeBuilder.lengths(lengths);
+ typeBuilder.patterns(patterns);
+
+ // UNKNOWN NODES
+ final List<UnknownSchemaNode> unknownNodes = new ArrayList<UnknownSchemaNode>();
+ for (UnknownSchemaNodeBuilder b : addedUnknownNodes) {
+ unknownNodes.add(b.build());
}
+ typeBuilder.unknownSchemaNodes(unknownNodes);
+ result = typeBuilder.build();
+ return result;
}
@Override
return qname;
}
+ @Override
+ public SchemaPath getPath() {
+ return schemaPath;
+ }
+
@Override
public void setPath(final SchemaPath schemaPath) {
this.schemaPath = schemaPath;
}
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
@Override
public void setDescription(final String description) {
this.description = description;
}
+ @Override
+ public String getReference() {
+ return reference;
+ }
+
@Override
public void setReference(final String reference) {
this.reference = reference;
}
+ @Override
+ public Status getStatus() {
+ return status;
+ }
+
@Override
public void setStatus(final Status status) {
if (status != null) {
}
}
+ @Override
+ public String getUnits() {
+ return units;
+ }
+
@Override
public void setUnits(String units) {
this.units = units;
}
@Override
- public void setDefaultValue(Object defaultValue) {
- this.defaultValue = defaultValue;
+ public Object getDefaultValue() {
+ return defaultValue;
}
@Override
- public TypeDefinition<?> getType() {
- return baseType;
+ public void setDefaultValue(Object defaultValue) {
+ this.defaultValue = defaultValue;
}
@Override
- public void setType(TypeDefinition<?> baseType) {
- this.baseType = baseType;
+ public List<UnknownSchemaNodeBuilder> getUnknownNodes() {
+ return addedUnknownNodes;
}
@Override
- public TypeDefinition<?> getBaseType() {
- return baseType;
+ public void addUnknownSchemaNode(UnknownSchemaNodeBuilder unknownNode) {
+ addedUnknownNodes.add(unknownNode);
}
@Override
- public void addUnknownSchemaNode(UnknownSchemaNodeBuilder unknownSchemaNodeBuilder) {
- // TODO
+ public List<RangeConstraint> getRanges() {
+ return ranges;
}
- private static class TypeDefinitionImpl<T extends TypeDefinition<T>>
- implements TypeDefinition<T> {
-
- private final QName qname;
- private SchemaPath path;
- private String description;
- private String reference;
- private Status status = Status.CURRENT;
- private Object defaultValue;
- private T baseType;
- private String units;
- private List<UnknownSchemaNode> unknownSchemaNodes = Collections.emptyList();
-
- private TypeDefinitionImpl(QName qname) {
- this.qname = qname;
- }
-
- @Override
- public QName getQName() {
- return qname;
- }
-
- @Override
- public SchemaPath getPath() {
- return path;
- }
-
- private void setPath(SchemaPath path) {
- this.path = path;
- }
-
- @Override
- public String getDescription() {
- return description;
- }
-
- private void setDescription(String description) {
- this.description = description;
- }
-
- @Override
- public String getReference() {
- return reference;
- }
-
- private void setReference(String reference) {
- this.reference = reference;
- }
-
- @Override
- public Status getStatus() {
- return status;
- }
-
- private void setStatus(Status status) {
- this.status = status;
- }
-
- @Override
- public T getBaseType() {
- return baseType;
- }
-
- private void setBaseType(T type) {
- this.baseType = type;
- }
-
- @Override
- public String getUnits() {
- return units;
+ @Override
+ public void setRanges(List<RangeConstraint> ranges) {
+ if (ranges != null) {
+ this.ranges = ranges;
}
+ }
- private void setUnits(String units) {
- this.units = units;
- }
+ @Override
+ public List<LengthConstraint> getLengths() {
+ return lengths;
+ }
- @Override
- public Object getDefaultValue() {
- return defaultValue;
+ @Override
+ public void setLengths(List<LengthConstraint> lengths) {
+ if (lengths != null) {
+ this.lengths = lengths;
}
+ }
- private void setDefaultValue(Object defaultValue) {
- this.defaultValue = defaultValue;
- }
+ @Override
+ public List<PatternConstraint> getPatterns() {
+ return patterns;
+ }
- @Override
- public List<UnknownSchemaNode> getUnknownSchemaNodes() {
- return unknownSchemaNodes;
+ @Override
+ public void setPatterns(List<PatternConstraint> patterns) {
+ if (patterns != null) {
+ this.patterns = patterns;
}
+ }
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((qname == null) ? 0 : qname.hashCode());
- result = prime * result + ((path == null) ? 0 : path.hashCode());
- result = prime * result
- + ((description == null) ? 0 : description.hashCode());
- result = prime * result
- + ((reference == null) ? 0 : reference.hashCode());
- result = prime * result
- + ((status == null) ? 0 : status.hashCode());
- result = prime * result
- + ((baseType == null) ? 0 : baseType.hashCode());
- result = prime * result + ((units == null) ? 0 : units.hashCode());
- return result;
- }
+ @Override
+ public Integer getFractionDigits() {
+ return fractionDigits;
+ }
- @Override
- public boolean equals(Object obj) {
- if (this == obj) {
- return true;
- }
- if (obj == null) {
- return false;
- }
- if (getClass() != obj.getClass()) {
- return false;
- }
- TypeDefinitionImpl other = (TypeDefinitionImpl) obj;
- if (qname == null) {
- if (other.qname != null) {
- return false;
- }
- } else if (!qname.equals(other.qname)) {
- return false;
- }
- if (path == null) {
- if (other.path != null) {
- return false;
- }
- } else if (!path.equals(other.path)) {
- return false;
- }
- if (description == null) {
- if (other.description != null) {
- return false;
- }
- } else if (!description.equals(other.description)) {
- return false;
- }
- if (reference == null) {
- if (other.reference != null) {
- return false;
- }
- } else if (!reference.equals(other.reference)) {
- return false;
- }
- if (status == null) {
- if (other.status != null) {
- return false;
- }
- } else if (!status.equals(other.status)) {
- return false;
- }
- if (baseType == null) {
- if (other.baseType != null) {
- return false;
- }
- } else if (!baseType.equals(other.baseType)) {
- return false;
- }
- if (units == null) {
- if (other.units != null) {
- return false;
- }
- } else if (!units.equals(other.units)) {
- return false;
- }
- return true;
- }
+ @Override
+ public void setFractionDigits(Integer fractionDigits) {
+ this.fractionDigits = fractionDigits;
+ }
- @Override
- public String toString() {
- final StringBuilder sb = new StringBuilder(
- TypeDefinitionImpl.class.getSimpleName());
- sb.append("[");
- sb.append("qname=" + qname);
- sb.append(", path=" + path);
- sb.append(", description=" + description);
- sb.append(", reference=" + reference);
- sb.append(", status=" + status);
- sb.append(", baseType=" + baseType + "]");
- return sb.toString();
+ @Override
+ public String toString() {
+ String result = "TypedefBuilder[" + qname.getLocalName();
+ result += ", type=";
+ if (type == null) {
+ result += typedef;
+ } else {
+ result += type;
}
+ result += "]";
+ return result;
}
}
package org.opendaylight.controller.yang.model.parser.builder.impl;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.SchemaPath;
+import org.opendaylight.controller.yang.model.api.Status;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
+import org.opendaylight.controller.yang.model.api.type.LengthConstraint;
+import org.opendaylight.controller.yang.model.api.type.PatternConstraint;
+import org.opendaylight.controller.yang.model.api.type.RangeConstraint;
+import org.opendaylight.controller.yang.model.parser.builder.api.AbstractTypeAwareBuilder;
import org.opendaylight.controller.yang.model.parser.builder.api.Builder;
-import org.opendaylight.controller.yang.model.parser.builder.api.TypeAwareBuilder;
import org.opendaylight.controller.yang.model.parser.builder.api.TypeDefinitionBuilder;
import org.opendaylight.controller.yang.model.util.UnionType;
-public class UnionTypeBuilder implements TypeAwareBuilder, TypeDefinitionBuilder, Builder {
-
+/**
+ * Builder for YANG union type. User can add type to this union as
+ * TypeDefinition object (resolved type) or in form of TypeDefinitionBuilder.
+ * When build is called, types in builder form will be transformed to
+ * TypeDefinition objects and add to resolved types.
+ */
+public class UnionTypeBuilder extends AbstractTypeAwareBuilder implements
+ TypeDefinitionBuilder, Builder {
private final List<TypeDefinition<?>> types;
private final List<TypeDefinitionBuilder> typedefs;
private final UnionType instance;
@Override
public TypeDefinition<?> getType() {
- return instance;
+ return null;
+ }
+
+ public List<TypeDefinitionBuilder> getTypedefs() {
+ return Collections.unmodifiableList(typedefs);
+ }
+
+ @Override
+ public TypeDefinitionBuilder getTypedef() {
+ return null;
}
@Override
types.add(type);
}
- public void addType(TypeDefinitionBuilder tdb) {
+ @Override
+ public void setType(TypeDefinitionBuilder tdb) {
typedefs.add(tdb);
}
@Override
public UnionType build() {
- for(TypeDefinitionBuilder tdb : typedefs) {
+ for (TypeDefinitionBuilder tdb : typedefs) {
types.add(tdb.build());
}
return instance;
}
+ @Override
+ public void setPath(SchemaPath schemaPath) {
+ // TODO Auto-generated method stub
+ }
+
+ @Override
+ public void setDescription(String description) {
+ // TODO Auto-generated method stub
+ }
+
+ @Override
+ public void setReference(String reference) {
+ // TODO Auto-generated method stub
+ }
+
+ @Override
+ public void setStatus(Status status) {
+ // TODO Auto-generated method stub
+ }
+
+ @Override
+ public void addUnknownSchemaNode(
+ UnknownSchemaNodeBuilder unknownSchemaNodeBuilder) {
+ // TODO Auto-generated method stub
+ }
+
@Override
public QName getQName() {
// TODO Auto-generated method stub
}
@Override
- public TypeDefinition<?> getBaseType() {
+ public SchemaPath getPath() {
// TODO Auto-generated method stub
return null;
}
@Override
- public void setUnits(String units) {
+ public String getDescription() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public String getReference() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Status getStatus() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public List<RangeConstraint> getRanges() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void setRanges(List<RangeConstraint> ranges) {
+ // TODO Auto-generated method stub
+ }
+
+ @Override
+ public List<LengthConstraint> getLengths() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void setLengths(List<LengthConstraint> lengths) {
+ // TODO Auto-generated method stub
+ }
+
+ @Override
+ public List<PatternConstraint> getPatterns() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void setPatterns(List<PatternConstraint> patterns) {
+ // TODO Auto-generated method stub
+ }
+
+ @Override
+ public Integer getFractionDigits() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void setFractionDigits(Integer fractionDigits) {
// TODO Auto-generated method stub
+ }
+ @Override
+ public List<UnknownSchemaNodeBuilder> getUnknownNodes() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public Object getDefaultValue() {
+ // TODO Auto-generated method stub
+ return null;
}
@Override
public void setDefaultValue(Object defaultValue) {
// TODO Auto-generated method stub
+ }
+
+ @Override
+ public String getUnits() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+ @Override
+ public void setUnits(String units) {
+ // TODO Auto-generated method stub
+ }
+
+ @Override
+ public String toString() {
+ String result = "UnionTypeBuilder[";
+ result += ", types=" + types;
+ result += ", typedefs=" + typedefs;
+ result += "]";
+ return result;
}
}
-/*\r
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.\r
- *\r
- * This program and the accompanying materials are made available under the\r
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,\r
- * and is available at http://www.eclipse.org/legal/epl-v10.html\r
- */\r
-package org.opendaylight.controller.yang.model.parser.impl;\r
-\r
-import java.io.File;\r
-import java.io.FileInputStream;\r
-import java.io.FileNotFoundException;\r
-import java.io.IOException;\r
-import java.io.InputStream;\r
-import java.util.ArrayList;\r
-import java.util.Calendar;\r
-import java.util.Date;\r
-import java.util.HashMap;\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Map;\r
-import java.util.Set;\r
-import java.util.TreeMap;\r
-\r
-import org.antlr.v4.runtime.ANTLRInputStream;\r
-import org.antlr.v4.runtime.CommonTokenStream;\r
-import org.antlr.v4.runtime.tree.ParseTree;\r
-import org.antlr.v4.runtime.tree.ParseTreeWalker;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangLexer;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser;\r
-import org.opendaylight.controller.yang.common.QName;\r
-import org.opendaylight.controller.yang.model.api.AugmentationSchema;\r
-import org.opendaylight.controller.yang.model.api.DataSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.ExtensionDefinition;\r
-import org.opendaylight.controller.yang.model.api.Module;\r
-import org.opendaylight.controller.yang.model.api.ModuleImport;\r
-import org.opendaylight.controller.yang.model.api.NotificationDefinition;\r
-import org.opendaylight.controller.yang.model.api.RpcDefinition;\r
-import org.opendaylight.controller.yang.model.api.SchemaContext;\r
-import org.opendaylight.controller.yang.model.api.SchemaPath;\r
-import org.opendaylight.controller.yang.model.api.TypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.BinaryTypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.BitsTypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.BitsTypeDefinition.Bit;\r
-import org.opendaylight.controller.yang.model.api.type.DecimalTypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.IntegerTypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.LengthConstraint;\r
-import org.opendaylight.controller.yang.model.api.type.PatternConstraint;\r
-import org.opendaylight.controller.yang.model.api.type.RangeConstraint;\r
-import org.opendaylight.controller.yang.model.api.type.StringTypeDefinition;\r
-import org.opendaylight.controller.yang.model.parser.api.YangModelParser;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.AugmentationSchemaBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.AugmentationTargetBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.ChildNodeBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.DataSchemaNodeBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.TypeAwareBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.TypeDefinitionBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.impl.IdentitySchemaNodeBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.impl.ModuleBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.impl.UnionTypeBuilder;\r
-import org.opendaylight.controller.yang.model.util.BaseConstraints;\r
-import org.opendaylight.controller.yang.model.util.BinaryType;\r
-import org.opendaylight.controller.yang.model.util.BitsType;\r
-import org.opendaylight.controller.yang.model.util.StringType;\r
-import org.opendaylight.controller.yang.model.util.UnknownType;\r
-import org.opendaylight.controller.yang.model.util.YangTypesConverter;\r
-import org.slf4j.Logger;\r
-import org.slf4j.LoggerFactory;\r
-\r
-public class YangModelParserImpl implements YangModelParser {\r
-\r
- private static final Logger logger = LoggerFactory\r
- .getLogger(YangModelParserImpl.class);\r
-\r
- @Override\r
- public Module parseYangModel(String yangFile) {\r
- final Map<String, TreeMap<Date, ModuleBuilder>> modules = resolveModuleBuildersFromStreams(yangFile);\r
- Set<Module> result = build(modules);\r
- return result.iterator().next();\r
- }\r
-\r
- @Override\r
- public Set<Module> parseYangModels(String... yangFiles) {\r
- final Map<String, TreeMap<Date, ModuleBuilder>> modules = resolveModuleBuildersFromStreams(yangFiles);\r
- Set<Module> result = build(modules);\r
- return result;\r
- }\r
-\r
- @Override\r
- public Set<Module> parseYangModelsFromStreams(\r
- InputStream... yangModelStreams) {\r
- final Map<String, TreeMap<Date, ModuleBuilder>> modules = resolveModuleBuildersFromStreams(yangModelStreams);\r
- Set<Module> result = build(modules);\r
- return result;\r
- }\r
-\r
- @Override\r
- public SchemaContext resolveSchemaContext(Set<Module> modules) {\r
- return new SchemaContextImpl(modules);\r
- }\r
-\r
- private Map<String, TreeMap<Date, ModuleBuilder>> resolveModuleBuildersFromStreams(\r
- String... yangFiles) {\r
- InputStream[] streams = new InputStream[yangFiles.length];\r
- for (int i = 0; i < yangFiles.length; i++) {\r
- final String yangFileName = yangFiles[i];\r
- final File yangFile = new File(yangFileName);\r
- FileInputStream inStream = null;\r
- try {\r
- inStream = new FileInputStream(yangFile);\r
- } catch (FileNotFoundException e) {\r
- logger.warn("Exception while reading yang stream: " + inStream,\r
- e);\r
- }\r
- streams[i] = inStream;\r
- }\r
- return resolveModuleBuildersFromStreams(streams);\r
- }\r
-\r
- private Map<String, TreeMap<Date, ModuleBuilder>> resolveModuleBuildersFromStreams(\r
- InputStream... yangFiles) {\r
- final Map<String, TreeMap<Date, ModuleBuilder>> modules = new HashMap<String, TreeMap<Date, ModuleBuilder>>();\r
- final ParseTreeWalker walker = new ParseTreeWalker();\r
- final List<ParseTree> trees = parseStreams(yangFiles);\r
- final ModuleBuilder[] builders = new ModuleBuilder[trees.size()];\r
-\r
- for (int i = 0; i < trees.size(); i++) {\r
- final YangModelParserListenerImpl yangModelParser = new YangModelParserListenerImpl();\r
- walker.walk(yangModelParser, trees.get(i));\r
- builders[i] = yangModelParser.getModuleBuilder();\r
- }\r
-\r
- for (ModuleBuilder builder : builders) {\r
- final String builderName = builder.getName();\r
- Date builderRevision = builder.getRevision();\r
- if (builderRevision == null) {\r
- builderRevision = createEpochTime();\r
- }\r
- TreeMap<Date, ModuleBuilder> builderByRevision = modules\r
- .get(builderName);\r
- if (builderByRevision == null) {\r
- builderByRevision = new TreeMap<Date, ModuleBuilder>();\r
- }\r
- builderByRevision.put(builderRevision, builder);\r
- modules.put(builderName, builderByRevision);\r
- }\r
- return modules;\r
- }\r
-\r
- private List<ParseTree> parseStreams(InputStream... yangStreams) {\r
- List<ParseTree> trees = new ArrayList<ParseTree>();\r
- for (InputStream yangStream : yangStreams) {\r
- trees.add(parseStream(yangStream));\r
- }\r
- return trees;\r
- }\r
-\r
- private ParseTree parseStream(InputStream yangStream) {\r
- ParseTree result = null;\r
- try {\r
- final ANTLRInputStream input = new ANTLRInputStream(yangStream);\r
- final YangLexer lexer = new YangLexer(input);\r
- final CommonTokenStream tokens = new CommonTokenStream(lexer);\r
- final YangParser parser = new YangParser(tokens);\r
- result = parser.yang();\r
- } catch (IOException e) {\r
- logger.warn("Exception while reading yang file: " + yangStream, e);\r
- }\r
- return result;\r
- }\r
-\r
- private Set<Module> build(Map<String, TreeMap<Date, ModuleBuilder>> modules) {\r
- // first validate\r
- for (Map.Entry<String, TreeMap<Date, ModuleBuilder>> entry : modules\r
- .entrySet()) {\r
- for (Map.Entry<Date, ModuleBuilder> childEntry : entry.getValue()\r
- .entrySet()) {\r
- ModuleBuilder moduleBuilder = childEntry.getValue();\r
- validateBuilder(modules, moduleBuilder);\r
- }\r
- }\r
- // then build\r
- final Set<Module> result = new HashSet<Module>();\r
- for (Map.Entry<String, TreeMap<Date, ModuleBuilder>> entry : modules\r
- .entrySet()) {\r
- final Map<Date, Module> modulesByRevision = new HashMap<Date, Module>();\r
- for (Map.Entry<Date, ModuleBuilder> childEntry : entry.getValue()\r
- .entrySet()) {\r
- ModuleBuilder moduleBuilder = childEntry.getValue();\r
- modulesByRevision.put(childEntry.getKey(),\r
- moduleBuilder.build());\r
- result.add(moduleBuilder.build());\r
- }\r
- }\r
-\r
- return result;\r
- }\r
-\r
- private void validateBuilder(\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder builder) {\r
- resolveTypedefs(modules, builder);\r
- resolveAugments(modules, builder);\r
- resolveIdentities(modules, builder);\r
- }\r
-\r
- /**\r
- * Search for dirty nodes (node which contains UnknownType) and resolve\r
- * unknown types.\r
- *\r
- * @param modules\r
- * all available modules\r
- * @param module\r
- * current module\r
- */\r
- private void resolveTypedefs(\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder module) {\r
- Map<List<String>, TypeAwareBuilder> dirtyNodes = module.getDirtyNodes();\r
- if (dirtyNodes.size() == 0) {\r
- return;\r
- } else {\r
- for (Map.Entry<List<String>, TypeAwareBuilder> entry : dirtyNodes\r
- .entrySet()) {\r
- TypeAwareBuilder typeToResolve = entry.getValue();\r
-\r
- if (typeToResolve instanceof UnionTypeBuilder) {\r
- resolveUnionTypeBuilder(modules, module,\r
- (UnionTypeBuilder) typeToResolve);\r
- } else {\r
- UnknownType ut = (UnknownType) typeToResolve.getType();\r
- TypeDefinition<?> resolvedType = findTargetType(ut,\r
- modules, module);\r
- typeToResolve.setType(resolvedType);\r
- }\r
- }\r
- }\r
- }\r
-\r
- private UnionTypeBuilder resolveUnionTypeBuilder(\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder builder, UnionTypeBuilder unionTypeBuilderToResolve) {\r
- List<TypeDefinition<?>> resolvedTypes = new ArrayList<TypeDefinition<?>>();\r
- List<TypeDefinition<?>> typesToRemove = new ArrayList<TypeDefinition<?>>();\r
-\r
- for (TypeDefinition<?> td : unionTypeBuilderToResolve.getTypes()) {\r
- if (td instanceof UnknownType) {\r
- TypeDefinition<?> resolvedType = findTargetType(\r
- (UnknownType) td, modules, builder);\r
- resolvedTypes.add(resolvedType);\r
- typesToRemove.add(td);\r
- }\r
- }\r
-\r
- List<TypeDefinition<?>> unionTypeBuilderTypes = unionTypeBuilderToResolve\r
- .getTypes();\r
- unionTypeBuilderTypes.addAll(resolvedTypes);\r
- unionTypeBuilderTypes.removeAll(typesToRemove);\r
-\r
- return unionTypeBuilderToResolve;\r
- }\r
-\r
- private TypeDefinition<?> findTargetType(UnknownType ut,\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder builder) {\r
-\r
- Map<TypeDefinitionBuilder, TypeConstraints> foundedTypeDefinitionBuilder = findTypeDefinitionBuilderWithConstraints(\r
- modules, ut, builder);\r
- TypeDefinitionBuilder targetType = foundedTypeDefinitionBuilder\r
- .entrySet().iterator().next().getKey();\r
- TypeConstraints constraints = foundedTypeDefinitionBuilder.entrySet()\r
- .iterator().next().getValue();\r
-\r
- TypeDefinition<?> targetTypeBaseType = targetType.getBaseType();\r
-\r
- // RANGE\r
- List<RangeConstraint> ranges = ut.getRangeStatements();\r
- resolveRanges(ranges, targetType, modules, builder);\r
- // LENGTH\r
- List<LengthConstraint> lengths = ut.getLengthStatements();\r
- resolveLengths(lengths, targetType, modules, builder);\r
- // PATTERN\r
- List<PatternConstraint> patterns = ut.getPatterns();\r
- // Fraction Digits\r
- Integer fractionDigits = ut.getFractionDigits();\r
-\r
- targetTypeBaseType = mergeConstraints(targetTypeBaseType, constraints, ranges, lengths,\r
- patterns, fractionDigits);\r
-\r
- return targetTypeBaseType;\r
- }\r
-\r
- /**\r
- * Merge curent constraints with founded type constraints\r
- *\r
- * @param targetTypeBaseType\r
- * @param constraints\r
- * @param ranges\r
- * @param lengths\r
- * @param patterns\r
- * @param fractionDigits\r
- */\r
- private TypeDefinition<?> mergeConstraints(TypeDefinition<?> targetTypeBaseType,\r
- TypeConstraints constraints, List<RangeConstraint> ranges,\r
- List<LengthConstraint> lengths, List<PatternConstraint> patterns,\r
- Integer fractionDigits) {\r
- String targetTypeBaseTypeName = targetTypeBaseType.getQName()\r
- .getLocalName();\r
- // enumeration, leafref and identityref omitted because they have no\r
- // restrictions\r
- if (targetTypeBaseType instanceof DecimalTypeDefinition) {\r
- List<RangeConstraint> fullRanges = new ArrayList<RangeConstraint>();\r
- fullRanges.addAll(constraints.getRanges());\r
- fullRanges.addAll(ranges);\r
- Integer fd = fractionDigits == null ? constraints\r
- .getFractionDigits() : fractionDigits;\r
- targetTypeBaseType = YangTypesConverter\r
- .javaTypeForBaseYangDecimal64Type(fullRanges, fd);\r
- } else if (targetTypeBaseType instanceof IntegerTypeDefinition) {\r
- List<RangeConstraint> fullRanges = new ArrayList<RangeConstraint>();\r
- fullRanges.addAll(constraints.getRanges());\r
- fullRanges.addAll(ranges);\r
- if (targetTypeBaseTypeName.startsWith("int")) {\r
- targetTypeBaseType = YangTypesConverter\r
- .javaTypeForBaseYangSignedIntegerType(\r
- targetTypeBaseTypeName, fullRanges);\r
- } else {\r
- targetTypeBaseType = YangTypesConverter\r
- .javaTypeForBaseYangUnsignedIntegerType(\r
- targetTypeBaseTypeName, fullRanges);\r
- }\r
- } else if (targetTypeBaseType instanceof StringTypeDefinition) {\r
- List<LengthConstraint> fullLengths = new ArrayList<LengthConstraint>();\r
- fullLengths.addAll(constraints.getLengths());\r
- fullLengths.addAll(lengths);\r
- List<PatternConstraint> fullPatterns = new ArrayList<PatternConstraint>();\r
- fullPatterns.addAll(constraints.getPatterns());\r
- fullPatterns.addAll(patterns);\r
- targetTypeBaseType = new StringType(fullLengths, fullPatterns);\r
- } else if (targetTypeBaseType instanceof BitsTypeDefinition) {\r
- BitsTypeDefinition bitsType = (BitsTypeDefinition) targetTypeBaseType;\r
- List<Bit> bits = bitsType.getBits();\r
- targetTypeBaseType = new BitsType(bits);\r
- } else if (targetTypeBaseType instanceof BinaryTypeDefinition) {\r
- targetTypeBaseType = new BinaryType(null, lengths, null);\r
- }\r
- return targetTypeBaseType;\r
- }\r
-\r
- private TypeDefinitionBuilder findTypeDefinitionBuilder(\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- UnknownType unknownType, ModuleBuilder builder) {\r
- Map<TypeDefinitionBuilder, TypeConstraints> result = findTypeDefinitionBuilderWithConstraints(\r
- modules, unknownType, builder);\r
- return result.entrySet().iterator().next().getKey();\r
- }\r
-\r
- private Map<TypeDefinitionBuilder, TypeConstraints> findTypeDefinitionBuilderWithConstraints(\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- UnknownType unknownType, ModuleBuilder builder) {\r
- return findTypeDefinitionBuilderWithConstraints(new TypeConstraints(),\r
- modules, unknownType, builder);\r
- }\r
-\r
- /**\r
- * Traverse through all referenced types chain until base YANG type is\r
- * founded.\r
- *\r
- * @param constraints\r
- * current type constraints\r
- * @param modules\r
- * all available modules\r
- * @param unknownType\r
- * unknown type\r
- * @param builder\r
- * current module\r
- * @return map, where key is type referenced and value is its constraints\r
- */\r
- private Map<TypeDefinitionBuilder, TypeConstraints> findTypeDefinitionBuilderWithConstraints(\r
- TypeConstraints constraints,\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- UnknownType unknownType, ModuleBuilder builder) {\r
- Map<TypeDefinitionBuilder, TypeConstraints> result = new HashMap<TypeDefinitionBuilder, TypeConstraints>();\r
- QName unknownTypeQName = unknownType.getQName();\r
- String unknownTypeName = unknownTypeQName.getLocalName();\r
- String unknownTypePrefix = unknownTypeQName.getPrefix();\r
-\r
- // search for module which contains referenced typedef\r
- ModuleBuilder dependentModuleBuilder;\r
- if (unknownTypePrefix.equals(builder.getPrefix())) {\r
- dependentModuleBuilder = builder;\r
- } else {\r
- dependentModuleBuilder = findDependentModule(modules, builder,\r
- unknownTypePrefix);\r
- }\r
-\r
- // pull all typedef statements from dependent module...\r
- final Set<TypeDefinitionBuilder> typedefs = dependentModuleBuilder\r
- .getModuleTypedefs();\r
- // and search for referenced typedef\r
- TypeDefinitionBuilder lookedUpBuilder = null;\r
- for (TypeDefinitionBuilder tdb : typedefs) {\r
- QName qname = tdb.getQName();\r
- if (qname.getLocalName().equals(unknownTypeName)) {\r
- lookedUpBuilder = tdb;\r
- break;\r
- }\r
- }\r
-\r
- // if referenced type is UnknownType again, search recursively with\r
- // current constraints\r
- TypeDefinition<?> referencedType = lookedUpBuilder.getBaseType();\r
- if (referencedType instanceof UnknownType) {\r
- UnknownType unknown = (UnknownType) lookedUpBuilder.getBaseType();\r
-\r
- final List<RangeConstraint> ranges = unknown.getRangeStatements();\r
- constraints.addRanges(ranges);\r
- final List<LengthConstraint> lengths = unknown\r
- .getLengthStatements();\r
- constraints.addLengths(lengths);\r
- final List<PatternConstraint> patterns = unknown.getPatterns();\r
- constraints.addPatterns(patterns);\r
- return findTypeDefinitionBuilderWithConstraints(constraints,\r
- modules, unknown, dependentModuleBuilder);\r
- } else {\r
- // pull restriction from this base type and add them to\r
- // 'constraints'\r
- if (referencedType instanceof DecimalTypeDefinition) {\r
- constraints.addRanges(((DecimalTypeDefinition) referencedType)\r
- .getRangeStatements());\r
- constraints\r
- .setFractionDigits(((DecimalTypeDefinition) referencedType)\r
- .getFractionDigits());\r
- } else if (referencedType instanceof IntegerTypeDefinition) {\r
- constraints.addRanges(((IntegerTypeDefinition) referencedType)\r
- .getRangeStatements());\r
- } else if (referencedType instanceof StringTypeDefinition) {\r
- constraints.addPatterns(((StringTypeDefinition) referencedType)\r
- .getPatterns());\r
- } else if (referencedType instanceof BinaryTypeDefinition) {\r
- constraints.addLengths(((BinaryTypeDefinition) referencedType)\r
- .getLengthConstraints());\r
- }\r
- result.put(lookedUpBuilder, constraints);\r
- return result;\r
- }\r
- }\r
-\r
- /**\r
- * Go through all augmentation definitions and resolve them. This means find\r
- * referenced node and add child nodes to it.\r
- *\r
- * @param modules\r
- * all available modules\r
- * @param module\r
- * current module\r
- */\r
- private void resolveAugments(\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder module) {\r
- Set<AugmentationSchemaBuilder> augmentBuilders = module\r
- .getAddedAugments();\r
-\r
- Set<AugmentationSchema> augments = new HashSet<AugmentationSchema>();\r
- for (AugmentationSchemaBuilder augmentBuilder : augmentBuilders) {\r
- SchemaPath augmentTargetSchemaPath = augmentBuilder.getTargetPath();\r
- String prefix = null;\r
- List<String> augmentTargetPath = new ArrayList<String>();\r
-\r
- for (QName pathPart : augmentTargetSchemaPath.getPath()) {\r
- prefix = pathPart.getPrefix();\r
- augmentTargetPath.add(pathPart.getLocalName());\r
- }\r
- ModuleBuilder dependentModule = findDependentModule(modules,\r
- module, prefix);\r
- //\r
- augmentTargetPath.add(0, dependentModule.getName());\r
- //\r
-\r
-\r
- AugmentationTargetBuilder augmentTarget = (AugmentationTargetBuilder) dependentModule\r
- .getNode(augmentTargetPath);\r
- AugmentationSchema result = augmentBuilder.build();\r
- augmentTarget.addAugmentation(result);\r
- fillAugmentTarget(augmentBuilder, (ChildNodeBuilder) augmentTarget);\r
- augments.add(result);\r
- }\r
- module.setAugmentations(augments);\r
- }\r
-\r
- /**\r
- * Add all augment's child nodes to given target.\r
- *\r
- * @param augment\r
- * @param target\r
- */\r
- private void fillAugmentTarget(AugmentationSchemaBuilder augment,\r
- ChildNodeBuilder target) {\r
- for (DataSchemaNodeBuilder builder : augment.getChildNodes()) {\r
- builder.setAugmenting(true);\r
- target.addChildNode(builder);\r
- }\r
- }\r
-\r
- /**\r
- * Go through identity statements defined in current module and resolve\r
- * their 'base' statement if present.\r
- *\r
- * @param modules\r
- * all modules\r
- * @param module\r
- * module being resolved\r
- */\r
- private void resolveIdentities(\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder module) {\r
- Set<IdentitySchemaNodeBuilder> identities = module.getAddedIdentities();\r
- for (IdentitySchemaNodeBuilder identity : identities) {\r
- String baseIdentityName = identity.getBaseIdentityName();\r
- if (baseIdentityName != null) {\r
- String baseIdentityPrefix = null;\r
- String baseIdentityLocalName = null;\r
- if (baseIdentityName.contains(":")) {\r
- String[] splitted = baseIdentityName.split(":");\r
- baseIdentityPrefix = splitted[0];\r
- baseIdentityLocalName = splitted[1];\r
- } else {\r
- baseIdentityPrefix = module.getPrefix();\r
- baseIdentityLocalName = baseIdentityName;\r
- }\r
- ModuleBuilder dependentModule;\r
- if (baseIdentityPrefix.equals(module.getPrefix())) {\r
- dependentModule = module;\r
- } else {\r
- dependentModule = findDependentModule(modules, module,\r
- baseIdentityPrefix);\r
- }\r
-\r
- Set<IdentitySchemaNodeBuilder> dependentModuleIdentities = dependentModule\r
- .getAddedIdentities();\r
- for (IdentitySchemaNodeBuilder idBuilder : dependentModuleIdentities) {\r
- if (idBuilder.getQName().getLocalName()\r
- .equals(baseIdentityLocalName)) {\r
- identity.setBaseIdentity(idBuilder);\r
- }\r
- }\r
- }\r
- }\r
- }\r
-\r
- /**\r
- * Find dependent module based on given prefix\r
- *\r
- * @param modules\r
- * all available modules\r
- * @param module\r
- * current module\r
- * @param prefix\r
- * target module prefix\r
- * @return dependent module builder\r
- */\r
- private ModuleBuilder findDependentModule(\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder module, String prefix) {\r
- ModuleImport dependentModuleImport = getModuleImport(module, prefix);\r
- String dependentModuleName = dependentModuleImport.getModuleName();\r
- Date dependentModuleRevision = dependentModuleImport.getRevision();\r
-\r
- TreeMap<Date, ModuleBuilder> moduleBuildersByRevision = modules\r
- .get(dependentModuleName);\r
- ModuleBuilder dependentModule;\r
- if (dependentModuleRevision == null) {\r
- dependentModule = moduleBuildersByRevision.lastEntry().getValue();\r
- } else {\r
- dependentModule = moduleBuildersByRevision\r
- .get(dependentModuleRevision);\r
- }\r
- return dependentModule;\r
- }\r
-\r
- /**\r
- * Get module import referenced by given prefix.\r
- *\r
- * @param builder\r
- * module to search\r
- * @param prefix\r
- * prefix associated with import\r
- * @return ModuleImport based on given prefix\r
- */\r
- private ModuleImport getModuleImport(ModuleBuilder builder, String prefix) {\r
- ModuleImport moduleImport = null;\r
- for (ModuleImport mi : builder.getModuleImports()) {\r
- if (mi.getPrefix().equals(prefix)) {\r
- moduleImport = mi;\r
- break;\r
- }\r
- }\r
- return moduleImport;\r
- }\r
-\r
- /**\r
- * Helper method for resolving special 'min' or 'max' values in range\r
- * constraint\r
- *\r
- * @param ranges\r
- * ranges to resolve\r
- * @param targetType\r
- * target type\r
- * @param modules\r
- * all available modules\r
- * @param builder\r
- * current module\r
- */\r
- private void resolveRanges(List<RangeConstraint> ranges,\r
- TypeDefinitionBuilder targetType,\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder builder) {\r
- if (ranges != null && ranges.size() > 0) {\r
- Long min = (Long) ranges.get(0).getMin();\r
- Long max = (Long) ranges.get(ranges.size() - 1).getMax();\r
- // if range contains one of the special values 'min' or 'max'\r
- if (min.equals(Long.MIN_VALUE) || max.equals(Long.MAX_VALUE)) {\r
- Long[] values = parseRangeConstraint(targetType, modules,\r
- builder);\r
- if (min.equals(Long.MIN_VALUE)) {\r
- min = values[0];\r
- RangeConstraint oldFirst = ranges.get(0);\r
- RangeConstraint newFirst = BaseConstraints.rangeConstraint(\r
- min, oldFirst.getMax(), oldFirst.getDescription(),\r
- oldFirst.getReference());\r
- ranges.set(0, newFirst);\r
- }\r
- if (max.equals(Long.MAX_VALUE)) {\r
- max = values[1];\r
- RangeConstraint oldLast = ranges.get(ranges.size() - 1);\r
- RangeConstraint newLast = BaseConstraints.rangeConstraint(\r
- oldLast.getMin(), max, oldLast.getDescription(),\r
- oldLast.getReference());\r
- ranges.set(ranges.size() - 1, newLast);\r
- }\r
- }\r
- }\r
- }\r
-\r
- /**\r
- * Helper method for resolving special 'min' or 'max' values in length\r
- * constraint\r
- *\r
- * @param lengths\r
- * lengths to resolve\r
- * @param targetType\r
- * target type\r
- * @param modules\r
- * all available modules\r
- * @param builder\r
- * current module\r
- */\r
- private void resolveLengths(List<LengthConstraint> lengths,\r
- TypeDefinitionBuilder targetType,\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder builder) {\r
- if (lengths != null && lengths.size() > 0) {\r
- Long min = lengths.get(0).getMin().longValue();\r
- Long max = lengths.get(lengths.size() - 1).getMax().longValue();\r
- // if length contains one of the special values 'min' or 'max'\r
- if (min.equals(Long.MIN_VALUE) || max.equals(Long.MAX_VALUE)) {\r
- Long[] values = parseRangeConstraint(targetType, modules,\r
- builder);\r
- if (min.equals(Long.MIN_VALUE)) {\r
- min = values[0];\r
- LengthConstraint oldFirst = lengths.get(0);\r
- LengthConstraint newFirst = BaseConstraints\r
- .lengthConstraint(min, oldFirst.getMax(),\r
- oldFirst.getDescription(),\r
- oldFirst.getReference());\r
- lengths.set(0, newFirst);\r
- }\r
- if (max.equals(Long.MAX_VALUE)) {\r
- max = values[1];\r
- LengthConstraint oldLast = lengths.get(lengths.size() - 1);\r
- LengthConstraint newLast = BaseConstraints\r
- .lengthConstraint(oldLast.getMin(), max,\r
- oldLast.getDescription(),\r
- oldLast.getReference());\r
- lengths.set(lengths.size() - 1, newLast);\r
- }\r
- }\r
- }\r
- }\r
-\r
- private Long[] parseRangeConstraint(TypeDefinitionBuilder targetType,\r
- Map<String, TreeMap<Date, ModuleBuilder>> modules,\r
- ModuleBuilder builder) {\r
- TypeDefinition<?> targetBaseType = targetType.getBaseType();\r
-\r
- if (targetBaseType instanceof IntegerTypeDefinition) {\r
- IntegerTypeDefinition itd = (IntegerTypeDefinition) targetBaseType;\r
- List<RangeConstraint> ranges = itd.getRangeStatements();\r
- Long min = (Long) ranges.get(0).getMin();\r
- Long max = (Long) ranges.get(ranges.size() - 1).getMax();\r
- return new Long[] { min, max };\r
- } else if (targetBaseType instanceof DecimalTypeDefinition) {\r
- DecimalTypeDefinition dtd = (DecimalTypeDefinition) targetBaseType;\r
- List<RangeConstraint> ranges = dtd.getRangeStatements();\r
- Long min = (Long) ranges.get(0).getMin();\r
- Long max = (Long) ranges.get(ranges.size() - 1).getMax();\r
- return new Long[] { min, max };\r
- } else {\r
- return parseRangeConstraint(\r
- findTypeDefinitionBuilder(modules,\r
- (UnknownType) targetBaseType, builder), modules,\r
- builder);\r
- }\r
- }\r
-\r
- private Date createEpochTime() {\r
- Calendar c = Calendar.getInstance();\r
- c.setTimeInMillis(0);\r
- return c.getTime();\r
- }\r
-\r
- private static class SchemaContextImpl implements SchemaContext {\r
- private final Set<Module> modules;\r
-\r
- private SchemaContextImpl(Set<Module> modules) {\r
- this.modules = modules;\r
- }\r
-\r
- @Override\r
- public Set<DataSchemaNode> getDataDefinitions() {\r
- final Set<DataSchemaNode> dataDefs = new HashSet<DataSchemaNode>();\r
- for (Module m : modules) {\r
- dataDefs.addAll(m.getChildNodes());\r
- }\r
- return dataDefs;\r
- }\r
-\r
- @Override\r
- public Set<Module> getModules() {\r
- return modules;\r
- }\r
-\r
- @Override\r
- public Set<NotificationDefinition> getNotifications() {\r
- final Set<NotificationDefinition> notifications = new HashSet<NotificationDefinition>();\r
- for (Module m : modules) {\r
- notifications.addAll(m.getNotifications());\r
- }\r
- return notifications;\r
- }\r
-\r
- @Override\r
- public Set<RpcDefinition> getOperations() {\r
- final Set<RpcDefinition> rpcs = new HashSet<RpcDefinition>();\r
- for (Module m : modules) {\r
- rpcs.addAll(m.getRpcs());\r
- }\r
- return rpcs;\r
- }\r
-\r
- @Override\r
- public Set<ExtensionDefinition> getExtensions() {\r
- final Set<ExtensionDefinition> extensions = new HashSet<ExtensionDefinition>();\r
- for (Module m : modules) {\r
- extensions.addAll(m.getExtensionSchemaNodes());\r
- }\r
- return extensions;\r
- }\r
- }\r
-\r
- private static class TypeConstraints {\r
- private final List<RangeConstraint> ranges = new ArrayList<RangeConstraint>();\r
- private final List<LengthConstraint> lengths = new ArrayList<LengthConstraint>();\r
- private final List<PatternConstraint> patterns = new ArrayList<PatternConstraint>();\r
- private Integer fractionDigits;\r
-\r
- public List<RangeConstraint> getRanges() {\r
- return ranges;\r
- }\r
-\r
- public void addRanges(List<RangeConstraint> ranges) {\r
- this.ranges.addAll(0, ranges);\r
- }\r
-\r
- public List<LengthConstraint> getLengths() {\r
- return lengths;\r
- }\r
-\r
- public void addLengths(List<LengthConstraint> lengths) {\r
- this.lengths.addAll(0, lengths);\r
- }\r
-\r
- public List<PatternConstraint> getPatterns() {\r
- return patterns;\r
- }\r
-\r
- public void addPatterns(List<PatternConstraint> patterns) {\r
- this.patterns.addAll(0, patterns);\r
- }\r
-\r
- public Integer getFractionDigits() {\r
- return fractionDigits;\r
- }\r
-\r
- public void setFractionDigits(Integer fractionDigits) {\r
- if (fractionDigits != null) {\r
- this.fractionDigits = fractionDigits;\r
- }\r
- }\r
- }\r
-\r
-}\r
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.yang.model.parser.impl;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
+import org.antlr.v4.runtime.ANTLRInputStream;
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.tree.ParseTree;
+import org.antlr.v4.runtime.tree.ParseTreeWalker;
+import org.opendaylight.controller.antlrv4.code.gen.YangLexer;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser;
+import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.AugmentationSchema;
+import org.opendaylight.controller.yang.model.api.DataSchemaNode;
+import org.opendaylight.controller.yang.model.api.ExtensionDefinition;
+import org.opendaylight.controller.yang.model.api.Module;
+import org.opendaylight.controller.yang.model.api.ModuleImport;
+import org.opendaylight.controller.yang.model.api.NotificationDefinition;
+import org.opendaylight.controller.yang.model.api.RpcDefinition;
+import org.opendaylight.controller.yang.model.api.SchemaContext;
+import org.opendaylight.controller.yang.model.api.SchemaPath;
+import org.opendaylight.controller.yang.model.api.TypeDefinition;
+import org.opendaylight.controller.yang.model.api.type.BinaryTypeDefinition;
+import org.opendaylight.controller.yang.model.api.type.DecimalTypeDefinition;
+import org.opendaylight.controller.yang.model.api.type.IntegerTypeDefinition;
+import org.opendaylight.controller.yang.model.api.type.LengthConstraint;
+import org.opendaylight.controller.yang.model.api.type.PatternConstraint;
+import org.opendaylight.controller.yang.model.api.type.RangeConstraint;
+import org.opendaylight.controller.yang.model.api.type.StringTypeDefinition;
+import org.opendaylight.controller.yang.model.parser.api.YangModelParser;
+import org.opendaylight.controller.yang.model.parser.builder.api.AugmentationSchemaBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.AugmentationTargetBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.ChildNodeBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.DataSchemaNodeBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.TypeAwareBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.api.TypeDefinitionBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.impl.IdentitySchemaNodeBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.impl.ModuleBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.impl.TypedefBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.impl.UnionTypeBuilder;
+import org.opendaylight.controller.yang.model.parser.util.TypeConstraints;
+import org.opendaylight.controller.yang.model.parser.util.YangParseException;
+import org.opendaylight.controller.yang.model.util.ExtendedType;
+import org.opendaylight.controller.yang.model.util.UnknownType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class YangModelParserImpl implements YangModelParser {
+
+ private static final Logger logger = LoggerFactory
+ .getLogger(YangModelParserImpl.class);
+
+ @Override
+ public Module parseYangModel(final String yangFile) {
+ final Map<String, TreeMap<Date, ModuleBuilder>> modules = resolveModuleBuildersFromStreams(yangFile);
+ final Set<Module> result = build(modules);
+ return result.iterator().next();
+ }
+
+ @Override
+ public Set<Module> parseYangModels(final String... yangFiles) {
+ final Map<String, TreeMap<Date, ModuleBuilder>> modules = resolveModuleBuildersFromStreams(yangFiles);
+ return build(modules);
+ }
+
+ @Override
+ public Set<Module> parseYangModelsFromStreams(
+ final InputStream... yangModelStreams) {
+ final Map<String, TreeMap<Date, ModuleBuilder>> modules = resolveModuleBuildersFromStreams(yangModelStreams);
+ return build(modules);
+ }
+
+ @Override
+ public SchemaContext resolveSchemaContext(final Set<Module> modules) {
+ return new SchemaContextImpl(modules);
+ }
+
+ private Map<String, TreeMap<Date, ModuleBuilder>> resolveModuleBuildersFromStreams(
+ String... yangFiles) {
+ InputStream[] streams = new InputStream[yangFiles.length];
+ FileInputStream inStream = null;
+ for (int i = 0; i < yangFiles.length; i++) {
+ final String yangFileName = yangFiles[i];
+ final File yangFile = new File(yangFileName);
+ try {
+ inStream = new FileInputStream(yangFile);
+ } catch (FileNotFoundException e) {
+ logger.warn("Exception while reading yang stream: " + inStream,
+ e);
+ }
+ streams[i] = inStream;
+ }
+ return resolveModuleBuildersFromStreams(streams);
+ }
+
+ private Map<String, TreeMap<Date, ModuleBuilder>> resolveModuleBuildersFromStreams(
+ InputStream... yangFiles) {
+ final Map<String, TreeMap<Date, ModuleBuilder>> modules = new HashMap<String, TreeMap<Date, ModuleBuilder>>();
+ final ParseTreeWalker walker = new ParseTreeWalker();
+ final List<ParseTree> trees = parseStreams(yangFiles);
+ final ModuleBuilder[] builders = new ModuleBuilder[trees.size()];
+
+ YangModelParserListenerImpl yangModelParser = null;
+ for (int i = 0; i < trees.size(); i++) {
+ yangModelParser = new YangModelParserListenerImpl();
+ walker.walk(yangModelParser, trees.get(i));
+ builders[i] = yangModelParser.getModuleBuilder();
+ }
+
+ for (ModuleBuilder builder : builders) {
+ final String builderName = builder.getName();
+ Date builderRevision = builder.getRevision();
+ if (builderRevision == null) {
+ builderRevision = createEpochTime();
+ }
+ TreeMap<Date, ModuleBuilder> builderByRevision = modules
+ .get(builderName);
+ if (builderByRevision == null) {
+ builderByRevision = new TreeMap<Date, ModuleBuilder>();
+ }
+ builderByRevision.put(builderRevision, builder);
+ modules.put(builderName, builderByRevision);
+ }
+ return modules;
+ }
+
+ private List<ParseTree> parseStreams(InputStream... yangStreams) {
+ final List<ParseTree> trees = new ArrayList<ParseTree>();
+ for (InputStream yangStream : yangStreams) {
+ trees.add(parseStream(yangStream));
+ }
+ return trees;
+ }
+
+ private ParseTree parseStream(InputStream yangStream) {
+ ParseTree result = null;
+ try {
+ final ANTLRInputStream input = new ANTLRInputStream(yangStream);
+ final YangLexer lexer = new YangLexer(input);
+ final CommonTokenStream tokens = new CommonTokenStream(lexer);
+ final YangParser parser = new YangParser(tokens);
+ result = parser.yang();
+ } catch (IOException e) {
+ logger.warn("Exception while reading yang file: " + yangStream, e);
+ }
+ return result;
+ }
+
+ private Set<Module> build(Map<String, TreeMap<Date, ModuleBuilder>> modules) {
+ // validate
+ for (Map.Entry<String, TreeMap<Date, ModuleBuilder>> entry : modules
+ .entrySet()) {
+ for (Map.Entry<Date, ModuleBuilder> childEntry : entry.getValue()
+ .entrySet()) {
+ ModuleBuilder moduleBuilder = childEntry.getValue();
+ validateModule(modules, moduleBuilder);
+ }
+ }
+
+ // build
+ final Set<Module> result = new HashSet<Module>();
+ for (Map.Entry<String, TreeMap<Date, ModuleBuilder>> entry : modules
+ .entrySet()) {
+ final Map<Date, Module> modulesByRevision = new HashMap<Date, Module>();
+ for (Map.Entry<Date, ModuleBuilder> childEntry : entry.getValue()
+ .entrySet()) {
+ ModuleBuilder moduleBuilder = childEntry.getValue();
+ Module module = moduleBuilder.build();
+ modulesByRevision.put(childEntry.getKey(), module);
+ result.add(module);
+ }
+ }
+ return result;
+ }
+
+ private void validateModule(
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder builder) {
+ resolveDirtyNodes(modules, builder);
+ resolveAugments(modules, builder);
+ resolveIdentities(modules, builder);
+ }
+
+ /**
+ * Search for dirty nodes (node which contains UnknownType) and resolve
+ * unknown types.
+ *
+ * @param modules
+ * all available modules
+ * @param module
+ * current module
+ */
+ private void resolveDirtyNodes(
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder module) {
+ final Map<List<String>, TypeAwareBuilder> dirtyNodes = module
+ .getDirtyNodes();
+ if (!dirtyNodes.isEmpty()) {
+ for (Map.Entry<List<String>, TypeAwareBuilder> entry : dirtyNodes
+ .entrySet()) {
+
+ TypeAwareBuilder typeToResolve = entry.getValue();
+ if (typeToResolve instanceof UnionTypeBuilder) {
+ UnionTypeBuilder union = (UnionTypeBuilder) typeToResolve;
+ List<TypeDefinition<?>> unionTypes = union.getTypes();
+ List<UnknownType> toRemove = new ArrayList<UnknownType>();
+ for (TypeDefinition<?> td : unionTypes) {
+ if (td instanceof UnknownType) {
+ UnknownType unknownType = (UnknownType) td;
+ TypeDefinitionBuilder resolvedType = findTargetTypeUnion(
+ typeToResolve, unknownType, modules, module);
+ union.setType(resolvedType);
+ toRemove.add(unknownType);
+ }
+ }
+ unionTypes.removeAll(toRemove);
+ } else {
+ TypeDefinitionBuilder resolvedType = findTargetType(
+ typeToResolve, modules, module);
+ typeToResolve.setType(resolvedType);
+ }
+ }
+ }
+ }
+
+ private TypeDefinitionBuilder findTargetType(
+ TypeAwareBuilder typeToResolve,
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder builder) {
+ TypeConstraints constraints = new TypeConstraints();
+
+ TypeDefinitionBuilder targetType = findTypedef(typeToResolve, modules,
+ builder);
+ TypeConstraints tConstraints = findConstraints(typeToResolve,
+ constraints, modules, builder);
+ targetType.setRanges(tConstraints.getRange());
+ targetType.setLengths(tConstraints.getLength());
+ targetType.setPatterns(tConstraints.getPatterns());
+ targetType.setFractionDigits(tConstraints.getFractionDigits());
+
+ return targetType;
+ }
+
+ private TypeDefinitionBuilder findTargetTypeUnion(
+ TypeAwareBuilder typeToResolve, UnknownType unknownType,
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder builder) {
+ TypeConstraints constraints = new TypeConstraints();
+
+ TypeDefinitionBuilder targetType = findTypedefUnion(typeToResolve,
+ unknownType, modules, builder);
+ TypeConstraints tConstraints = findConstraints(typeToResolve,
+ constraints, modules, builder);
+ targetType.setRanges(tConstraints.getRange());
+ targetType.setLengths(tConstraints.getLength());
+ targetType.setPatterns(tConstraints.getPatterns());
+ targetType.setFractionDigits(tConstraints.getFractionDigits());
+
+ return targetType;
+ }
+
+ private TypeDefinitionBuilder findTypedef(TypeAwareBuilder typeToResolve,
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder builder) {
+
+ TypeDefinition<?> baseTypeToResolve = typeToResolve.getType();
+ if (baseTypeToResolve != null
+ && !(baseTypeToResolve instanceof UnknownType)) {
+ return (TypeDefinitionBuilder) typeToResolve;
+ }
+
+ UnknownType unknownType = (UnknownType) typeToResolve.getType();
+
+ QName unknownTypeQName = unknownType.getQName();
+ String unknownTypeName = unknownTypeQName.getLocalName();
+ String unknownTypePrefix = unknownTypeQName.getPrefix();
+
+ // search for module which contains referenced typedef
+ ModuleBuilder dependentModule = findDependentModule(modules, builder,
+ unknownTypePrefix);
+ TypeDefinitionBuilder lookedUpBuilder = findTypedefBuilder(
+ dependentModule.getModuleTypedefs(), unknownTypeName);
+
+ TypeDefinitionBuilder lookedUpBuilderCopy = copyTypedefBuilder(
+ lookedUpBuilder, typeToResolve instanceof TypeDefinitionBuilder);
+ TypeDefinitionBuilder resolvedCopy = resolveCopiedBuilder(
+ lookedUpBuilderCopy, modules, dependentModule);
+ return resolvedCopy;
+ }
+
+ private TypeDefinitionBuilder findTypedefUnion(
+ TypeAwareBuilder typeToResolve, UnknownType unknownType,
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder builder) {
+
+ TypeDefinition<?> baseTypeToResolve = typeToResolve.getType();
+ if (baseTypeToResolve != null
+ && !(baseTypeToResolve instanceof UnknownType)) {
+ return (TypeDefinitionBuilder) typeToResolve;
+ }
+
+ QName unknownTypeQName = unknownType.getQName();
+ String unknownTypeName = unknownTypeQName.getLocalName();
+ String unknownTypePrefix = unknownTypeQName.getPrefix();
+
+ // search for module which contains referenced typedef
+ ModuleBuilder dependentModule = findDependentModule(modules, builder,
+ unknownTypePrefix);
+ TypeDefinitionBuilder lookedUpBuilder = findTypedefBuilder(
+ dependentModule.getModuleTypedefs(), unknownTypeName);
+
+ TypeDefinitionBuilder lookedUpBuilderCopy = copyTypedefBuilder(
+ lookedUpBuilder, typeToResolve instanceof TypeDefinitionBuilder);
+ TypeDefinitionBuilder resolvedCopy = resolveCopiedBuilder(
+ lookedUpBuilderCopy, modules, dependentModule);
+ return resolvedCopy;
+ }
+
+ private TypeDefinitionBuilder copyTypedefBuilder(TypeDefinitionBuilder old,
+ boolean seekByTypedefBuilder) {
+ if (old instanceof UnionTypeBuilder) {
+ UnionTypeBuilder oldUnion = (UnionTypeBuilder) old;
+ UnionTypeBuilder newUnion = new UnionTypeBuilder();
+ for (TypeDefinition<?> td : oldUnion.getTypes()) {
+ newUnion.setType(td);
+ }
+ for (TypeDefinitionBuilder tdb : oldUnion.getTypedefs()) {
+ newUnion.setType(copyTypedefBuilder(tdb, true));
+ }
+ return newUnion;
+ }
+
+ QName oldQName = old.getQName();
+ QName newQName = new QName(oldQName.getNamespace(),
+ oldQName.getRevision(), oldQName.getPrefix(),
+ oldQName.getLocalName());
+ TypeDefinitionBuilder tdb = new TypedefBuilder(newQName);
+
+ tdb.setRanges(old.getRanges());
+ tdb.setLengths(old.getLengths());
+ tdb.setPatterns(old.getPatterns());
+
+ TypeDefinition<?> oldType = old.getType();
+ if (oldType == null) {
+ tdb.setType(old.getTypedef());
+ } else {
+ tdb.setType(oldType);
+ }
+
+ if (!seekByTypedefBuilder) {
+ tdb.setDescription(old.getDescription());
+ tdb.setReference(old.getReference());
+ tdb.setStatus(old.getStatus());
+ tdb.setDefaultValue(old.getDefaultValue());
+ tdb.setUnits(old.getUnits());
+ }
+ return tdb;
+ }
+
+ private TypeDefinitionBuilder resolveCopiedBuilder(
+ TypeDefinitionBuilder copied,
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder builder) {
+
+ if (copied instanceof UnionTypeBuilder) {
+ UnionTypeBuilder union = (UnionTypeBuilder) copied;
+ List<TypeDefinition<?>> unionTypes = union.getTypes();
+ List<UnknownType> toRemove = new ArrayList<UnknownType>();
+ for (TypeDefinition<?> td : unionTypes) {
+ if (td instanceof UnknownType) {
+ UnknownType unknownType = (UnknownType) td;
+ TypeDefinitionBuilder resolvedType = findTargetTypeUnion(
+ union, unknownType, modules, builder);
+ union.setType(resolvedType);
+ toRemove.add(unknownType);
+ }
+ }
+ unionTypes.removeAll(toRemove);
+
+ return union;
+ }
+
+ TypeDefinition<?> base = copied.getType();
+ TypeDefinitionBuilder baseTdb = copied.getTypedef();
+ if (base != null && !(base instanceof UnknownType)) {
+ return copied;
+ } else if (base instanceof UnknownType) {
+ UnknownType unknownType = (UnknownType) base;
+ QName unknownTypeQName = unknownType.getQName();
+ String unknownTypePrefix = unknownTypeQName.getPrefix();
+ ModuleBuilder dependentModule = findDependentModule(modules,
+ builder, unknownTypePrefix);
+ TypeDefinitionBuilder unknownTypeBuilder = findTypedef(copied,
+ modules, dependentModule);
+ copied.setType(unknownTypeBuilder);
+ return copied;
+ } else if (base == null && baseTdb != null) {
+ // make a copy of baseTypeDef and call again
+ TypeDefinitionBuilder baseTdbCopy = copyTypedefBuilder(baseTdb,
+ true);
+ TypeDefinitionBuilder baseTdbCopyResolved = resolveCopiedBuilder(
+ baseTdbCopy, modules, builder);
+ copied.setType(baseTdbCopyResolved);
+ return copied;
+ } else {
+ throw new IllegalStateException(
+ "TypeDefinitionBuilder in unexpected state");
+ }
+ }
+
+ private TypeDefinitionBuilder findTypedef(QName unknownTypeQName,
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder builder) {
+
+ String unknownTypeName = unknownTypeQName.getLocalName();
+ String unknownTypePrefix = unknownTypeQName.getPrefix();
+
+ // search for module which contains referenced typedef
+ ModuleBuilder dependentModule = findDependentModule(modules, builder,
+ unknownTypePrefix);
+
+ TypeDefinitionBuilder lookedUpBuilder = findTypedefBuilder(
+ dependentModule.getModuleTypedefs(), unknownTypeName);
+
+ TypeDefinitionBuilder copied = copyTypedefBuilder(lookedUpBuilder, true);
+ return copied;
+ }
+
+ private TypeConstraints findConstraints(TypeAwareBuilder typeToResolve,
+ TypeConstraints constraints,
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder builder) {
+
+ // union type cannot be restricted
+ if (typeToResolve instanceof UnionTypeBuilder) {
+ return constraints;
+ }
+
+ // if referenced type is UnknownType again, search recursively with
+ // current constraints
+ TypeDefinition<?> referencedType = typeToResolve.getType();
+ if (referencedType == null) {
+ TypeDefinitionBuilder tdb = (TypeDefinitionBuilder) typeToResolve;
+ final List<RangeConstraint> ranges = tdb.getRanges();
+ constraints.addRanges(ranges);
+ final List<LengthConstraint> lengths = tdb.getLengths();
+ constraints.addLengths(lengths);
+ final List<PatternConstraint> patterns = tdb.getPatterns();
+ constraints.addPatterns(patterns);
+ final Integer fractionDigits = tdb.getFractionDigits();
+ constraints.setFractionDigits(fractionDigits);
+ return constraints;
+ } else if (referencedType instanceof ExtendedType) {
+ ExtendedType ext = (ExtendedType) referencedType;
+ final List<RangeConstraint> ranges = ext.getRanges();
+ constraints.addRanges(ranges);
+ final List<LengthConstraint> lengths = ext.getLengths();
+ constraints.addLengths(lengths);
+ final List<PatternConstraint> patterns = ext.getPatterns();
+ constraints.addPatterns(patterns);
+ final Integer fractionDigits = ext.getFractionDigits();
+ constraints.setFractionDigits(fractionDigits);
+ return findConstraints(
+ findTypedef(ext.getQName(), modules, builder), constraints,
+ modules, builder);
+ } else if (referencedType instanceof UnknownType) {
+ UnknownType unknown = (UnknownType) referencedType;
+
+ final List<RangeConstraint> ranges = unknown.getRangeStatements();
+ constraints.addRanges(ranges);
+ final List<LengthConstraint> lengths = unknown
+ .getLengthStatements();
+ constraints.addLengths(lengths);
+ final List<PatternConstraint> patterns = unknown.getPatterns();
+ constraints.addPatterns(patterns);
+ final Integer fractionDigits = unknown.getFractionDigits();
+ constraints.setFractionDigits(fractionDigits);
+
+ String unknownTypePrefix = unknown.getQName().getPrefix();
+ if (unknownTypePrefix == null || "".equals(unknownTypePrefix)) {
+ unknownTypePrefix = builder.getPrefix();
+ }
+ ModuleBuilder dependentModule = findDependentModule(modules,
+ builder, unknown.getQName().getPrefix());
+ TypeDefinitionBuilder unknownTypeBuilder = findTypedef(
+ unknown.getQName(), modules, builder);
+ return findConstraints(unknownTypeBuilder, constraints, modules,
+ dependentModule);
+ } else {
+ // HANDLE BASE YANG TYPE
+ mergeConstraints(referencedType, constraints);
+ return constraints;
+ }
+
+ }
+
+ /**
+ * Go through all typedef statements from given module and search for one
+ * with given name
+ *
+ * @param typedefs
+ * typedef statements to search
+ * @param name
+ * name of searched typedef
+ * @return typedef with name equals to given name
+ */
+ private TypeDefinitionBuilder findTypedefBuilder(
+ Set<TypeDefinitionBuilder> typedefs, String name) {
+ TypeDefinitionBuilder result = null;
+ for (TypeDefinitionBuilder td : typedefs) {
+ if (td.getQName().getLocalName().equals(name)) {
+ result = td;
+ break;
+ }
+ }
+ if (result == null) {
+ throw new YangParseException(
+ "Target module does not contain typedef '" + name + "'.");
+ }
+ return result;
+ }
+
+ /**
+ * Pull restriction from referenced type and add them to given constraints
+ *
+ * @param referencedType
+ * @param constraints
+ */
+ private void mergeConstraints(TypeDefinition<?> referencedType,
+ TypeConstraints constraints) {
+
+ if (referencedType instanceof DecimalTypeDefinition) {
+ constraints.addRanges(((DecimalTypeDefinition) referencedType)
+ .getRangeStatements());
+ constraints
+ .setFractionDigits(((DecimalTypeDefinition) referencedType)
+ .getFractionDigits());
+ } else if (referencedType instanceof IntegerTypeDefinition) {
+ constraints.addRanges(((IntegerTypeDefinition) referencedType)
+ .getRangeStatements());
+ } else if (referencedType instanceof StringTypeDefinition) {
+ constraints.addPatterns(((StringTypeDefinition) referencedType)
+ .getPatterns());
+ constraints.addLengths(((StringTypeDefinition) referencedType)
+ .getLengthStatements());
+ } else if (referencedType instanceof BinaryTypeDefinition) {
+ constraints.addLengths(((BinaryTypeDefinition) referencedType)
+ .getLengthConstraints());
+ }
+ }
+
+ /**
+ * Go through all augmentation definitions and resolve them. This means find
+ * referenced node and add child nodes to it.
+ *
+ * @param modules
+ * all available modules
+ * @param module
+ * current module
+ */
+ private void resolveAugments(
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder module) {
+ Set<AugmentationSchemaBuilder> augmentBuilders = module
+ .getAddedAugments();
+
+ Set<AugmentationSchema> augments = new HashSet<AugmentationSchema>();
+ for (AugmentationSchemaBuilder augmentBuilder : augmentBuilders) {
+ SchemaPath augmentTargetSchemaPath = augmentBuilder.getTargetPath();
+ String prefix = null;
+ List<String> augmentTargetPath = new ArrayList<String>();
+
+ for (QName pathPart : augmentTargetSchemaPath.getPath()) {
+ prefix = pathPart.getPrefix();
+ augmentTargetPath.add(pathPart.getLocalName());
+ }
+ ModuleBuilder dependentModule = findDependentModule(modules,
+ module, prefix);
+ augmentTargetPath.add(0, dependentModule.getName());
+
+ AugmentationTargetBuilder augmentTarget = (AugmentationTargetBuilder) dependentModule
+ .getNode(augmentTargetPath);
+ AugmentationSchema result = augmentBuilder.build();
+ augmentTarget.addAugmentation(result);
+ fillAugmentTarget(augmentBuilder, (ChildNodeBuilder) augmentTarget);
+ augments.add(result);
+ }
+ module.setAugmentations(augments);
+ }
+
+ /**
+ * Add all augment's child nodes to given target.
+ *
+ * @param augment
+ * @param target
+ */
+ private void fillAugmentTarget(AugmentationSchemaBuilder augment,
+ ChildNodeBuilder target) {
+ for (DataSchemaNodeBuilder builder : augment.getChildNodes()) {
+ builder.setAugmenting(true);
+ target.addChildNode(builder);
+ }
+ }
+
+ /**
+ * Go through identity statements defined in current module and resolve
+ * their 'base' statement if present.
+ *
+ * @param modules
+ * all modules
+ * @param module
+ * module being resolved
+ */
+ private void resolveIdentities(
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder module) {
+ Set<IdentitySchemaNodeBuilder> identities = module.getAddedIdentities();
+ for (IdentitySchemaNodeBuilder identity : identities) {
+ String baseIdentityName = identity.getBaseIdentityName();
+ if (baseIdentityName != null) {
+ String baseIdentityPrefix = null;
+ String baseIdentityLocalName = null;
+ if (baseIdentityName.contains(":")) {
+ String[] splitted = baseIdentityName.split(":");
+ baseIdentityPrefix = splitted[0];
+ baseIdentityLocalName = splitted[1];
+ } else {
+ baseIdentityPrefix = module.getPrefix();
+ baseIdentityLocalName = baseIdentityName;
+ }
+ ModuleBuilder dependentModule = findDependentModule(modules,
+ module, baseIdentityPrefix);
+
+ Set<IdentitySchemaNodeBuilder> dependentModuleIdentities = dependentModule
+ .getAddedIdentities();
+ for (IdentitySchemaNodeBuilder idBuilder : dependentModuleIdentities) {
+ if (idBuilder.getQName().getLocalName()
+ .equals(baseIdentityLocalName)) {
+ identity.setBaseIdentity(idBuilder);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Find dependent module based on given prefix
+ *
+ * @param modules
+ * all available modules
+ * @param module
+ * current module
+ * @param prefix
+ * target module prefix
+ * @return
+ */
+ private ModuleBuilder findDependentModule(
+ Map<String, TreeMap<Date, ModuleBuilder>> modules,
+ ModuleBuilder module, String prefix) {
+ ModuleBuilder dependentModule = null;
+ Date dependentModuleRevision = null;
+
+ if (prefix.equals(module.getPrefix())) {
+ dependentModule = module;
+ } else {
+ ModuleImport dependentModuleImport = getModuleImport(module, prefix);
+ if (dependentModuleImport == null) {
+ throw new YangParseException("No import found with prefix '"
+ + prefix + "' in module " + module.getName() + "'.");
+ }
+ String dependentModuleName = dependentModuleImport.getModuleName();
+ dependentModuleRevision = dependentModuleImport.getRevision();
+
+ TreeMap<Date, ModuleBuilder> moduleBuildersByRevision = modules
+ .get(dependentModuleName);
+ if (dependentModuleRevision == null) {
+ dependentModule = moduleBuildersByRevision.lastEntry()
+ .getValue();
+ } else {
+ dependentModule = moduleBuildersByRevision
+ .get(dependentModuleRevision);
+ }
+ }
+
+ if (dependentModule == null) {
+ throw new YangParseException(
+ "Failed to find dependent module with prefix '" + prefix
+ + "' and revision '" + dependentModuleRevision
+ + "'.");
+ }
+ return dependentModule;
+ }
+
+ /**
+ * Get module import referenced by given prefix.
+ *
+ * @param builder
+ * module to search
+ * @param prefix
+ * prefix associated with import
+ * @return ModuleImport based on given prefix
+ */
+ private ModuleImport getModuleImport(ModuleBuilder builder, String prefix) {
+ ModuleImport moduleImport = null;
+ for (ModuleImport mi : builder.getModuleImports()) {
+ if (mi.getPrefix().equals(prefix)) {
+ moduleImport = mi;
+ break;
+ }
+ }
+ return moduleImport;
+ }
+
+ private Date createEpochTime() {
+ Calendar calendar = Calendar.getInstance();
+ calendar.setTimeInMillis(0);
+ return calendar.getTime();
+ }
+
+ private static class SchemaContextImpl implements SchemaContext {
+ private final Set<Module> modules;
+
+ private SchemaContextImpl(Set<Module> modules) {
+ this.modules = modules;
+ }
+
+ @Override
+ public Set<DataSchemaNode> getDataDefinitions() {
+ final Set<DataSchemaNode> dataDefs = new HashSet<DataSchemaNode>();
+ for (Module m : modules) {
+ dataDefs.addAll(m.getChildNodes());
+ }
+ return dataDefs;
+ }
+
+ @Override
+ public Set<Module> getModules() {
+ return modules;
+ }
+
+ @Override
+ public Set<NotificationDefinition> getNotifications() {
+ final Set<NotificationDefinition> notifications = new HashSet<NotificationDefinition>();
+ for (Module m : modules) {
+ notifications.addAll(m.getNotifications());
+ }
+ return notifications;
+ }
+
+ @Override
+ public Set<RpcDefinition> getOperations() {
+ final Set<RpcDefinition> rpcs = new HashSet<RpcDefinition>();
+ for (Module m : modules) {
+ rpcs.addAll(m.getRpcs());
+ }
+ return rpcs;
+ }
+
+ @Override
+ public Set<ExtensionDefinition> getExtensions() {
+ final Set<ExtensionDefinition> extensions = new HashSet<ExtensionDefinition>();
+ for (Module m : modules) {
+ extensions.addAll(m.getExtensionSchemaNodes());
+ }
+ return extensions;
+ }
+ }
+
+}
import java.text.DateFormat;\r
import java.text.ParseException;\r
import java.text.SimpleDateFormat;\r
-import java.util.ArrayList;\r
import java.util.Collections;\r
import java.util.Date;\r
import java.util.List;\r
import java.util.Stack;\r
-import java.util.TreeMap;\r
\r
import org.antlr.v4.runtime.tree.ParseTree;\r
import org.opendaylight.controller.antlrv4.code.gen.YangParser;\r
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Argument_stmtContext;\r
import org.opendaylight.controller.antlrv4.code.gen.YangParser.Base_stmtContext;\r
import org.opendaylight.controller.antlrv4.code.gen.YangParser.Contact_stmtContext;\r
import org.opendaylight.controller.antlrv4.code.gen.YangParser.Container_stmtContext;\r
import org.opendaylight.controller.antlrv4.code.gen.YangParser.Revision_stmtsContext;\r
import org.opendaylight.controller.antlrv4.code.gen.YangParser.Status_stmtContext;\r
import org.opendaylight.controller.antlrv4.code.gen.YangParser.Type_body_stmtsContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Union_specificationContext;\r
import org.opendaylight.controller.antlrv4.code.gen.YangParser.Yang_version_stmtContext;\r
import org.opendaylight.controller.antlrv4.code.gen.YangParserBaseListener;\r
import org.opendaylight.controller.yang.common.QName;\r
"yyyy-mm-dd");\r
private final Stack<String> actualPath = new Stack<String>();\r
\r
-\r
@Override\r
public void enterModule_stmt(YangParser.Module_stmtContext ctx) {\r
moduleName = stringFromNode(ctx);\r
@Override\r
public void enterModule_header_stmts(final Module_header_stmtsContext ctx) {\r
super.enterModule_header_stmts(ctx);\r
- \r
+\r
String yangVersion = null;\r
for (int i = 0; i < ctx.getChildCount(); ++i) {\r
final ParseTree treeNode = ctx.getChild(i);\r
yangVersion = stringFromNode(treeNode);\r
}\r
}\r
- \r
+\r
if (yangVersion == null) {\r
yangVersion = "1";\r
}\r
logger.warn(message);\r
}\r
}\r
- \r
+\r
@Override\r
public void enterImport_stmt(Import_stmtContext ctx) {\r
super.enterImport_stmt(ctx);\r
\r
@Override\r
public void enterExtension_stmt(YangParser.Extension_stmtContext ctx) {\r
- String argument = stringFromNode(ctx);\r
- QName qname = new QName(namespace, revision, yangModelPrefix, argument);\r
+ String extName = stringFromNode(ctx);\r
+ QName qname = new QName(namespace, revision, yangModelPrefix, extName);\r
ExtensionBuilder builder = moduleBuilder.addExtension(qname);\r
parseSchemaNodeArgs(ctx, builder);\r
+\r
+ String argument = null;\r
+ boolean yin = false;\r
+ for (int i = 0; i < ctx.getChildCount(); i++) {\r
+ ParseTree child = ctx.getChild(i);\r
+ if (child instanceof Argument_stmtContext) {\r
+ argument = stringFromNode(child);\r
+ yin = parseYinValue((Argument_stmtContext) child);\r
+ break;\r
+ }\r
+ }\r
+ builder.setArgument(argument);\r
+ builder.setYinElement(yin);\r
}\r
\r
@Override\r
@Override\r
public void enterType_stmt(YangParser.Type_stmtContext ctx) {\r
String typeName = stringFromNode(ctx);\r
- QName typeQName;\r
- if (typeName.contains(":")) {\r
- String[] splittedName = typeName.split(":");\r
- String prefix = splittedName[0];\r
- String name = splittedName[1];\r
- if (prefix.equals(yangModelPrefix)) {\r
- typeQName = new QName(namespace, revision, prefix, name);\r
- } else {\r
- typeQName = new QName(null, null, prefix, name);\r
- }\r
- } else {\r
- typeQName = new QName(namespace, revision, yangModelPrefix,\r
- typeName);\r
- }\r
+ QName typeQName = parseQName(typeName);\r
\r
TypeDefinition<?> type = null;\r
Type_body_stmtsContext typeBody = null;\r
}\r
\r
// if this is base yang type...\r
- if(YangTypesConverter.isBaseYangType(typeName)) {\r
+ if (YangTypesConverter.isBaseYangType(typeName)) {\r
if (typeBody == null) {\r
// if there are no constraints, just grab default base yang type\r
type = YangTypesConverter.javaTypeForBaseYangType(typeName);\r
- moduleBuilder.setType(type, actualPath);\r
+ moduleBuilder.setType(type, getActualPath());\r
} else {\r
- if(typeName.equals("union")) {\r
- List<String> types = new ArrayList<String>();\r
- for(int i = 0; i < typeBody.getChildCount(); i++) {\r
- ParseTree unionSpec = typeBody.getChild(i);\r
- if(unionSpec instanceof Union_specificationContext) {\r
- for(int j = 0; j < unionSpec.getChildCount(); j++) {\r
- ParseTree typeSpec = unionSpec.getChild(j);\r
- types.add(stringFromNode(typeSpec));\r
- }\r
- }\r
- }\r
- moduleBuilder.addUnionType(actualPath);\r
+ if ("union".equals(typeName)) {\r
+ moduleBuilder.addUnionType(getActualPath());\r
} else {\r
- type = parseTypeBody(typeName, typeBody, actualPath, namespace, revision, yangModelPrefix);\r
- moduleBuilder.setType(type, actualPath);\r
+ type = parseTypeBody(typeName, typeBody, getActualPath(),\r
+ namespace, revision, yangModelPrefix);\r
+ moduleBuilder.setType(type, getActualPath());\r
}\r
}\r
} else {\r
type = parseUnknownTypeBody(typeQName, typeBody);\r
// mark parent node of this type statement as dirty\r
- moduleBuilder.addDirtyNode(actualPath);\r
- moduleBuilder.setType(type, actualPath);\r
+ moduleBuilder.addDirtyNode(getActualPath());\r
+ moduleBuilder.setType(type, getActualPath());\r
}\r
\r
updatePath(typeName);\r
+ }\r
\r
+ private QName parseQName(String typeName) {\r
+ QName typeQName;\r
+ if (typeName.contains(":")) {\r
+ String[] splittedName = typeName.split(":");\r
+ String prefix = splittedName[0];\r
+ String name = splittedName[1];\r
+ if (prefix.equals(yangModelPrefix)) {\r
+ typeQName = new QName(namespace, revision, prefix, name);\r
+ } else {\r
+ typeQName = new QName(null, null, prefix, name);\r
+ }\r
+ } else {\r
+ typeQName = new QName(namespace, revision, yangModelPrefix,\r
+ typeName);\r
+ }\r
+ return typeQName;\r
}\r
\r
@Override\r
package org.opendaylight.controller.yang.model.parser.util;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.List;
import org.opendaylight.controller.yang.model.api.type.LengthConstraint;
}
public List<RangeConstraint> getRange() {
+ if(ranges.isEmpty()) {
+ List<RangeConstraint> result = Collections.emptyList();
+ return result;
+ }
+
List<RangeConstraint> resolved = ranges.get(0);
RangeConstraint firstRange = resolved.get(0);
RangeConstraint lastRange = resolved.get(resolved.size() - 1);
}
public List<LengthConstraint> getLength() {
+ if(lengths.isEmpty()) {
+ List<LengthConstraint> result = Collections.emptyList();
+ return result;
+ }
+
List<LengthConstraint> resolved = lengths.get(0);
LengthConstraint firstLength = resolved.get(0);
LengthConstraint lastLength = resolved.get(resolved.size() - 1);
}
public void setFractionDigits(Integer fractionDigits) {
- if (fractionDigits != null) {
+ if (this.fractionDigits == null) {
this.fractionDigits = fractionDigits;
}
}
-/*\r
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.\r
- *\r
- * This program and the accompanying materials are made available under the\r
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,\r
- * and is available at http://www.eclipse.org/legal/eplv10.html\r
- */\r
-package org.opendaylight.controller.yang.model.parser.util;\r
-\r
-import java.net.URI;\r
-import java.util.ArrayList;\r
-import java.util.Collections;\r
-import java.util.Date;\r
-import java.util.List;\r
-import java.util.Stack;\r
-\r
-import org.antlr.v4.runtime.tree.ParseTree;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Bit_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Bits_specificationContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Config_argContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Config_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Decimal64_specificationContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Description_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Enum_specificationContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Enum_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Fraction_digits_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Leafref_specificationContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Length_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Mandatory_argContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Mandatory_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Max_elements_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Min_elements_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Must_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Numerical_restrictionsContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Ordered_by_argContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Ordered_by_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Path_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Pattern_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Position_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Range_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Reference_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Require_instance_argContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Require_instance_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Status_argContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Status_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.StringContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.String_restrictionsContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Type_body_stmtsContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.Units_stmtContext;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser.When_stmtContext;\r
-import org.opendaylight.controller.yang.common.QName;\r
-import org.opendaylight.controller.yang.model.api.RevisionAwareXPath;\r
-import org.opendaylight.controller.yang.model.api.SchemaPath;\r
-import org.opendaylight.controller.yang.model.api.Status;\r
-import org.opendaylight.controller.yang.model.api.TypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.type.BitsTypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.EnumTypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.LengthConstraint;\r
-import org.opendaylight.controller.yang.model.api.type.PatternConstraint;\r
-import org.opendaylight.controller.yang.model.api.type.RangeConstraint;\r
-import org.opendaylight.controller.yang.model.api.type.BitsTypeDefinition.Bit;\r
-import org.opendaylight.controller.yang.model.parser.builder.api.SchemaNodeBuilder;\r
-import org.opendaylight.controller.yang.model.parser.builder.impl.ConstraintsBuilder;\r
-import org.opendaylight.controller.yang.model.util.BaseConstraints;\r
-import org.opendaylight.controller.yang.model.util.BinaryType;\r
-import org.opendaylight.controller.yang.model.util.BitsType;\r
-import org.opendaylight.controller.yang.model.util.EnumerationType;\r
-import org.opendaylight.controller.yang.model.util.InstanceIdentifier;\r
-import org.opendaylight.controller.yang.model.util.Leafref;\r
-import org.opendaylight.controller.yang.model.util.RevisionAwareXPathImpl;\r
-import org.opendaylight.controller.yang.model.util.StringType;\r
-import org.opendaylight.controller.yang.model.util.UnknownType;\r
-import org.opendaylight.controller.yang.model.util.YangTypesConverter;\r
-import org.slf4j.Logger;\r
-import org.slf4j.LoggerFactory;\r
-\r
-public class YangModelBuilderUtil {\r
-\r
- private static final Logger logger = LoggerFactory\r
- .getLogger(YangModelBuilderUtil.class);\r
-\r
- /**\r
- * Parse given tree and get first string value.\r
- *\r
- * @param treeNode\r
- * tree to parse\r
- * @return first string value from given tree\r
- */\r
- public static String stringFromNode(final ParseTree treeNode) {\r
- final String result = "";\r
- for (int i = 0; i < treeNode.getChildCount(); ++i) {\r
- if (treeNode.getChild(i) instanceof StringContext) {\r
- final StringContext context = (StringContext) treeNode\r
- .getChild(i);\r
- if (context != null) {\r
- return context.getChild(0).getText().replace("\"", "");\r
- }\r
- }\r
- }\r
- return result;\r
- }\r
-\r
- /**\r
- * Parse 'description', 'reference' and 'status' statements and fill in\r
- * given builder.\r
- *\r
- * @param ctx\r
- * context to parse\r
- * @param builder\r
- * builder to fill in with parsed statements\r
- */\r
- public static void parseSchemaNodeArgs(ParseTree ctx,\r
- SchemaNodeBuilder builder) {\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof Description_stmtContext) {\r
- String desc = stringFromNode(child);\r
- builder.setDescription(desc);\r
- } else if (child instanceof Reference_stmtContext) {\r
- String ref = stringFromNode(child);\r
- builder.setReference(ref);\r
- } else if (child instanceof Status_stmtContext) {\r
- Status status = parseStatus((Status_stmtContext) child);\r
- builder.setStatus(status);\r
- }\r
- }\r
- }\r
-\r
- /**\r
- * Parse given context and return its value;\r
- *\r
- * @param ctx\r
- * status context\r
- * @return value parsed from context\r
- */\r
- public static Status parseStatus(Status_stmtContext ctx) {\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree statusArg = ctx.getChild(i);\r
- if (statusArg instanceof Status_argContext) {\r
- String statusArgStr = stringFromNode(statusArg);\r
- if (statusArgStr.equals("current")) {\r
- return Status.CURRENT;\r
- } else if (statusArgStr.equals("deprecated")) {\r
- return Status.DEPRECATED;\r
- } else if (statusArgStr.equals("obsolete")) {\r
- return Status.OBSOLETE;\r
- } else {\r
- logger.warn("Invalid 'status' statement: " + statusArgStr);\r
- }\r
- }\r
- }\r
- return null;\r
- }\r
-\r
- /**\r
- * Parse given tree and returns units statement as string.\r
- *\r
- * @param ctx\r
- * context to parse\r
- * @return value of units statement as string or null if there is no units\r
- * statement\r
- */\r
- public static String parseUnits(ParseTree ctx) {\r
- String units = null;\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof Units_stmtContext) {\r
- units = stringFromNode(child);\r
- break;\r
- }\r
- }\r
- return units;\r
- }\r
-\r
- /**\r
- * Create SchemaPath object from given path list with namespace, revision\r
- * and prefix based on given values.\r
- *\r
- * @param actualPath\r
- * @param namespace\r
- * @param revision\r
- * @param prefix\r
- * @return SchemaPath object.\r
- */\r
- public static SchemaPath createActualSchemaPath(List<String> actualPath,\r
- URI namespace, Date revision, String prefix) {\r
- final List<QName> path = new ArrayList<QName>();\r
- QName qname;\r
- for (String pathElement : actualPath) {\r
- qname = new QName(namespace, revision, prefix, pathElement);\r
- path.add(qname);\r
- }\r
- return new SchemaPath(path, true);\r
- }\r
-\r
- /**\r
- * Create SchemaPath from given string.\r
- *\r
- * @param augmentPath\r
- * string representation of path\r
- * @return SchemaPath object\r
- */\r
- public static SchemaPath parseAugmentPath(String augmentPath) {\r
- boolean absolute = augmentPath.startsWith("/");\r
- String[] splittedPath = augmentPath.split("/");\r
- List<QName> path = new ArrayList<QName>();\r
- QName name;\r
- for (String pathElement : splittedPath) {\r
- if (pathElement.length() > 0) {\r
- String[] splittedElement = pathElement.split(":");\r
- if (splittedElement.length == 1) {\r
- name = new QName(null, null, null, splittedElement[0]);\r
- } else {\r
- name = new QName(null, null, splittedElement[0],\r
- splittedElement[1]);\r
- }\r
- path.add(name);\r
- }\r
- }\r
- return new SchemaPath(path, absolute);\r
- }\r
-\r
- /**\r
- * Create java.util.List of QName objects from given key definition as\r
- * string.\r
- *\r
- * @param keyDefinition\r
- * key definition as string\r
- * @param namespace\r
- * current namespace\r
- * @param revision\r
- * current revision\r
- * @param prefix\r
- * current prefix\r
- * @return YANG list key as java.util.List of QName objects\r
- */\r
- public static List<QName> createListKey(String keyDefinition,\r
- URI namespace, Date revision, String prefix) {\r
- List<QName> key = new ArrayList<QName>();\r
- String[] splittedKey = keyDefinition.split(" ");\r
-\r
- QName qname = null;\r
- for (String keyElement : splittedKey) {\r
- if (keyElement.length() != 0) {\r
- qname = new QName(namespace, revision, prefix, keyElement);\r
- key.add(qname);\r
- }\r
- }\r
- return key;\r
- }\r
-\r
- private static List<EnumTypeDefinition.EnumPair> getEnumConstants(\r
- Type_body_stmtsContext ctx, List<String> path, URI namespace,\r
- Date revision, String prefix) {\r
- List<EnumTypeDefinition.EnumPair> enumConstants = new ArrayList<EnumTypeDefinition.EnumPair>();\r
-\r
- out: for (int j = 0; j < ctx.getChildCount(); j++) {\r
- ParseTree enumSpecChild = ctx.getChild(j);\r
- if (enumSpecChild instanceof Enum_specificationContext) {\r
- for (int k = 0; k < enumSpecChild.getChildCount(); k++) {\r
- ParseTree enumChild = enumSpecChild.getChild(k);\r
- if (enumChild instanceof Enum_stmtContext) {\r
- enumConstants.add(createEnumPair(\r
- (Enum_stmtContext) enumChild, k, path,\r
- namespace, revision, prefix));\r
- if (k == enumSpecChild.getChildCount() - 1) {\r
- break out;\r
- }\r
- }\r
- }\r
- }\r
- }\r
- return enumConstants;\r
- }\r
-\r
- private static EnumTypeDefinition.EnumPair createEnumPair(\r
- Enum_stmtContext ctx, final int value, List<String> path,\r
- final URI namespace, final Date revision, final String prefix) {\r
- final String name = stringFromNode(ctx);\r
- final QName qname = new QName(namespace, revision, prefix, name);\r
- String description = null;\r
- String reference = null;\r
- Status status = null;\r
- List<String> enumPairPath = new ArrayList<String>(path);\r
- enumPairPath.add(name);\r
-\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof Description_stmtContext) {\r
- description = stringFromNode(child);\r
- } else if (child instanceof Reference_stmtContext) {\r
- reference = stringFromNode(child);\r
- } else if (child instanceof Status_stmtContext) {\r
- status = parseStatus((Status_stmtContext) child);\r
- }\r
- }\r
-\r
- EnumPairImpl result = new EnumPairImpl();\r
- result.qname = qname;\r
- result.path = createActualSchemaPath(enumPairPath, namespace, revision,\r
- prefix);\r
- result.description = description;\r
- result.reference = reference;\r
- result.status = status;\r
- // TODO: extensionSchemaNodes\r
- result.name = name;\r
- result.value = value;\r
- return result;\r
- }\r
-\r
- private static class EnumPairImpl implements EnumTypeDefinition.EnumPair {\r
-\r
- private QName qname;\r
- private SchemaPath path;\r
- private String description;\r
- private String reference;\r
- private Status status;\r
- private List<UnknownSchemaNode> extensionSchemaNodes = Collections\r
- .emptyList();\r
- private String name;\r
- private Integer value;\r
-\r
- @Override\r
- public QName getQName() {\r
- return qname;\r
- }\r
-\r
- @Override\r
- public SchemaPath getPath() {\r
- return path;\r
- }\r
-\r
- @Override\r
- public String getDescription() {\r
- return description;\r
- }\r
-\r
- @Override\r
- public String getReference() {\r
- return reference;\r
- }\r
-\r
- @Override\r
- public Status getStatus() {\r
- return status;\r
- }\r
-\r
- @Override\r
- public List<UnknownSchemaNode> getUnknownSchemaNodes() {\r
- return extensionSchemaNodes;\r
- }\r
-\r
- @Override\r
- public String getName() {\r
- return name;\r
- }\r
-\r
- @Override\r
- public Integer getValue() {\r
- return value;\r
- }\r
-\r
- @Override\r
- public int hashCode() {\r
- final int prime = 31;\r
- int result = 1;\r
- result = prime * result + ((qname == null) ? 0 : qname.hashCode());\r
- result = prime * result + ((path == null) ? 0 : path.hashCode());\r
- result = prime * result\r
- + ((description == null) ? 0 : description.hashCode());\r
- result = prime * result\r
- + ((reference == null) ? 0 : reference.hashCode());\r
- result = prime * result\r
- + ((status == null) ? 0 : status.hashCode());\r
- result = prime\r
- * result\r
- + ((extensionSchemaNodes == null) ? 0\r
- : extensionSchemaNodes.hashCode());\r
- result = prime * result + ((name == null) ? 0 : name.hashCode());\r
- result = prime * result + ((value == null) ? 0 : value.hashCode());\r
- return result;\r
- }\r
-\r
- @Override\r
- public boolean equals(Object obj) {\r
- if (this == obj) {\r
- return true;\r
- }\r
- if (obj == null) {\r
- return false;\r
- }\r
- if (getClass() != obj.getClass()) {\r
- return false;\r
- }\r
- EnumPairImpl other = (EnumPairImpl) obj;\r
- if (qname == null) {\r
- if (other.qname != null) {\r
- return false;\r
- }\r
- } else if (!qname.equals(other.qname)) {\r
- return false;\r
- }\r
- if (path == null) {\r
- if (other.path != null) {\r
- return false;\r
- }\r
- } else if (!path.equals(other.path)) {\r
- return false;\r
- }\r
- if (description == null) {\r
- if (other.description != null) {\r
- return false;\r
- }\r
- } else if (!description.equals(other.description)) {\r
- return false;\r
- }\r
- if (reference == null) {\r
- if (other.reference != null) {\r
- return false;\r
- }\r
- } else if (!reference.equals(other.reference)) {\r
- return false;\r
- }\r
- if (status == null) {\r
- if (other.status != null) {\r
- return false;\r
- }\r
- } else if (!status.equals(other.status)) {\r
- return false;\r
- }\r
- if (extensionSchemaNodes == null) {\r
- if (other.extensionSchemaNodes != null) {\r
- return false;\r
- }\r
- } else if (!extensionSchemaNodes.equals(other.extensionSchemaNodes)) {\r
- return false;\r
- }\r
- if (name == null) {\r
- if (other.name != null) {\r
- return false;\r
- }\r
- } else if (!name.equals(other.name)) {\r
- return false;\r
- }\r
- if (value == null) {\r
- if (other.value != null) {\r
- return false;\r
- }\r
- } else if (!value.equals(other.value)) {\r
- return false;\r
- }\r
- return true;\r
- }\r
-\r
- @Override\r
- public String toString() {\r
- return EnumTypeDefinition.EnumPair.class.getSimpleName() + "[name="\r
- + name + ", value=" + value + "]";\r
- }\r
- };\r
-\r
- private static List<RangeConstraint> getRangeConstraints(\r
- Type_body_stmtsContext ctx) {\r
- final List<RangeConstraint> rangeConstraints = new ArrayList<RangeConstraint>();\r
- for (int j = 0; j < ctx.getChildCount(); j++) {\r
- ParseTree numRestrChild = ctx.getChild(j);\r
- if (numRestrChild instanceof Numerical_restrictionsContext) {\r
- for (int k = 0; k < numRestrChild.getChildCount(); k++) {\r
- ParseTree rangeChild = numRestrChild.getChild(k);\r
- if (rangeChild instanceof Range_stmtContext) {\r
- rangeConstraints\r
- .addAll(parseRangeConstraints((Range_stmtContext) rangeChild));\r
- break;\r
- }\r
- }\r
- }\r
- }\r
- return rangeConstraints;\r
- }\r
-\r
- private static List<RangeConstraint> parseRangeConstraints(\r
- Range_stmtContext ctx) {\r
- List<RangeConstraint> rangeConstraints = new ArrayList<RangeConstraint>();\r
- String description = null;\r
- String reference = null;\r
-\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof Description_stmtContext) {\r
- description = stringFromNode(child);\r
- } else if (child instanceof Reference_stmtContext) {\r
- reference = stringFromNode(child);\r
- }\r
- }\r
-\r
- String rangeStr = stringFromNode(ctx);\r
- String trimmed = rangeStr.replace(" ", "");\r
- String[] splittedRange = trimmed.split("\\|");\r
- for (String rangeDef : splittedRange) {\r
- String[] splittedRangeDef = rangeDef.split("\\.\\.");\r
- Long min;\r
- Long max;\r
- if (splittedRangeDef.length == 1) {\r
- min = max = parseRangeValue(splittedRangeDef[0]);\r
- } else {\r
- min = parseRangeValue(splittedRangeDef[0]);\r
- max = parseRangeValue(splittedRangeDef[1]);\r
- }\r
- RangeConstraint range = BaseConstraints.rangeConstraint(min, max,\r
- description, reference);\r
- rangeConstraints.add(range);\r
- }\r
-\r
- return rangeConstraints;\r
- }\r
-\r
- private static List<LengthConstraint> getLengthConstraints(\r
- Type_body_stmtsContext ctx) {\r
- List<LengthConstraint> lengthConstraints = new ArrayList<LengthConstraint>();\r
- for (int j = 0; j < ctx.getChildCount(); j++) {\r
- ParseTree stringRestrChild = ctx.getChild(j);\r
- if (stringRestrChild instanceof String_restrictionsContext) {\r
- for (int k = 0; k < stringRestrChild.getChildCount(); k++) {\r
- ParseTree lengthChild = stringRestrChild.getChild(k);\r
- if (lengthChild instanceof Length_stmtContext) {\r
- lengthConstraints\r
- .addAll(parseLengthConstraints((Length_stmtContext) lengthChild));\r
- }\r
- }\r
- }\r
- }\r
- return lengthConstraints;\r
- }\r
-\r
- private static List<LengthConstraint> parseLengthConstraints(\r
- Length_stmtContext ctx) {\r
- List<LengthConstraint> lengthConstraints = new ArrayList<LengthConstraint>();\r
- String description = null;\r
- String reference = null;\r
-\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof Description_stmtContext) {\r
- description = stringFromNode(child);\r
- } else if (child instanceof Reference_stmtContext) {\r
- reference = stringFromNode(child);\r
- }\r
- }\r
-\r
- String lengthStr = stringFromNode(ctx);\r
- String trimmed = lengthStr.replace(" ", "");\r
- String[] splittedRange = trimmed.split("\\|");\r
- for (String rangeDef : splittedRange) {\r
- String[] splittedRangeDef = rangeDef.split("\\.\\.");\r
- Long min;\r
- Long max;\r
- if (splittedRangeDef.length == 1) {\r
- min = max = parseRangeValue(splittedRangeDef[0]);\r
- } else {\r
- min = parseRangeValue(splittedRangeDef[0]);\r
- max = parseRangeValue(splittedRangeDef[1]);\r
- }\r
- LengthConstraint range = BaseConstraints.lengthConstraint(min, max,\r
- description, reference);\r
- lengthConstraints.add(range);\r
- }\r
-\r
- return lengthConstraints;\r
- }\r
-\r
- private static Long parseRangeValue(String value) {\r
- Long result = null;\r
- if (value.equals("min")) {\r
- result = Long.MIN_VALUE;\r
- } else if (value.equals("max")) {\r
- result = Long.MAX_VALUE;\r
- } else {\r
- result = Long.valueOf(value);\r
- }\r
- return result;\r
- }\r
-\r
- private static List<PatternConstraint> getPatternConstraint(\r
- Type_body_stmtsContext ctx) {\r
- List<PatternConstraint> patterns = new ArrayList<PatternConstraint>();\r
-\r
- out: for (int j = 0; j < ctx.getChildCount(); j++) {\r
- ParseTree stringRestrChild = ctx.getChild(j);\r
- if (stringRestrChild instanceof String_restrictionsContext) {\r
- for (int k = 0; k < stringRestrChild.getChildCount(); k++) {\r
- ParseTree lengthChild = stringRestrChild.getChild(k);\r
- if (lengthChild instanceof Pattern_stmtContext) {\r
- patterns.add(parsePatternConstraint((Pattern_stmtContext) lengthChild));\r
- if (k == lengthChild.getChildCount() - 1) {\r
- break out;\r
- }\r
- }\r
- }\r
- }\r
- }\r
- return patterns;\r
- }\r
-\r
- /**\r
- * Internal helper method.\r
- *\r
- * @param ctx\r
- * pattern context\r
- * @return PatternConstraint object\r
- */\r
- private static PatternConstraint parsePatternConstraint(\r
- Pattern_stmtContext ctx) {\r
- String description = null;\r
- String reference = null;\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof Description_stmtContext) {\r
- description = stringFromNode(child);\r
- } else if (child instanceof Reference_stmtContext) {\r
- reference = stringFromNode(child);\r
- }\r
- }\r
- String pattern = patternStringFromNode(ctx);\r
- return BaseConstraints.patternConstraint(pattern, description,\r
- reference);\r
- }\r
-\r
- public static String patternStringFromNode(final Pattern_stmtContext treeNode) {\r
- String result = "";\r
- for (int i = 0; i < treeNode.getChildCount(); ++i) {\r
- ParseTree child = treeNode.getChild(i);\r
- if (child instanceof StringContext) {\r
- for(int j = 0; j < child.getChildCount(); j++) {\r
- if(j % 2 == 0) {\r
- String patternToken = child.getChild(j).getText();\r
- result += patternToken.substring(1, patternToken.length()-1);\r
- }\r
- }\r
- }\r
- }\r
- return result;\r
- }\r
-\r
- private static Integer getFractionDigits(Type_body_stmtsContext ctx) {\r
- for (int j = 0; j < ctx.getChildCount(); j++) {\r
- ParseTree dec64specChild = ctx.getChild(j);\r
- if (dec64specChild instanceof Decimal64_specificationContext) {\r
- return parseFractionDigits((Decimal64_specificationContext) dec64specChild);\r
- }\r
- }\r
- return null;\r
- }\r
-\r
- private static Integer parseFractionDigits(\r
- Decimal64_specificationContext ctx) {\r
- for (int k = 0; k < ctx.getChildCount(); k++) {\r
- ParseTree fdChild = ctx.getChild(k);\r
- if (fdChild instanceof Fraction_digits_stmtContext) {\r
- return Integer.valueOf(stringFromNode(fdChild));\r
- }\r
- }\r
- return null;\r
- }\r
-\r
- private static List<BitsTypeDefinition.Bit> getBits(\r
- Type_body_stmtsContext ctx, List<String> actualPath, URI namespace,\r
- Date revision, String prefix) {\r
- List<BitsTypeDefinition.Bit> bits = new ArrayList<BitsTypeDefinition.Bit>();\r
- for (int j = 0; j < ctx.getChildCount(); j++) {\r
- ParseTree bitsSpecChild = ctx.getChild(j);\r
- if (bitsSpecChild instanceof Bits_specificationContext) {\r
- for (int k = 0; k < bitsSpecChild.getChildCount(); k++) {\r
- ParseTree bitChild = bitsSpecChild.getChild(k);\r
- if (bitChild instanceof Bit_stmtContext) {\r
- bits.add(parseBit((Bit_stmtContext) bitChild,\r
- actualPath, namespace, revision, prefix));\r
- }\r
- }\r
- }\r
- }\r
- return bits;\r
- }\r
-\r
- private static boolean isRequireInstance(Type_body_stmtsContext ctx) {\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof Require_instance_stmtContext) {\r
- for (int j = 0; j < child.getChildCount(); j++) {\r
- ParseTree reqArg = child.getChild(j);\r
- if (reqArg instanceof Require_instance_argContext) {\r
- return Boolean.valueOf(stringFromNode(reqArg));\r
- }\r
- }\r
- }\r
- }\r
- return false;\r
- }\r
-\r
- private static BitsTypeDefinition.Bit parseBit(final Bit_stmtContext ctx,\r
- List<String> actualPath, final URI namespace, final Date revision,\r
- final String prefix) {\r
- String name = stringFromNode(ctx);\r
- final QName qname = new QName(namespace, revision, prefix, name);\r
- Long position = null;\r
-\r
- String description = null;\r
- String reference = null;\r
- Status status = Status.CURRENT;\r
-\r
- Stack<String> bitPath = new Stack<String>();\r
- bitPath.addAll(actualPath);\r
- bitPath.add(name);\r
-\r
- SchemaPath schemaPath = createActualSchemaPath(bitPath, namespace,\r
- revision, prefix);\r
-\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof Position_stmtContext) {\r
- String positionStr = stringFromNode(child);\r
- position = Long.valueOf(positionStr);\r
- if (position < 0 || position > 4294967295L) {\r
- throw new IllegalArgumentException(\r
- "position value MUST be in the range 0 to 4294967295, but was: "\r
- + position);\r
- }\r
- } else if (child instanceof Description_stmtContext) {\r
- description = stringFromNode(child);\r
- } else if (child instanceof Reference_stmtContext) {\r
- reference = stringFromNode(child);\r
- } else if (child instanceof Status_stmtContext) {\r
- status = parseStatus((Status_stmtContext) child);\r
- }\r
- }\r
-\r
- // TODO: extensionDefinitions\r
- return createBit(qname, schemaPath, description, reference, status,\r
- null, position);\r
- }\r
-\r
- private static BitsTypeDefinition.Bit createBit(final QName qname,\r
- final SchemaPath schemaPath, final String description,\r
- final String reference, final Status status,\r
- final List<UnknownSchemaNode> extensionDefinitions,\r
- final Long position) {\r
- return new BitsTypeDefinition.Bit() {\r
-\r
- @Override\r
- public QName getQName() {\r
- return qname;\r
- }\r
-\r
- @Override\r
- public SchemaPath getPath() {\r
- return schemaPath;\r
- }\r
-\r
- @Override\r
- public String getDescription() {\r
- return description;\r
- }\r
-\r
- @Override\r
- public String getReference() {\r
- return reference;\r
- }\r
-\r
- @Override\r
- public Status getStatus() {\r
- return status;\r
- }\r
-\r
- @Override\r
- public List<UnknownSchemaNode> getUnknownSchemaNodes() {\r
- return extensionDefinitions;\r
- }\r
-\r
- @Override\r
- public Long getPosition() {\r
- return position;\r
- }\r
-\r
- @Override\r
- public String getName() {\r
- return qname.getLocalName();\r
- }\r
-\r
- @Override\r
- public int hashCode() {\r
- final int prime = 31;\r
- int result = 1;\r
- result = prime * result\r
- + ((qname == null) ? 0 : qname.hashCode());\r
- result = prime * result\r
- + ((schemaPath == null) ? 0 : schemaPath.hashCode());\r
- result = prime * result\r
- + ((description == null) ? 0 : description.hashCode());\r
- result = prime * result\r
- + ((reference == null) ? 0 : reference.hashCode());\r
- result = prime * result\r
- + ((status == null) ? 0 : status.hashCode());\r
- result = prime * result\r
- + ((position == null) ? 0 : position.hashCode());\r
- result = prime\r
- * result\r
- + ((extensionDefinitions == null) ? 0\r
- : extensionDefinitions.hashCode());\r
- return result;\r
- }\r
-\r
- @Override\r
- public boolean equals(Object obj) {\r
- if (this == obj) {\r
- return true;\r
- }\r
- if (obj == null) {\r
- return false;\r
- }\r
- if (getClass() != obj.getClass()) {\r
- return false;\r
- }\r
- Bit other = (Bit) obj;\r
- if (qname == null) {\r
- if (other.getQName() != null) {\r
- return false;\r
- }\r
- } else if (!qname.equals(other.getQName())) {\r
- return false;\r
- }\r
- if (schemaPath == null) {\r
- if (other.getPath() != null) {\r
- return false;\r
- }\r
- } else if (!schemaPath.equals(other.getPath())) {\r
- return false;\r
- }\r
- if (description == null) {\r
- if (other.getDescription() != null) {\r
- return false;\r
- }\r
- } else if (!description.equals(other.getDescription())) {\r
- return false;\r
- }\r
- if (reference == null) {\r
- if (other.getReference() != null) {\r
- return false;\r
- }\r
- } else if (!reference.equals(other.getReference())) {\r
- return false;\r
- }\r
- if (status == null) {\r
- if (other.getStatus() != null) {\r
- return false;\r
- }\r
- } else if (!status.equals(other.getStatus())) {\r
- return false;\r
- }\r
- if (extensionDefinitions == null) {\r
- if (other.getUnknownSchemaNodes() != null) {\r
- return false;\r
- }\r
- } else if (!extensionDefinitions.equals(other\r
- .getUnknownSchemaNodes())) {\r
- return false;\r
- }\r
- if (position == null) {\r
- if (other.getPosition() != null) {\r
- return false;\r
- }\r
- } else if (!position.equals(other.getPosition())) {\r
- return false;\r
- }\r
- return true;\r
- }\r
-\r
- @Override\r
- public String toString() {\r
- return Bit.class.getSimpleName() + "[name="\r
- + qname.getLocalName() + ", position=" + position + "]";\r
- }\r
- };\r
- }\r
-\r
- /**\r
- * Parse orderedby statement.\r
- *\r
- * @param childNode\r
- * Ordered_by_stmtContext\r
- * @return true, if orderedby contains value 'user' or false otherwise\r
- */\r
- public static boolean parseUserOrdered(Ordered_by_stmtContext childNode) {\r
- boolean result = false;\r
- for (int j = 0; j < childNode.getChildCount(); j++) {\r
- ParseTree orderArg = childNode.getChild(j);\r
- if (orderArg instanceof Ordered_by_argContext) {\r
- String orderStr = stringFromNode(orderArg);\r
- if (orderStr.equals("system")) {\r
- result = false;\r
- } else if (orderStr.equals("user")) {\r
- result = true;\r
- } else {\r
- logger.warn("Invalid 'orderedby' statement.");\r
- }\r
- }\r
- }\r
- return result;\r
- }\r
-\r
- /**\r
- * Parse given config context and return true if it contains string 'true',\r
- * false otherwise.\r
- *\r
- * @param ctx\r
- * config context to parse.\r
- * @return true if given context contains string 'true', false otherwise\r
- */\r
- public static boolean parseConfig(final Config_stmtContext ctx) {\r
- if (ctx != null) {\r
- for (int i = 0; i < ctx.getChildCount(); ++i) {\r
- final ParseTree configContext = ctx.getChild(i);\r
- if (configContext instanceof Config_argContext) {\r
- final String value = stringFromNode(configContext);\r
- if (value.equals("true")) {\r
- return true;\r
- }\r
- }\r
- }\r
- }\r
- return false;\r
- }\r
-\r
- /**\r
- * Parse given type body and creates UnknownType definition.\r
- *\r
- * @param typedefQName\r
- * qname of current type\r
- * @param ctx\r
- * type body\r
- * @return UnknownType object with constraints from parsed type body\r
- */\r
- public static TypeDefinition<?> parseUnknownTypeBody(QName typedefQName,\r
- Type_body_stmtsContext ctx) {\r
- UnknownType.Builder ut = new UnknownType.Builder(typedefQName);\r
-\r
- if (ctx != null) {\r
- List<RangeConstraint> rangeStatements = getRangeConstraints(ctx);\r
- List<LengthConstraint> lengthStatements = getLengthConstraints(ctx);\r
- List<PatternConstraint> patternStatements = getPatternConstraint(ctx);\r
- Integer fractionDigits = getFractionDigits(ctx);\r
-\r
- ut.rangeStatements(rangeStatements);\r
- ut.lengthStatements(lengthStatements);\r
- ut.patterns(patternStatements);\r
- ut.fractionDigits(fractionDigits);\r
- }\r
-\r
- return ut.build();\r
- }\r
-\r
- /**\r
- * Create TypeDefinition object based on given type name and type body.\r
- *\r
- * @param typeName\r
- * name of type\r
- * @param typeBody\r
- * type body\r
- * @param actualPath\r
- * current path in schema\r
- * @param namespace\r
- * current namespace\r
- * @param revision\r
- * current revision\r
- * @param prefix\r
- * current prefix\r
- * @return TypeDefinition object based on parsed values.\r
- */\r
- public static TypeDefinition<?> parseTypeBody(String typeName,\r
- Type_body_stmtsContext typeBody, List<String> actualPath,\r
- URI namespace, Date revision, String prefix) {\r
- TypeDefinition<?> type = null;\r
-\r
- List<RangeConstraint> rangeStatements = getRangeConstraints(typeBody);\r
- Integer fractionDigits = getFractionDigits(typeBody);\r
- List<LengthConstraint> lengthStatements = getLengthConstraints(typeBody);\r
- List<PatternConstraint> patternStatements = getPatternConstraint(typeBody);\r
- List<EnumTypeDefinition.EnumPair> enumConstants = getEnumConstants(typeBody, actualPath, namespace, revision, prefix);\r
-\r
- if (typeName.equals("decimal64")) {\r
- type = YangTypesConverter.javaTypeForBaseYangDecimal64Type(\r
- rangeStatements, fractionDigits);\r
- } else if (typeName.startsWith("int")) {\r
- type = YangTypesConverter.javaTypeForBaseYangSignedIntegerType(typeName,\r
- rangeStatements);\r
- } else if(typeName.startsWith("uint")) {\r
- type = YangTypesConverter.javaTypeForBaseYangUnsignedIntegerType(typeName,\r
- rangeStatements);\r
- } else if (typeName.equals("enumeration")) {\r
- type = new EnumerationType(enumConstants);\r
- } else if (typeName.equals("string")) {\r
- type = new StringType(lengthStatements, patternStatements);\r
- } else if (typeName.equals("bits")) {\r
- type = new BitsType(getBits(typeBody, actualPath, namespace,\r
- revision, prefix));\r
- } else if (typeName.equals("leafref")) {\r
- final String path = parseLeafrefPath(typeBody);\r
- final boolean absolute = path.startsWith("/");\r
- RevisionAwareXPath xpath = new RevisionAwareXPathImpl(path,\r
- absolute);\r
- type = new Leafref(actualPath, namespace, revision, xpath);\r
- } else if (typeName.equals("binary")) {\r
- type = new BinaryType(null, lengthStatements, null);\r
- } else if (typeName.equals("instance-identifier")) {\r
- boolean requireInstance = isRequireInstance(typeBody);\r
- type = new InstanceIdentifier(null, requireInstance);\r
- }\r
- return type;\r
- }\r
-\r
- private static String parseLeafrefPath(Type_body_stmtsContext ctx) {\r
- for (int i = 0; i < ctx.getChildCount(); i++) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof Leafref_specificationContext) {\r
- for (int j = 0; j < child.getChildCount(); j++) {\r
- ParseTree leafRefSpec = child.getChild(j);\r
- if (leafRefSpec instanceof Path_stmtContext) {\r
- return stringFromNode(leafRefSpec);\r
- }\r
- }\r
- }\r
- }\r
- return null;\r
- }\r
-\r
- /**\r
- * Internal helper method for parsing Must_stmtContext.\r
- *\r
- * @param ctx\r
- * Must_stmtContext\r
- * @return an array of strings with following fields: [0] must text [1]\r
- * description [2] reference\r
- */\r
- public static String[] parseMust(YangParser.Must_stmtContext ctx) {\r
- String[] params = new String[3];\r
-\r
- String mustText = "";\r
- String description = null;\r
- String reference = null;\r
- for (int i = 0; i < ctx.getChildCount(); ++i) {\r
- ParseTree child = ctx.getChild(i);\r
- if (child instanceof StringContext) {\r
- final StringContext context = (StringContext) child;\r
- for (int j = 0; j < context.getChildCount(); j++) {\r
- String mustPart = context.getChild(j).getText();\r
- if (j == 0) {\r
- mustText += mustPart\r
- .substring(0, mustPart.length() - 1);\r
- continue;\r
- }\r
- if (j % 2 == 0) {\r
- mustText += mustPart.substring(1);\r
- }\r
- }\r
- } else if (child instanceof Description_stmtContext) {\r
- description = stringFromNode(child);\r
- } else if (child instanceof Reference_stmtContext) {\r
- reference = stringFromNode(child);\r
- }\r
- }\r
- params[0] = mustText;\r
- params[1] = description;\r
- params[2] = reference;\r
-\r
- return params;\r
- }\r
-\r
- /**\r
- * Parse given tree and set constraints to given builder.\r
- *\r
- * @param ctx\r
- * Context to search.\r
- * @param constraintsBuilder\r
- * ConstraintsBuilder to fill.\r
- */\r
- public static void parseConstraints(ParseTree ctx,\r
- ConstraintsBuilder constraintsBuilder) {\r
- for (int i = 0; i < ctx.getChildCount(); ++i) {\r
- final ParseTree childNode = ctx.getChild(i);\r
- if (childNode instanceof Max_elements_stmtContext) {\r
- Integer max = Integer.valueOf(stringFromNode(childNode));\r
- constraintsBuilder.setMinElements(max);\r
- } else if (childNode instanceof Min_elements_stmtContext) {\r
- Integer min = Integer.valueOf(stringFromNode(childNode));\r
- constraintsBuilder.setMinElements(min);\r
- } else if (childNode instanceof Must_stmtContext) {\r
- String[] mustParams = parseMust((Must_stmtContext) childNode);\r
- constraintsBuilder.addMustDefinition(mustParams[0],\r
- mustParams[1], mustParams[2]);\r
- } else if (childNode instanceof Mandatory_stmtContext) {\r
- for (int j = 0; j < childNode.getChildCount(); j++) {\r
- ParseTree mandatoryTree = ctx.getChild(j);\r
- if (mandatoryTree instanceof Mandatory_argContext) {\r
- Boolean mandatory = Boolean\r
- .valueOf(stringFromNode(mandatoryTree));\r
- constraintsBuilder.setMandatory(mandatory);\r
- }\r
- }\r
- } else if (childNode instanceof When_stmtContext) {\r
- constraintsBuilder.addWhenCondition(stringFromNode(childNode));\r
- }\r
- }\r
- }\r
-\r
-}\r
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/eplv10.html
+ */
+package org.opendaylight.controller.yang.model.parser.util;
+
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.Stack;
+
+import org.antlr.v4.runtime.tree.ParseTree;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Argument_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Bit_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Bits_specificationContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Config_argContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Config_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Decimal64_specificationContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Description_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Enum_specificationContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Enum_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Fraction_digits_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Leafref_specificationContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Length_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Mandatory_argContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Mandatory_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Max_elements_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Min_elements_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Must_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Numerical_restrictionsContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Ordered_by_argContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Ordered_by_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Path_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Pattern_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Position_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Range_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Reference_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Require_instance_argContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Require_instance_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Status_argContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Status_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.StringContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.String_restrictionsContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Type_body_stmtsContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Units_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.When_stmtContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Yin_element_argContext;
+import org.opendaylight.controller.antlrv4.code.gen.YangParser.Yin_element_stmtContext;
+import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.RevisionAwareXPath;
+import org.opendaylight.controller.yang.model.api.SchemaPath;
+import org.opendaylight.controller.yang.model.api.Status;
+import org.opendaylight.controller.yang.model.api.TypeDefinition;
+import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
+import org.opendaylight.controller.yang.model.api.type.BitsTypeDefinition;
+import org.opendaylight.controller.yang.model.api.type.BitsTypeDefinition.Bit;
+import org.opendaylight.controller.yang.model.api.type.EnumTypeDefinition;
+import org.opendaylight.controller.yang.model.api.type.LengthConstraint;
+import org.opendaylight.controller.yang.model.api.type.PatternConstraint;
+import org.opendaylight.controller.yang.model.api.type.RangeConstraint;
+import org.opendaylight.controller.yang.model.parser.builder.api.SchemaNodeBuilder;
+import org.opendaylight.controller.yang.model.parser.builder.impl.ConstraintsBuilder;
+import org.opendaylight.controller.yang.model.util.BaseConstraints;
+import org.opendaylight.controller.yang.model.util.BinaryType;
+import org.opendaylight.controller.yang.model.util.BitsType;
+import org.opendaylight.controller.yang.model.util.EnumerationType;
+import org.opendaylight.controller.yang.model.util.InstanceIdentifier;
+import org.opendaylight.controller.yang.model.util.Leafref;
+import org.opendaylight.controller.yang.model.util.RevisionAwareXPathImpl;
+import org.opendaylight.controller.yang.model.util.StringType;
+import org.opendaylight.controller.yang.model.util.UnknownType;
+import org.opendaylight.controller.yang.model.util.YangTypesConverter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public final class YangModelBuilderUtil {
+
+ private static final Logger logger = LoggerFactory
+ .getLogger(YangModelBuilderUtil.class);
+
+ /**
+ * Parse given tree and get first string value.
+ *
+ * @param treeNode
+ * tree to parse
+ * @return first string value from given tree
+ */
+ public static String stringFromNode(final ParseTree treeNode) {
+ final String result = "";
+ for (int i = 0; i < treeNode.getChildCount(); ++i) {
+ if (treeNode.getChild(i) instanceof StringContext) {
+ final StringContext context = (StringContext) treeNode
+ .getChild(i);
+ if (context != null) {
+ return context.getChild(0).getText().replace("\"", "");
+ }
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Parse 'description', 'reference' and 'status' statements and fill in
+ * given builder.
+ *
+ * @param ctx
+ * context to parse
+ * @param builder
+ * builder to fill in with parsed statements
+ */
+ public static void parseSchemaNodeArgs(ParseTree ctx,
+ SchemaNodeBuilder builder) {
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof Description_stmtContext) {
+ String desc = stringFromNode(child);
+ builder.setDescription(desc);
+ } else if (child instanceof Reference_stmtContext) {
+ String ref = stringFromNode(child);
+ builder.setReference(ref);
+ } else if (child instanceof Status_stmtContext) {
+ Status status = parseStatus((Status_stmtContext) child);
+ builder.setStatus(status);
+ }
+ }
+ }
+
+ /**
+ * Parse given context and return its value;
+ *
+ * @param ctx
+ * status context
+ * @return value parsed from context
+ */
+ public static Status parseStatus(Status_stmtContext ctx) {
+ Status result = null;
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree statusArg = ctx.getChild(i);
+ if (statusArg instanceof Status_argContext) {
+ String statusArgStr = stringFromNode(statusArg);
+ if ("current".equals(statusArgStr)) {
+ result = Status.CURRENT;
+ } else if ("deprecated".equals(statusArgStr)) {
+ result = Status.DEPRECATED;
+ } else if ("obsolete".equals(statusArgStr)) {
+ result = Status.OBSOLETE;
+ } else {
+ logger.warn("Invalid 'status' statement: " + statusArgStr);
+ }
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Parse given tree and returns units statement as string.
+ *
+ * @param ctx
+ * context to parse
+ * @return value of units statement as string or null if there is no units
+ * statement
+ */
+ public static String parseUnits(ParseTree ctx) {
+ String units = null;
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof Units_stmtContext) {
+ units = stringFromNode(child);
+ break;
+ }
+ }
+ return units;
+ }
+
+ /**
+ * Create SchemaPath object from given path list with namespace, revision
+ * and prefix based on given values.
+ *
+ * @param actualPath
+ * current position in model
+ * @param namespace
+ * @param revision
+ * @param prefix
+ * @return SchemaPath object.
+ */
+ public static SchemaPath createActualSchemaPath(List<String> actualPath,
+ URI namespace, Date revision, String prefix) {
+ final List<QName> path = new ArrayList<QName>();
+ QName qname;
+ // start from index 1 - module name ommited
+ for (int i = 1; i < actualPath.size(); i++) {
+ qname = new QName(namespace, revision, prefix, actualPath.get(i));
+ path.add(qname);
+ }
+ return new SchemaPath(path, true);
+ }
+
+ /**
+ * Create SchemaPath from given string.
+ *
+ * @param augmentPath
+ * string representation of path
+ * @return SchemaPath object
+ */
+ public static SchemaPath parseAugmentPath(String augmentPath) {
+ boolean absolute = augmentPath.startsWith("/");
+ String[] splittedPath = augmentPath.split("/");
+ List<QName> path = new ArrayList<QName>();
+ QName name;
+ for (String pathElement : splittedPath) {
+ if (pathElement.length() > 0) {
+ String[] splittedElement = pathElement.split(":");
+ if (splittedElement.length == 1) {
+ name = new QName(null, null, null, splittedElement[0]);
+ } else {
+ name = new QName(null, null, splittedElement[0],
+ splittedElement[1]);
+ }
+ path.add(name);
+ }
+ }
+ return new SchemaPath(path, absolute);
+ }
+
+ /**
+ * Create java.util.List of QName objects from given key definition as
+ * string.
+ *
+ * @param keyDefinition
+ * key definition as string
+ * @param namespace
+ * current namespace
+ * @param revision
+ * current revision
+ * @param prefix
+ * current prefix
+ * @return YANG list key as java.util.List of QName objects
+ */
+ public static List<QName> createListKey(String keyDefinition,
+ URI namespace, Date revision, String prefix) {
+ List<QName> key = new ArrayList<QName>();
+ String[] splittedKey = keyDefinition.split(" ");
+
+ QName qname = null;
+ for (String keyElement : splittedKey) {
+ if (keyElement.length() != 0) {
+ qname = new QName(namespace, revision, prefix, keyElement);
+ key.add(qname);
+ }
+ }
+ return key;
+ }
+
+ private static List<EnumTypeDefinition.EnumPair> getEnumConstants(
+ Type_body_stmtsContext ctx, List<String> path, URI namespace,
+ Date revision, String prefix) {
+ List<EnumTypeDefinition.EnumPair> enumConstants = new ArrayList<EnumTypeDefinition.EnumPair>();
+
+ out: for (int j = 0; j < ctx.getChildCount(); j++) {
+ ParseTree enumSpecChild = ctx.getChild(j);
+ if (enumSpecChild instanceof Enum_specificationContext) {
+ for (int k = 0; k < enumSpecChild.getChildCount(); k++) {
+ ParseTree enumChild = enumSpecChild.getChild(k);
+ if (enumChild instanceof Enum_stmtContext) {
+ enumConstants.add(createEnumPair(
+ (Enum_stmtContext) enumChild, k, path,
+ namespace, revision, prefix));
+ if (k == enumSpecChild.getChildCount() - 1) {
+ break out;
+ }
+ }
+ }
+ }
+ }
+ return enumConstants;
+ }
+
+ private static EnumTypeDefinition.EnumPair createEnumPair(
+ Enum_stmtContext ctx, final int value, List<String> path,
+ final URI namespace, final Date revision, final String prefix) {
+ final String name = stringFromNode(ctx);
+ final QName qname = new QName(namespace, revision, prefix, name);
+ String description = null;
+ String reference = null;
+ Status status = null;
+ List<String> enumPairPath = new ArrayList<String>(path);
+ enumPairPath.add(name);
+
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof Description_stmtContext) {
+ description = stringFromNode(child);
+ } else if (child instanceof Reference_stmtContext) {
+ reference = stringFromNode(child);
+ } else if (child instanceof Status_stmtContext) {
+ status = parseStatus((Status_stmtContext) child);
+ }
+ }
+
+ EnumPairImpl result = new EnumPairImpl();
+ result.qname = qname;
+ result.path = createActualSchemaPath(enumPairPath, namespace, revision,
+ prefix);
+ result.description = description;
+ result.reference = reference;
+ result.status = status;
+ result.name = name;
+ result.value = value;
+ return result;
+ }
+
+ private static class EnumPairImpl implements EnumTypeDefinition.EnumPair {
+ private QName qname;
+ private SchemaPath path;
+ private String description;
+ private String reference;
+ private Status status;
+ private List<UnknownSchemaNode> extensionSchemaNodes = Collections
+ .emptyList();
+ private String name;
+ private Integer value;
+
+ @Override
+ public QName getQName() {
+ return qname;
+ }
+
+ @Override
+ public SchemaPath getPath() {
+ return path;
+ }
+
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
+ @Override
+ public String getReference() {
+ return reference;
+ }
+
+ @Override
+ public Status getStatus() {
+ return status;
+ }
+
+ @Override
+ public List<UnknownSchemaNode> getUnknownSchemaNodes() {
+ return extensionSchemaNodes;
+ }
+
+ @Override
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public Integer getValue() {
+ return value;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((qname == null) ? 0 : qname.hashCode());
+ result = prime * result + ((path == null) ? 0 : path.hashCode());
+ result = prime
+ * result
+ + ((extensionSchemaNodes == null) ? 0
+ : extensionSchemaNodes.hashCode());
+ result = prime * result + ((name == null) ? 0 : name.hashCode());
+ result = prime * result + ((value == null) ? 0 : value.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ EnumPairImpl other = (EnumPairImpl) obj;
+ if (qname == null) {
+ if (other.qname != null) {
+ return false;
+ }
+ } else if (!qname.equals(other.qname)) {
+ return false;
+ }
+ if (path == null) {
+ if (other.path != null) {
+ return false;
+ }
+ } else if (!path.equals(other.path)) {
+ return false;
+ }
+ if (extensionSchemaNodes == null) {
+ if (other.extensionSchemaNodes != null) {
+ return false;
+ }
+ } else if (!extensionSchemaNodes.equals(other.extensionSchemaNodes)) {
+ return false;
+ }
+ if (name == null) {
+ if (other.name != null) {
+ return false;
+ }
+ } else if (!name.equals(other.name)) {
+ return false;
+ }
+ if (value == null) {
+ if (other.value != null) {
+ return false;
+ }
+ } else if (!value.equals(other.value)) {
+ return false;
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return EnumTypeDefinition.EnumPair.class.getSimpleName() + "[name="
+ + name + ", value=" + value + "]";
+ }
+ };
+
+ private static List<RangeConstraint> getRangeConstraints(
+ Type_body_stmtsContext ctx) {
+ final List<RangeConstraint> rangeConstraints = new ArrayList<RangeConstraint>();
+ for (int j = 0; j < ctx.getChildCount(); j++) {
+ ParseTree numRestrChild = ctx.getChild(j);
+ if (numRestrChild instanceof Numerical_restrictionsContext) {
+ for (int k = 0; k < numRestrChild.getChildCount(); k++) {
+ ParseTree rangeChild = numRestrChild.getChild(k);
+ if (rangeChild instanceof Range_stmtContext) {
+ rangeConstraints
+ .addAll(parseRangeConstraints((Range_stmtContext) rangeChild));
+ break;
+ }
+ }
+ }
+ }
+ return rangeConstraints;
+ }
+
+ private static List<RangeConstraint> parseRangeConstraints(
+ Range_stmtContext ctx) {
+ List<RangeConstraint> rangeConstraints = new ArrayList<RangeConstraint>();
+ String description = null;
+ String reference = null;
+
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof Description_stmtContext) {
+ description = stringFromNode(child);
+ } else if (child instanceof Reference_stmtContext) {
+ reference = stringFromNode(child);
+ }
+ }
+
+ String rangeStr = stringFromNode(ctx);
+ String trimmed = rangeStr.replace(" ", "");
+ String[] splittedRange = trimmed.split("\\|");
+ for (String rangeDef : splittedRange) {
+ String[] splittedRangeDef = rangeDef.split("\\.\\.");
+ Number min;
+ Number max;
+ if (splittedRangeDef.length == 1) {
+ min = max = parseRangeValue(splittedRangeDef[0]);
+ } else {
+ min = parseRangeValue(splittedRangeDef[0]);
+ max = parseRangeValue(splittedRangeDef[1]);
+ }
+ RangeConstraint range = BaseConstraints.rangeConstraint(min, max,
+ description, reference);
+ rangeConstraints.add(range);
+ }
+
+ return rangeConstraints;
+ }
+
+ private static List<LengthConstraint> getLengthConstraints(
+ Type_body_stmtsContext ctx) {
+ List<LengthConstraint> lengthConstraints = new ArrayList<LengthConstraint>();
+ for (int j = 0; j < ctx.getChildCount(); j++) {
+ ParseTree stringRestrChild = ctx.getChild(j);
+ if (stringRestrChild instanceof String_restrictionsContext) {
+ for (int k = 0; k < stringRestrChild.getChildCount(); k++) {
+ ParseTree lengthChild = stringRestrChild.getChild(k);
+ if (lengthChild instanceof Length_stmtContext) {
+ lengthConstraints
+ .addAll(parseLengthConstraints((Length_stmtContext) lengthChild));
+ }
+ }
+ }
+ }
+ return lengthConstraints;
+ }
+
+ private static List<LengthConstraint> parseLengthConstraints(
+ Length_stmtContext ctx) {
+ List<LengthConstraint> lengthConstraints = new ArrayList<LengthConstraint>();
+ String description = null;
+ String reference = null;
+
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof Description_stmtContext) {
+ description = stringFromNode(child);
+ } else if (child instanceof Reference_stmtContext) {
+ reference = stringFromNode(child);
+ }
+ }
+
+ String lengthStr = stringFromNode(ctx);
+ String trimmed = lengthStr.replace(" ", "");
+ String[] splittedRange = trimmed.split("\\|");
+ for (String rangeDef : splittedRange) {
+ String[] splittedRangeDef = rangeDef.split("\\.\\.");
+ Number min;
+ Number max;
+ if (splittedRangeDef.length == 1) {
+ min = max = parseRangeValue(splittedRangeDef[0]);
+ } else {
+ min = parseRangeValue(splittedRangeDef[0]);
+ max = parseRangeValue(splittedRangeDef[1]);
+ }
+ LengthConstraint range = BaseConstraints.lengthConstraint(min, max,
+ description, reference);
+ lengthConstraints.add(range);
+ }
+
+ return lengthConstraints;
+ }
+
+ private static Number parseRangeValue(String value) {
+ Number result = null;
+ if ("min".equals(value) || "max".equals(value)) {
+ result = new UnknownBoundaryNumber(value);
+ } else {
+ try {
+ result = Long.valueOf(value);
+ } catch (NumberFormatException e) {
+ throw new YangParseException("Unable to parse range value '"
+ + value + "'.", e);
+ }
+ }
+ return result;
+ }
+
+ private static List<PatternConstraint> getPatternConstraint(
+ Type_body_stmtsContext ctx) {
+ List<PatternConstraint> patterns = new ArrayList<PatternConstraint>();
+
+ out: for (int j = 0; j < ctx.getChildCount(); j++) {
+ ParseTree stringRestrChild = ctx.getChild(j);
+ if (stringRestrChild instanceof String_restrictionsContext) {
+ for (int k = 0; k < stringRestrChild.getChildCount(); k++) {
+ ParseTree lengthChild = stringRestrChild.getChild(k);
+ if (lengthChild instanceof Pattern_stmtContext) {
+ patterns.add(parsePatternConstraint((Pattern_stmtContext) lengthChild));
+ if (k == lengthChild.getChildCount() - 1) {
+ break out;
+ }
+ }
+ }
+ }
+ }
+ return patterns;
+ }
+
+ /**
+ * Internal helper method.
+ *
+ * @param ctx
+ * pattern context
+ * @return PatternConstraint object
+ */
+ private static PatternConstraint parsePatternConstraint(
+ Pattern_stmtContext ctx) {
+ String description = null;
+ String reference = null;
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof Description_stmtContext) {
+ description = stringFromNode(child);
+ } else if (child instanceof Reference_stmtContext) {
+ reference = stringFromNode(child);
+ }
+ }
+ String pattern = patternStringFromNode(ctx);
+ return BaseConstraints.patternConstraint(pattern, description,
+ reference);
+ }
+
+ /**
+ * Parse given context and return pattern value.
+ *
+ * @param ctx
+ * context to parse
+ * @return pattern value as String
+ */
+ public static String patternStringFromNode(final Pattern_stmtContext ctx) {
+ StringBuilder result = new StringBuilder();
+ for (int i = 0; i < ctx.getChildCount(); ++i) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof StringContext) {
+ for (int j = 0; j < child.getChildCount(); j++) {
+ if (j % 2 == 0) {
+ String patternToken = child.getChild(j).getText();
+ result.append(patternToken.substring(1,
+ patternToken.length() - 1));
+ }
+ }
+ }
+ }
+ return result.toString();
+ }
+
+ private static Integer getFractionDigits(Type_body_stmtsContext ctx) {
+ Integer result = null;
+ for (int j = 0; j < ctx.getChildCount(); j++) {
+ ParseTree dec64specChild = ctx.getChild(j);
+ if (dec64specChild instanceof Decimal64_specificationContext) {
+ result = parseFractionDigits((Decimal64_specificationContext) dec64specChild);
+ }
+ }
+ return result;
+ }
+
+ private static Integer parseFractionDigits(
+ Decimal64_specificationContext ctx) {
+ Integer result = null;
+ for (int k = 0; k < ctx.getChildCount(); k++) {
+ ParseTree fdChild = ctx.getChild(k);
+ if (fdChild instanceof Fraction_digits_stmtContext) {
+ String value = stringFromNode(fdChild);
+ try {
+ result = Integer.valueOf(value);
+ } catch (NumberFormatException e) {
+ throw new YangParseException(
+ "Unable to parse fraction digits value '" + value
+ + "'.", e);
+ }
+ }
+ }
+ return result;
+ }
+
+ private static List<BitsTypeDefinition.Bit> getBits(
+ Type_body_stmtsContext ctx, List<String> actualPath, URI namespace,
+ Date revision, String prefix) {
+ List<BitsTypeDefinition.Bit> bits = new ArrayList<BitsTypeDefinition.Bit>();
+ for (int j = 0; j < ctx.getChildCount(); j++) {
+ ParseTree bitsSpecChild = ctx.getChild(j);
+ if (bitsSpecChild instanceof Bits_specificationContext) {
+ long highestPosition = -1;
+ for (int k = 0; k < bitsSpecChild.getChildCount(); k++) {
+ ParseTree bitChild = bitsSpecChild.getChild(k);
+ if (bitChild instanceof Bit_stmtContext) {
+ Bit bit = parseBit((Bit_stmtContext) bitChild,
+ highestPosition, actualPath, namespace,
+ revision, prefix);
+ if (bit.getPosition() > highestPosition) {
+ highestPosition = bit.getPosition();
+ }
+ bits.add(bit);
+ }
+ }
+ }
+ }
+ return bits;
+ }
+
+ private static BitsTypeDefinition.Bit parseBit(final Bit_stmtContext ctx,
+ long highestPosition, List<String> actualPath, final URI namespace,
+ final Date revision, final String prefix) {
+ String name = stringFromNode(ctx);
+ final QName qname = new QName(namespace, revision, prefix, name);
+ Long position = null;
+
+ String description = null;
+ String reference = null;
+ Status status = Status.CURRENT;
+
+ Stack<String> bitPath = new Stack<String>();
+ bitPath.addAll(actualPath);
+ bitPath.add(name);
+
+ SchemaPath schemaPath = createActualSchemaPath(bitPath, namespace,
+ revision, prefix);
+
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof Position_stmtContext) {
+ String positionStr = stringFromNode(child);
+ position = Long.valueOf(positionStr);
+ } else if (child instanceof Description_stmtContext) {
+ description = stringFromNode(child);
+ } else if (child instanceof Reference_stmtContext) {
+ reference = stringFromNode(child);
+ } else if (child instanceof Status_stmtContext) {
+ status = parseStatus((Status_stmtContext) child);
+ }
+ }
+
+ if (position == null) {
+ position = highestPosition + 1;
+ }
+ if (position < 0 || position > 4294967295L) {
+ throw new YangParseException(
+ "Error on bit '"
+ + name
+ + "': the position value MUST be in the range 0 to 4294967295");
+ }
+
+ final List<UnknownSchemaNode> extensionNodes = Collections.emptyList();
+ return createBit(qname, schemaPath, description, reference, status,
+ extensionNodes, position);
+ }
+
+ private static BitsTypeDefinition.Bit createBit(final QName qname,
+ final SchemaPath schemaPath, final String description,
+ final String reference, final Status status,
+ final List<UnknownSchemaNode> unknownNodes, final Long position) {
+ return new BitsTypeDefinition.Bit() {
+
+ @Override
+ public QName getQName() {
+ return qname;
+ }
+
+ @Override
+ public SchemaPath getPath() {
+ return schemaPath;
+ }
+
+ @Override
+ public String getDescription() {
+ return description;
+ }
+
+ @Override
+ public String getReference() {
+ return reference;
+ }
+
+ @Override
+ public Status getStatus() {
+ return status;
+ }
+
+ @Override
+ public List<UnknownSchemaNode> getUnknownSchemaNodes() {
+ return unknownNodes;
+ }
+
+ @Override
+ public Long getPosition() {
+ return position;
+ }
+
+ @Override
+ public String getName() {
+ return qname.getLocalName();
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result
+ + ((qname == null) ? 0 : qname.hashCode());
+ result = prime * result
+ + ((schemaPath == null) ? 0 : schemaPath.hashCode());
+ result = prime * result
+ + ((position == null) ? 0 : position.hashCode());
+ result = prime
+ * result
+ + ((unknownNodes == null) ? 0 : unknownNodes.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null) {
+ return false;
+ }
+ if (getClass() != obj.getClass()) {
+ return false;
+ }
+ Bit other = (Bit) obj;
+ if (qname == null) {
+ if (other.getQName() != null) {
+ return false;
+ }
+ } else if (!qname.equals(other.getQName())) {
+ return false;
+ }
+ if (schemaPath == null) {
+ if (other.getPath() != null) {
+ return false;
+ }
+ } else if (!schemaPath.equals(other.getPath())) {
+ return false;
+ }
+ if (unknownNodes == null) {
+ if (other.getUnknownSchemaNodes() != null) {
+ return false;
+ }
+ } else if (!unknownNodes.equals(other.getUnknownSchemaNodes())) {
+ return false;
+ }
+ if (position == null) {
+ if (other.getPosition() != null) {
+ return false;
+ }
+ } else if (!position.equals(other.getPosition())) {
+ return false;
+ }
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return Bit.class.getSimpleName() + "[name="
+ + qname.getLocalName() + ", position=" + position + "]";
+ }
+ };
+ }
+
+ /**
+ * Parse orderedby statement.
+ *
+ * @param childNode
+ * Ordered_by_stmtContext
+ * @return true, if orderedby contains value 'user' or false otherwise
+ */
+ public static boolean parseUserOrdered(Ordered_by_stmtContext childNode) {
+ boolean result = false;
+ for (int j = 0; j < childNode.getChildCount(); j++) {
+ ParseTree orderArg = childNode.getChild(j);
+ if (orderArg instanceof Ordered_by_argContext) {
+ String orderStr = stringFromNode(orderArg);
+ if ("system".equals(orderStr)) {
+ result = false;
+ } else if ("user".equals(orderStr)) {
+ result = true;
+ } else {
+ logger.warn("Invalid 'orderedby' statement.");
+ }
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Parse given config context and return true if it contains string 'true',
+ * false otherwise.
+ *
+ * @param ctx
+ * config context to parse.
+ * @return true if given context contains string 'true', false otherwise
+ */
+ public static boolean parseConfig(final Config_stmtContext ctx) {
+ boolean result = false;
+ if (ctx != null) {
+ for (int i = 0; i < ctx.getChildCount(); ++i) {
+ final ParseTree configContext = ctx.getChild(i);
+ if (configContext instanceof Config_argContext) {
+ final String value = stringFromNode(configContext);
+ if ("true".equals(value)) {
+ result = true;
+ break;
+ }
+ }
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Parse given type body and creates UnknownType definition.
+ *
+ * @param typedefQName
+ * qname of current type
+ * @param ctx
+ * type body
+ * @return UnknownType object with constraints from parsed type body
+ */
+ public static TypeDefinition<?> parseUnknownTypeBody(QName typedefQName,
+ Type_body_stmtsContext ctx) {
+ UnknownType.Builder unknownType = new UnknownType.Builder(typedefQName);
+
+ if (ctx != null) {
+ List<RangeConstraint> rangeStatements = getRangeConstraints(ctx);
+ List<LengthConstraint> lengthStatements = getLengthConstraints(ctx);
+ List<PatternConstraint> patternStatements = getPatternConstraint(ctx);
+ Integer fractionDigits = getFractionDigits(ctx);
+
+ unknownType.rangeStatements(rangeStatements);
+ unknownType.lengthStatements(lengthStatements);
+ unknownType.patterns(patternStatements);
+ unknownType.fractionDigits(fractionDigits);
+ }
+
+ return unknownType.build();
+ }
+
+ /**
+ * Create TypeDefinition object based on given type name and type body.
+ *
+ * @param typeName
+ * name of type
+ * @param typeBody
+ * type body
+ * @param actualPath
+ * current path in schema
+ * @param namespace
+ * current namespace
+ * @param revision
+ * current revision
+ * @param prefix
+ * current prefix
+ * @return TypeDefinition object based on parsed values.
+ */
+ public static TypeDefinition<?> parseTypeBody(String typeName,
+ Type_body_stmtsContext typeBody, List<String> actualPath,
+ URI namespace, Date revision, String prefix) {
+ TypeDefinition<?> type = null;
+
+ List<RangeConstraint> rangeStatements = getRangeConstraints(typeBody);
+ Integer fractionDigits = getFractionDigits(typeBody);
+ List<LengthConstraint> lengthStatements = getLengthConstraints(typeBody);
+ List<PatternConstraint> patternStatements = getPatternConstraint(typeBody);
+ List<EnumTypeDefinition.EnumPair> enumConstants = getEnumConstants(
+ typeBody, actualPath, namespace, revision, prefix);
+
+ if ("decimal64".equals(typeName)) {
+ type = YangTypesConverter.javaTypeForBaseYangDecimal64Type(
+ rangeStatements, fractionDigits);
+ } else if (typeName.startsWith("int")) {
+ type = YangTypesConverter.javaTypeForBaseYangSignedIntegerType(
+ typeName, rangeStatements);
+ } else if (typeName.startsWith("uint")) {
+ type = YangTypesConverter.javaTypeForBaseYangUnsignedIntegerType(
+ typeName, rangeStatements);
+ } else if ("enumeration".equals(typeName)) {
+ type = new EnumerationType(enumConstants);
+ } else if ("string".equals(typeName)) {
+ type = new StringType(lengthStatements, patternStatements);
+ } else if ("bits".equals(typeName)) {
+ type = new BitsType(getBits(typeBody, actualPath, namespace,
+ revision, prefix));
+ } else if ("leafref".equals(typeName)) {
+ final String path = parseLeafrefPath(typeBody);
+ final boolean absolute = path.startsWith("/");
+ RevisionAwareXPath xpath = new RevisionAwareXPathImpl(path,
+ absolute);
+ type = new Leafref(xpath);
+ } else if ("binary".equals(typeName)) {
+ List<Byte> bytes = Collections.emptyList();
+ type = new BinaryType(bytes, lengthStatements, null);
+ } else if ("instance-identifier".equals(typeName)) {
+ boolean requireInstance = isRequireInstance(typeBody);
+ type = new InstanceIdentifier(null, requireInstance);
+ }
+ return type;
+ }
+
+ private static boolean isRequireInstance(Type_body_stmtsContext ctx) {
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof Require_instance_stmtContext) {
+ for (int j = 0; j < child.getChildCount(); j++) {
+ ParseTree reqArg = child.getChild(j);
+ if (reqArg instanceof Require_instance_argContext) {
+ return Boolean.valueOf(stringFromNode(reqArg));
+ }
+ }
+ }
+ }
+ return false;
+ }
+
+ private static String parseLeafrefPath(Type_body_stmtsContext ctx) {
+ for (int i = 0; i < ctx.getChildCount(); i++) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof Leafref_specificationContext) {
+ for (int j = 0; j < child.getChildCount(); j++) {
+ ParseTree leafRefSpec = child.getChild(j);
+ if (leafRefSpec instanceof Path_stmtContext) {
+ return stringFromNode(leafRefSpec);
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Internal helper method for parsing Must_stmtContext.
+ *
+ * @param ctx
+ * Must_stmtContext
+ * @return an array of strings with following fields: [0] must text [1]
+ * description [2] reference
+ */
+ public static String[] parseMust(YangParser.Must_stmtContext ctx) {
+ String[] params = new String[3];
+
+ StringBuilder mustText = new StringBuilder();
+ String description = null;
+ String reference = null;
+ for (int i = 0; i < ctx.getChildCount(); ++i) {
+ ParseTree child = ctx.getChild(i);
+ if (child instanceof StringContext) {
+ final StringContext context = (StringContext) child;
+ for (int j = 0; j < context.getChildCount(); j++) {
+ String mustPart = context.getChild(j).getText();
+ if (j == 0) {
+ mustText.append(mustPart.substring(0,
+ mustPart.length() - 1));
+ continue;
+ }
+ if (j % 2 == 0) {
+ mustText.append(mustPart.substring(1));
+ }
+ }
+ } else if (child instanceof Description_stmtContext) {
+ description = stringFromNode(child);
+ } else if (child instanceof Reference_stmtContext) {
+ reference = stringFromNode(child);
+ }
+ }
+ params[0] = mustText.toString();
+ params[1] = description;
+ params[2] = reference;
+
+ return params;
+ }
+
+ /**
+ * Parse given tree and set constraints to given builder.
+ *
+ * @param ctx
+ * Context to search.
+ * @param constraintsBuilder
+ * ConstraintsBuilder to fill.
+ */
+ public static void parseConstraints(ParseTree ctx,
+ ConstraintsBuilder constraintsBuilder) {
+ for (int i = 0; i < ctx.getChildCount(); ++i) {
+ final ParseTree childNode = ctx.getChild(i);
+ if (childNode instanceof Max_elements_stmtContext) {
+ Integer max = Integer.valueOf(stringFromNode(childNode));
+ constraintsBuilder.setMinElements(max);
+ } else if (childNode instanceof Min_elements_stmtContext) {
+ Integer min = Integer.valueOf(stringFromNode(childNode));
+ constraintsBuilder.setMinElements(min);
+ } else if (childNode instanceof Must_stmtContext) {
+ String[] mustParams = parseMust((Must_stmtContext) childNode);
+ constraintsBuilder.addMustDefinition(mustParams[0],
+ mustParams[1], mustParams[2]);
+ } else if (childNode instanceof Mandatory_stmtContext) {
+ for (int j = 0; j < childNode.getChildCount(); j++) {
+ ParseTree mandatoryTree = ctx.getChild(j);
+ if (mandatoryTree instanceof Mandatory_argContext) {
+ Boolean mandatory = Boolean
+ .valueOf(stringFromNode(mandatoryTree));
+ constraintsBuilder.setMandatory(mandatory);
+ }
+ }
+ } else if (childNode instanceof When_stmtContext) {
+ constraintsBuilder.addWhenCondition(stringFromNode(childNode));
+ }
+ }
+ }
+
+ /**
+ * Parse given context and return yin value.
+ *
+ * @param ctx
+ * context to parse
+ * @return true if value is 'true', false otherwise
+ */
+ public static boolean parseYinValue(Argument_stmtContext ctx) {
+ boolean yinValue = false;
+ outer: for (int j = 0; j < ctx.getChildCount(); j++) {
+ ParseTree yin = ctx.getChild(j);
+ if (yin instanceof Yin_element_stmtContext) {
+ for (int k = 0; k < yin.getChildCount(); k++) {
+ ParseTree yinArg = yin.getChild(k);
+ if (yinArg instanceof Yin_element_argContext) {
+ String yinString = stringFromNode(yinArg);
+ if ("true".equals(yinString)) {
+ yinValue = true;
+ break outer;
+ }
+ }
+ }
+ }
+ }
+ return yinValue;
+ }
+
+}
super(errorMsg);
}
+ public YangParseException(String errorMsg, Exception exception) {
+ super(errorMsg, exception);
+ }
+
}
+++ /dev/null
-/*
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.yang.model.parser.builder.impl;
-
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.when;
-
-import java.net.URI;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.opendaylight.controller.yang.common.QName;
-import org.opendaylight.controller.yang.model.api.AugmentationSchema;
-import org.opendaylight.controller.yang.model.api.ContainerSchemaNode;
-import org.opendaylight.controller.yang.model.api.GroupingDefinition;
-import org.opendaylight.controller.yang.model.api.SchemaPath;
-import org.opendaylight.controller.yang.model.api.Status;
-import org.opendaylight.controller.yang.model.api.TypeDefinition;
-import org.opendaylight.controller.yang.model.api.UsesNode;
-import org.opendaylight.controller.yang.model.parser.builder.api.GroupingBuilder;
-import org.opendaylight.controller.yang.model.parser.builder.api.UsesNodeBuilder;
-
-public class ContainerSchemaNodeBuilderTest {
-
- private ContainerSchemaNodeBuilder tested;
-
- private final String NAME = "test-container";
-
- private final URI namespace = URI.create("test:container.name");
- private final Date revision = new Date();
- private final String prefix = "x";
-
- private SchemaPath schemaPath;
- private final String description = "description of container";
- private final String reference = "reference";
-
- private QName typedefQName;
- private TypedefBuilder typeBuilder;
- @Mock
- private AugmentationSchema augment;
- @Mock
- private UsesNodeBuilder usesBuilder;
- @Mock
- private UsesNode uses;
- @Mock
- private GroupingBuilder groupingBuilder;
- @Mock
- private GroupingDefinition grouping;
-
- @Before
- public void init() {
- MockitoAnnotations.initMocks(this);
- when(usesBuilder.build()).thenReturn(uses);
- when(groupingBuilder.build()).thenReturn(grouping);
-
- schemaPath = TestUtils.createSchemaPath(true, namespace, "main",
- "interface");
- typedefQName = new QName(namespace, "test-type");
- typeBuilder = new TypedefBuilder(typedefQName);
-
- QName qname = new QName(namespace, revision, prefix, NAME);
- tested = new ContainerSchemaNodeBuilder(qname);
- }
-
- @Test
- public void test() {
- tested.addTypedef(typeBuilder);
- tested.setPath(schemaPath);
- tested.setDescription(description);
- tested.setReference(reference);
- tested.setStatus(Status.OBSOLETE);
- tested.setConfiguration(false);
- tested.addUsesNode(usesBuilder);
- tested.addAugmentation(augment);
- tested.setPresenceContainer(true);
-
- ContainerSchemaNode result = tested.build();
-
- Set<TypeDefinition<?>> expectedTypedefs = result.getTypeDefinitions();
- assertEquals(1, expectedTypedefs.size());
- assertEquals(typedefQName, expectedTypedefs.iterator().next()
- .getQName());
-
- Set<AugmentationSchema> expectedAugments = new HashSet<AugmentationSchema>();
- expectedAugments.add(augment);
- assertEquals(expectedAugments, result.getAvailableAugmentations());
-
- assertEquals(schemaPath, result.getPath());
- assertEquals(description, result.getDescription());
- assertEquals(reference, result.getReference());
- assertEquals(Status.OBSOLETE, result.getStatus());
- assertFalse(result.isConfiguration());
-
- Set<UsesNode> expectedUses = new HashSet<UsesNode>();
- expectedUses.add(uses);
- assertEquals(expectedUses, result.getUses());
-
- assertTrue(result.isPresenceContainer());
- }
-
-}
+++ /dev/null
-/*
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.yang.model.parser.builder.impl;
-
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.when;
-
-import java.net.URI;
-import java.util.Date;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.opendaylight.controller.yang.common.QName;
-import org.opendaylight.controller.yang.model.api.GroupingDefinition;
-import org.opendaylight.controller.yang.model.api.LeafListSchemaNode;
-import org.opendaylight.controller.yang.model.api.SchemaPath;
-import org.opendaylight.controller.yang.model.api.Status;
-import org.opendaylight.controller.yang.model.api.TypeDefinition;
-import org.opendaylight.controller.yang.model.api.UsesNode;
-import org.opendaylight.controller.yang.model.parser.builder.api.GroupingBuilder;
-import org.opendaylight.controller.yang.model.parser.builder.api.UsesNodeBuilder;
-
-public class LeafListSchemaNodeBuilderTest {
-
- private LeafListSchemaNodeBuilder tested;
-
- private final String NAME = "test-leaf";
-
- private final URI namespace = URI.create("test:leaf.name");
- private final Date revision = new Date();
- private final String prefix = "x";
-
- private SchemaPath schemaPath;
- private final String description = "description of container";
- private final String reference = "reference";
-
- private QName typedefQName;
- private TypeDefinition<?> type;
-
- @Mock
- private UsesNodeBuilder usesBuilder;
- @Mock
- private UsesNode uses;
- @Mock
- private GroupingBuilder groupingBuilder;
- @Mock
- private GroupingDefinition grouping;
-
- @Before
- public void init() {
- MockitoAnnotations.initMocks(this);
- when(usesBuilder.build()).thenReturn(uses);
- when(groupingBuilder.build()).thenReturn(grouping);
-
- schemaPath = TestUtils.createSchemaPath(true, namespace, "main",
- "interface");
- typedefQName = new QName(namespace, "test-type");
- TypedefBuilder typeBuilder = new TypedefBuilder(typedefQName);
- type = typeBuilder.build();
-
- QName qname = new QName(namespace, revision, prefix, NAME);
- tested = new LeafListSchemaNodeBuilder(qname);
- }
-
- @Test
- public void test() {
- tested.setType(type);
- tested.setPath(schemaPath);
- tested.setDescription(description);
- tested.setReference(reference);
- tested.setStatus(Status.OBSOLETE);
- tested.setConfiguration(false);
-
- LeafListSchemaNode result = tested.build();
-
- assertEquals(type, result.getType());
- assertEquals(schemaPath, result.getPath());
- assertEquals(description, result.getDescription());
- assertEquals(reference, result.getReference());
- assertEquals(Status.OBSOLETE, result.getStatus());
- assertFalse(result.isConfiguration());
- }
-
-}
+++ /dev/null
-/*
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.yang.model.parser.builder.impl;
-
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.when;
-
-import java.net.URI;
-import java.util.Date;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.opendaylight.controller.yang.common.QName;
-import org.opendaylight.controller.yang.model.api.GroupingDefinition;
-import org.opendaylight.controller.yang.model.api.LeafSchemaNode;
-import org.opendaylight.controller.yang.model.api.SchemaPath;
-import org.opendaylight.controller.yang.model.api.Status;
-import org.opendaylight.controller.yang.model.api.TypeDefinition;
-import org.opendaylight.controller.yang.model.api.UsesNode;
-import org.opendaylight.controller.yang.model.parser.builder.api.GroupingBuilder;
-import org.opendaylight.controller.yang.model.parser.builder.api.UsesNodeBuilder;
-
-public class LeafSchemaNodeBuilderTest {
-
- private LeafSchemaNodeBuilder tested;
-
- private final String NAME = "test-leaf";
-
- private final URI namespace = URI.create("test:leaf.name");
- private final Date revision = new Date();
- private final String prefix = "x";
-
- private SchemaPath schemaPath;
- private final String description = "description of container";
- private final String reference = "reference";
-
- private QName typedefQName;
- private TypeDefinition<?> type;
- @Mock
- private UsesNodeBuilder usesBuilder;
- @Mock
- private UsesNode uses;
- @Mock
- private GroupingBuilder groupingBuilder;
- @Mock
- private GroupingDefinition grouping;
-
- @Before
- public void init() {
- MockitoAnnotations.initMocks(this);
- when(usesBuilder.build()).thenReturn(uses);
- when(groupingBuilder.build()).thenReturn(grouping);
-
- schemaPath = TestUtils.createSchemaPath(true, namespace, "main",
- "interface");
- typedefQName = new QName(namespace, "test-type");
- TypedefBuilder typeBuilder = new TypedefBuilder(typedefQName);
- type = typeBuilder.build();
-
- QName qname = new QName(namespace, revision, prefix, NAME);
- tested = new LeafSchemaNodeBuilder(qname);
- }
-
- @Test
- public void test() {
- tested.setType(type);
- tested.setPath(schemaPath);
- tested.setDescription(description);
- tested.setReference(reference);
- tested.setStatus(Status.OBSOLETE);
- tested.setConfiguration(false);
-
- LeafSchemaNode result = tested.build();
-
- assertEquals(type, result.getType());
- assertEquals(schemaPath, result.getPath());
- assertEquals(description, result.getDescription());
- assertEquals(reference, result.getReference());
- assertEquals(Status.OBSOLETE, result.getStatus());
- assertFalse(result.isConfiguration());
- }
-
-}
+++ /dev/null
-/*
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
- *
- * This program and the accompanying materials are made available under the
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,
- * and is available at http://www.eclipse.org/legal/epl-v10.html
- */
-package org.opendaylight.controller.yang.model.parser.builder.impl;
-
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.when;
-
-import java.net.URI;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.opendaylight.controller.yang.common.QName;
-import org.opendaylight.controller.yang.model.api.AugmentationSchema;
-import org.opendaylight.controller.yang.model.api.GroupingDefinition;
-import org.opendaylight.controller.yang.model.api.ListSchemaNode;
-import org.opendaylight.controller.yang.model.api.SchemaPath;
-import org.opendaylight.controller.yang.model.api.Status;
-import org.opendaylight.controller.yang.model.api.TypeDefinition;
-import org.opendaylight.controller.yang.model.api.UsesNode;
-import org.opendaylight.controller.yang.model.parser.builder.api.GroupingBuilder;
-import org.opendaylight.controller.yang.model.parser.builder.api.UsesNodeBuilder;
-
-public class ListSchemaNodeBuilderTest {
-
- private ListSchemaNodeBuilder tested;
-
- private static final String NAME = "test-list";
-
- private final URI namespace = URI.create("test:list.name");
- private final Date revision = new Date();
- private final String prefix = "x";
-
- private SchemaPath schemaPath;
- private final String description = "description of list";
- private final String reference = "reference";
-
- private QName typedefQName;
- private TypedefBuilder typeBuilder;
- @Mock
- private AugmentationSchema augment;
- @Mock
- private UsesNodeBuilder usesBuilder;
- @Mock
- private UsesNode uses;
- @Mock
- private GroupingBuilder groupingBuilder;
- @Mock
- private GroupingDefinition grouping;
- private List<QName> keyDefinition;
-
- @Before
- public void init() {
- MockitoAnnotations.initMocks(this);
- when(usesBuilder.build()).thenReturn(uses);
- when(groupingBuilder.build()).thenReturn(grouping);
-
- schemaPath = TestUtils.createSchemaPath(true, namespace, "main", NAME);
- typedefQName = new QName(namespace, "test-type");
- typeBuilder = new TypedefBuilder(typedefQName);
-
- keyDefinition = new ArrayList<QName>();
- keyDefinition.add(new QName(namespace, "name"));
-
- QName qname = new QName(namespace, revision, prefix, NAME);
- tested = new ListSchemaNodeBuilder(qname);
- }
-
- @Test
- public void test() {
- tested.addTypedef(typeBuilder);
- tested.setPath(schemaPath);
- tested.setDescription(description);
- tested.setReference(reference);
- tested.setStatus(Status.OBSOLETE);
- tested.setConfiguration(false);
- tested.addUsesNode(usesBuilder);
- tested.addAugmentation(augment);
- tested.setUserOrdered(true);
- tested.setKeyDefinition(keyDefinition);
-
- ListSchemaNode result = tested.build();
-
- Set<TypeDefinition<?>> expectedTypedefs = result.getTypeDefinitions();
- assertEquals(1, expectedTypedefs.size());
- assertEquals(typedefQName, expectedTypedefs.iterator().next()
- .getQName());
-
- Set<AugmentationSchema> expectedAugments = new HashSet<AugmentationSchema>();
- expectedAugments.add(augment);
- assertEquals(expectedAugments, result.getAvailableAugmentations());
-
- assertEquals(schemaPath, result.getPath());
- assertEquals(description, result.getDescription());
- assertEquals(reference, result.getReference());
- assertEquals(Status.OBSOLETE, result.getStatus());
- assertFalse(result.isConfiguration());
-
- Set<UsesNode> expectedUses = new HashSet<UsesNode>();
- expectedUses.add(uses);
- assertEquals(expectedUses, result.getUses());
-
- assertTrue(result.isUserOrdered());
- assertEquals(keyDefinition, result.getKeyDefinition());
- }
-
-}
-/*\r
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.\r
- *\r
- * This program and the accompanying materials are made available under the\r
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,\r
- * and is available at http://www.eclipse.org/legal/epl-v10.html\r
- */\r
-package org.opendaylight.controller.yang.model.parser.impl;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.io.File;\r
-import java.util.List;\r
-import java.util.Set;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.opendaylight.controller.yang.model.api.IdentitySchemaNode;\r
-import org.opendaylight.controller.yang.model.api.LeafSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.Module;\r
-import org.opendaylight.controller.yang.model.api.TypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.EnumTypeDefinition.EnumPair;\r
-import org.opendaylight.controller.yang.model.api.type.LengthConstraint;\r
-import org.opendaylight.controller.yang.model.api.type.PatternConstraint;\r
-import org.opendaylight.controller.yang.model.parser.api.YangModelParser;\r
-import org.opendaylight.controller.yang.model.util.EnumerationType;\r
-import org.opendaylight.controller.yang.model.util.InstanceIdentifier;\r
-import org.opendaylight.controller.yang.model.util.StringType;\r
-import org.opendaylight.controller.yang.model.util.UnionType;\r
-\r
-public class TypesResolutionTest {\r
-\r
- private YangModelParser parser;\r
- private String[] testFiles;\r
- private Set<Module> modules;\r
-\r
- @Before\r
- public void init() {\r
- parser = new YangModelParserImpl();\r
- File testDir = new File("src/test/resources/types");\r
- String[] fileList = testDir.list();\r
- testFiles = new String[fileList.length];\r
- int i = 0;\r
- for(String fileName : fileList) {\r
- File file = new File(testDir, fileName);\r
- testFiles[i] = file.getAbsolutePath();\r
- i++;\r
- }\r
- modules = parser.parseYangModels(testFiles);\r
- assertEquals(fileList.length, modules.size());\r
- }\r
-\r
- @Test\r
- public void testIPVersion() {\r
- Module tested = findModule(modules, "ietf-inet-types");\r
- Set<TypeDefinition<?>> typedefs = tested.getTypeDefinitions();\r
- assertEquals(14, typedefs.size());\r
-\r
- TypeDefinition<?> type = findTypedef(typedefs, "ip-version");\r
- EnumerationType en = (EnumerationType)type.getBaseType();\r
- List<EnumPair> values = en.getValues();\r
- assertEquals(3, values.size());\r
-\r
- EnumPair value0 = values.get(0);\r
- assertEquals("unknown", value0.getName());\r
- assertEquals(0, (int)value0.getValue());\r
-\r
- EnumPair value1 = values.get(1);\r
- assertEquals("ipv4", value1.getName());\r
- assertEquals(1, (int)value1.getValue());\r
-\r
- EnumPair value2 = values.get(2);\r
- assertEquals("ipv6", value2.getName());\r
- assertEquals(2, (int)value2.getValue());\r
- }\r
-\r
- @Test\r
- public void testIpAddress() {\r
- Module tested = findModule(modules, "ietf-inet-types");\r
- Set<TypeDefinition<?>> typedefs = tested.getTypeDefinitions();\r
- TypeDefinition<?> type = findTypedef(typedefs, "ip-address");\r
- UnionType baseType = (UnionType)type.getBaseType();\r
- List<TypeDefinition<?>> unionTypes = baseType.getTypes();\r
-\r
- StringType ipv4 = (StringType)unionTypes.get(0);\r
- String expectedPattern =\r
- "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}"\r
- + "([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])"\r
- + "(%[\\p{N}\\p{L}]+)?";\r
- assertEquals(expectedPattern, ipv4.getPatterns().get(0).getRegularExpression());\r
-\r
- StringType ipv6 = (StringType)unionTypes.get(1);\r
- List<PatternConstraint> ipv6Patterns = ipv6.getPatterns();\r
- expectedPattern = "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}"\r
- + "((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|"\r
- + "(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}"\r
- + "(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))"\r
- + "(%[\\p{N}\\p{L}]+)?";\r
- assertEquals(expectedPattern, ipv6Patterns.get(0).getRegularExpression());\r
-\r
- expectedPattern = "(([^:]+:){6}(([^:]+:[^:]+)|(.*\\..*)))|"\r
- + "((([^:]+:)*[^:]+)?::(([^:]+:)*[^:]+)?)"\r
- + "(%.+)?";\r
- assertEquals(expectedPattern, ipv6Patterns.get(1).getRegularExpression());\r
- }\r
-\r
- @Test\r
- public void testDomainName() {\r
- Module tested = findModule(modules, "ietf-inet-types");\r
- Set<TypeDefinition<?>> typedefs = tested.getTypeDefinitions();\r
- TypeDefinition<?> type = findTypedef(typedefs, "domain-name");\r
- StringType baseType = (StringType)type.getBaseType();\r
- List<PatternConstraint> patterns = baseType.getPatterns();\r
- assertEquals(1, patterns.size());\r
- String expectedPattern = "((([a-zA-Z0-9_]([a-zA-Z0-9\\-_]){0,61})?[a-zA-Z0-9]\\.)*"\r
- + "([a-zA-Z0-9_]([a-zA-Z0-9\\-_]){0,61})?[a-zA-Z0-9]\\.?)"\r
- + "|\\.";\r
- assertEquals(expectedPattern, patterns.get(0).getRegularExpression());\r
-\r
- List<LengthConstraint> lengths = baseType.getLengthStatements();\r
- assertEquals(1, lengths.size());\r
- LengthConstraint length = baseType.getLengthStatements().get(0);\r
- assertEquals(1L, length.getMin().longValue());\r
- assertEquals(253L, length.getMax().longValue());\r
- }\r
-\r
- @Test\r
- public void testInstanceIdentifier1() {\r
- Module tested = findModule(modules, "custom-types-test");\r
- LeafSchemaNode leaf = (LeafSchemaNode)tested.getDataChildByName("inst-id-leaf1");\r
- InstanceIdentifier leafType = (InstanceIdentifier)leaf.getType();\r
- assertFalse(leafType.requireInstance());\r
- }\r
-\r
- @Test\r
- public void testInstanceIdentifier2() {\r
- Module tested = findModule(modules, "custom-types-test");\r
- LeafSchemaNode leaf = (LeafSchemaNode)tested.getDataChildByName("inst-id-leaf2");\r
- InstanceIdentifier leafType = (InstanceIdentifier)leaf.getType();\r
- assertTrue(leafType.requireInstance());\r
- }\r
-\r
- @Test\r
- public void testIdentity() {\r
- Module tested = findModule(modules, "custom-types-test");\r
- Set<IdentitySchemaNode> identities = tested.getIdentities();\r
- IdentitySchemaNode testedIdentity = null;\r
- for(IdentitySchemaNode id : identities) {\r
- if(id.getQName().getLocalName().equals("crypto-alg")) {\r
- testedIdentity = id;\r
- IdentitySchemaNode baseIdentity = id.getBaseIdentity();\r
- assertEquals("crypto-base", baseIdentity.getQName().getLocalName());\r
- assertNull(baseIdentity.getBaseIdentity());\r
- }\r
- }\r
- assertNotNull(testedIdentity);\r
- }\r
-\r
- private Module findModule(Set<Module> modules, String name) {\r
- for(Module module : modules) {\r
- if(module.getName().equals(name)) {\r
- return module;\r
- }\r
- }\r
- return null;\r
- }\r
-\r
- private TypeDefinition<?> findTypedef(Set<TypeDefinition<?>> typedefs, String name) {\r
- for(TypeDefinition<?> td : typedefs) {\r
- if(td.getQName().getLocalName().equals(name)) {\r
- return td;\r
- }\r
- }\r
- return null;\r
- }\r
-\r
-}\r
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.yang.model.parser.impl;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.controller.yang.model.api.IdentitySchemaNode;
+import org.opendaylight.controller.yang.model.api.LeafSchemaNode;
+import org.opendaylight.controller.yang.model.api.Module;
+import org.opendaylight.controller.yang.model.api.TypeDefinition;
+import org.opendaylight.controller.yang.model.api.type.BitsTypeDefinition.Bit;
+import org.opendaylight.controller.yang.model.api.type.EnumTypeDefinition.EnumPair;
+import org.opendaylight.controller.yang.model.api.type.LengthConstraint;
+import org.opendaylight.controller.yang.model.api.type.PatternConstraint;
+import org.opendaylight.controller.yang.model.parser.api.YangModelParser;
+import org.opendaylight.controller.yang.model.util.BitsType;
+import org.opendaylight.controller.yang.model.util.EnumerationType;
+import org.opendaylight.controller.yang.model.util.ExtendedType;
+import org.opendaylight.controller.yang.model.util.InstanceIdentifier;
+import org.opendaylight.controller.yang.model.util.StringType;
+import org.opendaylight.controller.yang.model.util.UnionType;
+
+public class TypesResolutionTest {
+
+ private Set<Module> testedModules;
+
+ @Before
+ public void init() {
+ YangModelParser parser = new YangModelParserImpl();
+ File testDir = new File("src/test/resources/types");
+ String[] fileList = testDir.list();
+ String[] testFiles = new String[fileList.length];
+ for (int i = 0; i < fileList.length; i++) {
+ String fileName = fileList[i];
+ File file = new File(testDir, fileName);
+ testFiles[i] = file.getAbsolutePath();
+ }
+ testedModules = parser.parseYangModels(testFiles);
+ assertEquals(fileList.length, testedModules.size());
+ }
+
+ @Test
+ public void testIPVersion() {
+ Module tested = findModule(testedModules, "ietf-inet-types");
+ Set<TypeDefinition<?>> typedefs = tested.getTypeDefinitions();
+ assertEquals(14, typedefs.size());
+
+ TypeDefinition<?> type = findTypedef(typedefs, "ip-version");
+ assertTrue(type.getDescription().contains(
+ "This value represents the version of the IP protocol."));
+ assertTrue(type.getReference().contains(
+ "RFC 2460: Internet Protocol, Version 6 (IPv6) Specification"));
+
+ EnumerationType enumType = (EnumerationType) type.getBaseType();
+ List<EnumPair> values = enumType.getValues();
+ assertEquals(3, values.size());
+
+ EnumPair value0 = values.get(0);
+ assertEquals("unknown", value0.getName());
+ assertEquals(0, (int) value0.getValue());
+ assertEquals(
+ "An unknown or unspecified version of the Internet protocol.",
+ value0.getDescription());
+
+ EnumPair value1 = values.get(1);
+ assertEquals("ipv4", value1.getName());
+ assertEquals(1, (int) value1.getValue());
+ assertEquals("The IPv4 protocol as defined in RFC 791.",
+ value1.getDescription());
+
+ EnumPair value2 = values.get(2);
+ assertEquals("ipv6", value2.getName());
+ assertEquals(2, (int) value2.getValue());
+ assertEquals("The IPv6 protocol as defined in RFC 2460.",
+ value2.getDescription());
+ }
+
+ @Test
+ public void testIpAddress() {
+ Module tested = findModule(testedModules, "ietf-inet-types");
+ Set<TypeDefinition<?>> typedefs = tested.getTypeDefinitions();
+ TypeDefinition<?> type = findTypedef(typedefs, "ip-address");
+ UnionType baseType = (UnionType) type.getBaseType();
+ List<TypeDefinition<?>> unionTypes = baseType.getTypes();
+
+ ExtendedType ipv4 = (ExtendedType)unionTypes.get(0);
+ StringType ipv4Base = (StringType) ipv4.getBaseType();
+ String expectedPattern = "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}"
+ + "([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])"
+ + "(%[\\p{N}\\p{L}]+)?";
+ assertEquals(expectedPattern, ipv4Base.getPatterns().get(0)
+ .getRegularExpression());
+
+ ExtendedType ipv6 = (ExtendedType)unionTypes.get(1);
+ StringType ipv6Base = (StringType) ipv6.getBaseType();
+ List<PatternConstraint> ipv6Patterns = ipv6Base.getPatterns();
+ expectedPattern = "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}"
+ + "((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|"
+ + "(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}"
+ + "(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))"
+ + "(%[\\p{N}\\p{L}]+)?";
+ assertEquals(expectedPattern, ipv6Patterns.get(0)
+ .getRegularExpression());
+
+ expectedPattern = "(([^:]+:){6}(([^:]+:[^:]+)|(.*\\..*)))|"
+ + "((([^:]+:)*[^:]+)?::(([^:]+:)*[^:]+)?)" + "(%.+)?";
+ assertEquals(expectedPattern, ipv6Patterns.get(1)
+ .getRegularExpression());
+ }
+
+ @Test
+ public void testDomainName() {
+ Module tested = findModule(testedModules, "ietf-inet-types");
+ Set<TypeDefinition<?>> typedefs = tested.getTypeDefinitions();
+ TypeDefinition<?> type = findTypedef(typedefs, "domain-name");
+ StringType baseType = (StringType) type.getBaseType();
+ List<PatternConstraint> patterns = baseType.getPatterns();
+ assertEquals(1, patterns.size());
+ String expectedPattern = "((([a-zA-Z0-9_]([a-zA-Z0-9\\-_]){0,61})?[a-zA-Z0-9]\\.)*"
+ + "([a-zA-Z0-9_]([a-zA-Z0-9\\-_]){0,61})?[a-zA-Z0-9]\\.?)"
+ + "|\\.";
+ assertEquals(expectedPattern, patterns.get(0).getRegularExpression());
+
+ List<LengthConstraint> lengths = baseType.getLengthStatements();
+ assertEquals(1, lengths.size());
+ LengthConstraint length = baseType.getLengthStatements().get(0);
+ assertEquals(1L, length.getMin());
+ assertEquals(253L, length.getMax());
+ }
+
+ @Test
+ public void testInstanceIdentifier1() {
+ Module tested = findModule(testedModules, "custom-types-test");
+ LeafSchemaNode leaf = (LeafSchemaNode) tested
+ .getDataChildByName("inst-id-leaf1");
+ InstanceIdentifier leafType = (InstanceIdentifier) leaf.getType();
+ assertFalse(leafType.requireInstance());
+ }
+
+ @Test
+ public void testInstanceIdentifier2() {
+ Module tested = findModule(testedModules, "custom-types-test");
+ LeafSchemaNode leaf = (LeafSchemaNode) tested
+ .getDataChildByName("inst-id-leaf2");
+ InstanceIdentifier leafType = (InstanceIdentifier) leaf.getType();
+ assertTrue(leafType.requireInstance());
+ }
+
+ @Test
+ public void testIdentity() {
+ Module tested = findModule(testedModules, "custom-types-test");
+ Set<IdentitySchemaNode> identities = tested.getIdentities();
+ IdentitySchemaNode testedIdentity = null;
+ for (IdentitySchemaNode id : identities) {
+ if (id.getQName().getLocalName().equals("crypto-alg")) {
+ testedIdentity = id;
+ IdentitySchemaNode baseIdentity = id.getBaseIdentity();
+ assertEquals("crypto-base", baseIdentity.getQName()
+ .getLocalName());
+ assertNull(baseIdentity.getBaseIdentity());
+ }
+ }
+ assertNotNull(testedIdentity);
+ }
+
+ @Test
+ public void testBitsType1() {
+ Module tested = findModule(testedModules, "custom-types-test");
+ LeafSchemaNode leaf = (LeafSchemaNode) tested
+ .getDataChildByName("mybits");
+ BitsType leafType = (BitsType) leaf.getType();
+ List<Bit> bits = leafType.getBits();
+ assertEquals(3, bits.size());
+
+ Bit bit1 = bits.get(0);
+ assertEquals("disable-nagle", bit1.getName());
+ assertEquals(0L, (long) bit1.getPosition());
+
+ Bit bit2 = bits.get(1);
+ assertEquals("auto-sense-speed", bit2.getName());
+ assertEquals(1L, (long) bit2.getPosition());
+
+ Bit bit3 = bits.get(2);
+ assertEquals("10-Mb-only", bit3.getName());
+ assertEquals(2L, (long) bit3.getPosition());
+ }
+
+ @Test
+ public void testBitsType2() {
+ Module tested = findModule(testedModules, "custom-types-test");
+ Set<TypeDefinition<?>> typedefs = tested.getTypeDefinitions();
+ TypeDefinition<?> testedType = findTypedef(typedefs,
+ "access-operations-type");
+
+ BitsType bitsType = (BitsType) testedType.getBaseType();
+ List<Bit> bits = bitsType.getBits();
+ assertEquals(5, bits.size());
+
+ Bit bit0 = bits.get(0);
+ assertEquals(0L, (long) bit0.getPosition());
+
+ Bit bit1 = bits.get(1);
+ assertEquals(500L, (long) bit1.getPosition());
+
+ Bit bit2 = bits.get(2);
+ assertEquals(501L, (long) bit2.getPosition());
+
+ Bit bit3 = bits.get(3);
+ assertEquals(365L, (long) bit3.getPosition());
+
+ Bit bit4 = bits.get(4);
+ assertEquals(502L, (long) bit4.getPosition());
+ }
+
+ private Module findModule(Set<Module> modules, String name) {
+ Module result = null;
+ for (Module module : modules) {
+ if (module.getName().equals(name)) {
+ result = module;
+ break;
+ }
+ }
+ return result;
+ }
+
+ private TypeDefinition<?> findTypedef(Set<TypeDefinition<?>> typedefs,
+ String name) {
+ TypeDefinition<?> result = null;
+ for (TypeDefinition<?> td : typedefs) {
+ if (td.getQName().getLocalName().equals(name)) {
+ result = td;
+ break;
+ }
+ }
+ return result;
+ }
+
+}
+++ /dev/null
-/*\r
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.\r
- *\r
- * This program and the accompanying materials are made available under the\r
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,\r
- * and is available at http://www.eclipse.org/legal/epl-v10.html\r
- */\r
-package org.opendaylight.controller.yang.model.parser.impl;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.io.IOException;\r
-import java.io.InputStream;\r
-import java.net.URI;\r
-import java.text.DateFormat;\r
-import java.text.SimpleDateFormat;\r
-import java.util.ArrayList;\r
-import java.util.Collections;\r
-import java.util.Date;\r
-import java.util.List;\r
-import java.util.Set;\r
-\r
-import org.antlr.v4.runtime.ANTLRInputStream;\r
-import org.antlr.v4.runtime.CommonTokenStream;\r
-import org.antlr.v4.runtime.tree.ParseTree;\r
-import org.antlr.v4.runtime.tree.ParseTreeWalker;\r
-import org.junit.Ignore;\r
-import org.junit.Test;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangLexer;\r
-import org.opendaylight.controller.antlrv4.code.gen.YangParser;\r
-import org.opendaylight.controller.yang.common.QName;\r
-import org.opendaylight.controller.yang.model.api.ContainerSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.DataNodeContainer;\r
-import org.opendaylight.controller.yang.model.api.DataSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.LeafSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.ListSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.Module;\r
-import org.opendaylight.controller.yang.model.api.ModuleImport;\r
-import org.opendaylight.controller.yang.model.api.SchemaNode;\r
-import org.opendaylight.controller.yang.model.api.SchemaPath;\r
-import org.opendaylight.controller.yang.model.api.Status;\r
-import org.opendaylight.controller.yang.model.api.TypeDefinition;\r
-import org.opendaylight.controller.yang.model.parser.builder.impl.ModuleBuilder;\r
-import org.opendaylight.controller.yang.model.util.Leafref;\r
-import org.opendaylight.controller.yang.model.util.UnknownType;\r
-\r
-public class YangModelParserListenerTest {\r
-\r
- @Test\r
- public void testParseImport() throws Exception {\r
- Module module = getModule("/abstract-topology.yang");\r
-\r
- Set<ModuleImport> imports = module.getImports();\r
- assertEquals(1, imports.size());\r
- ModuleImport moduleImport = imports.iterator().next();\r
-\r
- assertEquals("inet", moduleImport.getPrefix());\r
-\r
- DateFormat simpleDateFormat = new SimpleDateFormat("yyyy-mm-dd");\r
- Date expectedDate = simpleDateFormat.parse("2010-09-24");\r
- assertEquals(expectedDate, moduleImport.getRevision());\r
- }\r
-\r
- @Test\r
- public void testParseHeaders() throws Exception {\r
- Module module = getModule("/abstract-topology.yang");\r
-\r
- URI namespace = module.getNamespace();\r
- URI expectedNS = URI.create("");\r
- assertEquals(expectedNS, namespace);\r
-\r
- DateFormat simpleDateFormat = new SimpleDateFormat("yyyy-mm-dd");\r
- Date expectedDate = simpleDateFormat.parse("2013-02-08");\r
- assertEquals(expectedDate, module.getRevision());\r
-\r
- String prefix = module.getPrefix();\r
- String expectedPrefix = "tp";\r
- assertEquals(expectedPrefix, prefix);\r
-\r
- String expectedDescription = "This module contains the definitions of elements that creates network";\r
- assertTrue(module.getDescription().contains(expectedDescription));\r
-\r
- String expectedReference = "~~~ WILL BE DEFINED LATER";\r
- assertEquals(expectedReference, module.getReference());\r
-\r
- assertEquals("1", module.getYangVersion());\r
- }\r
-\r
- @Test\r
- public void testParseLeafref() throws Exception {\r
- Module module = getModule("/abstract-topology.yang");\r
-\r
- Set<TypeDefinition<?>> typedefs = module.getTypeDefinitions();\r
- assertEquals(2, typedefs.size());\r
- for(TypeDefinition<?> td : typedefs) {\r
- Leafref baseType = (Leafref)td.getBaseType();\r
- if(td.getQName().getLocalName().equals("node-id-ref")) {\r
- assertEquals("/tp:topology/tp:network-nodes/tp:network-node/tp:node-id", baseType.getPathStatement().toString());\r
- } else {\r
- assertEquals("/tp:topology/tp:network-links/tp:network-link/tp:link-id", baseType.getPathStatement().toString());\r
- }\r
- }\r
- }\r
- \r
- @Ignore\r
- @Test\r
- public void testParseModule() throws IOException {\r
- //TODO: fix test\r
- Module module = getModule("/test-model.yang");\r
-\r
- URI namespace = module.getNamespace();\r
- Date revision = module.getRevision();\r
- String prefix = module.getPrefix();\r
-\r
- String expectedDescription = "module description";\r
- assertEquals(expectedDescription, module.getDescription());\r
-\r
- String expectedReference = "module reference";\r
- assertEquals(expectedReference, module.getReference());\r
-\r
- Set<TypeDefinition<?>> typedefs = module.getTypeDefinitions();\r
- assertEquals(10, typedefs.size());\r
-\r
- Set<DataSchemaNode> childNodes = module.getChildNodes();\r
- assertEquals(1, childNodes.size());\r
-\r
- final String containerName = "network";\r
- final QName containerQName = new QName(namespace, revision, prefix, containerName);\r
- ContainerSchemaNode tested = (ContainerSchemaNode) module.getChildNodes().iterator().next();\r
- DataSchemaNode container1 = module.getDataChildByName(containerName);\r
- DataSchemaNode container2 = module.getDataChildByName(containerQName);\r
-\r
- assertEquals(tested, container1);\r
- assertEquals(container1, container2);\r
- }\r
-\r
- @Ignore\r
- @Test\r
- public void testParseContainer() throws IOException {\r
- //TODO: fix test\r
- Module module = getModule("/test-model.yang");\r
-\r
- URI namespace = module.getNamespace();\r
- Date revision = module.getRevision();\r
- String prefix = module.getPrefix();\r
- final QName containerQName = new QName(namespace, revision, prefix, "network");\r
-\r
- ContainerSchemaNode tested = (ContainerSchemaNode)module.getDataChildByName(containerQName);\r
-\r
- Set<DataSchemaNode> containerChildNodes = tested.getChildNodes();\r
- assertEquals(3, containerChildNodes.size());\r
-\r
- String expectedDescription = "network-description";\r
- String expectedReference = "network-reference";\r
- Status expectedStatus = Status.OBSOLETE;\r
- testDesc_Ref_Status(tested, expectedDescription, expectedReference, expectedStatus);\r
-\r
- List<QName> path = new ArrayList<QName>();\r
- path.add(new QName(namespace, revision, prefix, "test-model"));\r
- path.add(containerQName);\r
- SchemaPath expectedSchemaPath = new SchemaPath(path, true);\r
- assertEquals(expectedSchemaPath, tested.getPath());\r
-\r
- assertTrue(tested.isConfiguration());\r
- assertTrue(tested.isPresenceContainer());\r
- }\r
-\r
- @Ignore\r
- @Test\r
- public void testParseList() throws IOException {\r
- //TODO: fix test\r
- Module module = getModule("/test-model.yang");\r
-\r
- URI namespace = module.getNamespace();\r
- Date revision = module.getRevision();\r
- String prefix = module.getPrefix();\r
- final QName listQName = new QName(namespace, revision, prefix, "topology");\r
-\r
- DataNodeContainer networkContainer = (DataNodeContainer)module.getDataChildByName("network");\r
- DataNodeContainer topologiesContainer = (DataNodeContainer)networkContainer.getDataChildByName("topologies");\r
- ListSchemaNode tested = (ListSchemaNode)topologiesContainer.getDataChildByName(listQName);\r
- assertEquals(listQName, tested.getQName());\r
-\r
- String expectedDescription = "Test description of list 'topology'.";\r
- String expectedReference = null;\r
- Status expectedStatus = Status.CURRENT;\r
- testDesc_Ref_Status(tested, expectedDescription, expectedReference, expectedStatus);\r
-\r
- List<QName> path = new ArrayList<QName>();\r
- path.add(new QName(namespace, revision, prefix, "test-model"));\r
- path.add(new QName(namespace, revision, prefix, "network"));\r
- path.add(new QName(namespace, revision, prefix, "topologies"));\r
- path.add(listQName);\r
- SchemaPath expectedSchemaPath = new SchemaPath(path, true);\r
- assertEquals(expectedSchemaPath, tested.getPath());\r
-\r
- List<QName> expectedKey = new ArrayList<QName>();\r
- expectedKey.add(new QName(namespace, revision, prefix, "topology-id"));\r
- assertEquals(expectedKey, tested.getKeyDefinition());\r
-\r
- assertEquals(Collections.EMPTY_SET, tested.getTypeDefinitions());\r
- assertEquals(Collections.EMPTY_SET, tested.getUses());\r
- assertEquals(Collections.EMPTY_SET, tested.getGroupings());\r
-\r
- assertTrue(tested.getDataChildByName("topology-id") instanceof LeafSchemaNode);\r
- }\r
- \r
- @Ignore\r
- @Test\r
- public void testParseLeaf() throws IOException {\r
- //TODO: fix test\r
- Module module = getModule("/test-model.yang");\r
-\r
- URI namespace = module.getNamespace();\r
- Date revision = module.getRevision();\r
- String prefix = module.getPrefix();\r
- final QName leafQName = new QName(namespace, revision, prefix, "topology-id");\r
-\r
- DataNodeContainer networkContainer = (DataNodeContainer)module.getDataChildByName("network");\r
- DataNodeContainer topologiesContainer = (DataNodeContainer)networkContainer.getDataChildByName("topologies");\r
- DataNodeContainer topologyList = (DataNodeContainer)topologiesContainer.getDataChildByName("topology");\r
- LeafSchemaNode tested = (LeafSchemaNode)topologyList.getDataChildByName(leafQName);\r
- assertEquals(leafQName, tested.getQName());\r
-\r
- String expectedDescription = "Test description of leaf 'topology-id'";\r
- String expectedReference = null;\r
- Status expectedStatus = Status.CURRENT;\r
- testDesc_Ref_Status(tested, expectedDescription, expectedReference, expectedStatus);\r
-\r
- List<QName> path = new ArrayList<QName>();\r
- path.add(new QName(namespace, revision, prefix, "test-model"));\r
- path.add(new QName(namespace, revision, prefix, "network"));\r
- path.add(new QName(namespace, revision, prefix, "topologies"));\r
- path.add(new QName(namespace, revision, prefix, "topology"));\r
- path.add(leafQName);\r
- SchemaPath expectedSchemaPath = new SchemaPath(path, true);\r
- assertEquals(expectedSchemaPath, tested.getPath());\r
-\r
- UnknownType.Builder typeBuilder = new UnknownType.Builder(new QName(namespace, revision, prefix, "topology-id"), null, null);\r
- TypeDefinition<?> expectedType = typeBuilder.build();\r
- assertEquals(expectedType, tested.getType());\r
- }\r
-\r
-\r
- private void testDesc_Ref_Status(SchemaNode tested, String expectedDescription, String expectedReference, Status expectedStatus) {\r
- assertEquals(expectedDescription, tested.getDescription());\r
- assertEquals(expectedReference, tested.getReference());\r
- assertEquals(expectedStatus, tested.getStatus());\r
- }\r
-\r
- private Module getModule(String testFile) throws IOException {\r
- ModuleBuilder builder = getBuilder(testFile);\r
- return builder.build();\r
- }\r
-\r
- private ModuleBuilder getBuilder(String fileName) throws IOException {\r
- final InputStream inStream = getClass().getResourceAsStream(fileName);\r
- ANTLRInputStream input = new ANTLRInputStream(inStream);\r
- final YangLexer lexer = new YangLexer(input);\r
- final CommonTokenStream tokens = new CommonTokenStream(lexer);\r
- final YangParser parser = new YangParser(tokens);\r
-\r
- final ParseTree tree = parser.yang();\r
- final ParseTreeWalker walker = new ParseTreeWalker();\r
-\r
- final YangModelParserListenerImpl modelParser = new YangModelParserListenerImpl();\r
- walker.walk(modelParser, tree);\r
- return modelParser.getModuleBuilder();\r
- }\r
-\r
-}\r
-/*\r
- * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.\r
- *\r
- * This program and the accompanying materials are made available under the\r
- * terms of the Eclipse Public License v1.0 which accompanies this distribution,\r
- * and is available at http://www.eclipse.org/legal/epl-v10.html\r
- */\r
-package org.opendaylight.controller.yang.model.parser.impl;\r
-\r
-import static org.junit.Assert.*;\r
-\r
-import java.util.HashSet;\r
-import java.util.List;\r
-import java.util.Set;\r
-\r
-import org.junit.Before;\r
-import org.junit.Test;\r
-import org.opendaylight.controller.yang.model.api.AugmentationSchema;\r
-import org.opendaylight.controller.yang.model.api.ContainerSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.LeafSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.ListSchemaNode;\r
-import org.opendaylight.controller.yang.model.api.Module;\r
-import org.opendaylight.controller.yang.model.api.TypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.IntegerTypeDefinition;\r
-import org.opendaylight.controller.yang.model.api.type.PatternConstraint;\r
-import org.opendaylight.controller.yang.model.api.type.RangeConstraint;\r
-import org.opendaylight.controller.yang.model.parser.api.YangModelParser;\r
-import org.opendaylight.controller.yang.model.util.Decimal64;\r
-import org.opendaylight.controller.yang.model.util.Int32;\r
-import org.opendaylight.controller.yang.model.util.StringType;\r
-\r
-public class YangModelParserTest {\r
-\r
- private final String testFile1 = "src/test/resources/model/testfile1.yang";\r
- private final String testFile2 = "src/test/resources/model/testfile2.yang";\r
- private YangModelParser tested;\r
-\r
- @Before\r
- public void init() {\r
- tested = new YangModelParserImpl();\r
- }\r
-\r
- @Test\r
- public void testAugment() {\r
- Set<Module> modules = tested.parseYangModels(testFile1, testFile2);\r
- assertEquals(2, modules.size());\r
-\r
- Module m2 = null;\r
- for(Module m : modules) {\r
- if(m.getName().equals("types2")) {\r
- m2 = m;\r
- }\r
- }\r
- assertNotNull(m2);\r
-\r
- AugmentationSchema augment = m2.getAugmentations().iterator().next();\r
- assertNotNull(augment);\r
- }\r
-\r
- @Test\r
- public void testAugmentTarget() {\r
- Set<Module> modules = tested.parseYangModels(testFile1, testFile2);\r
- assertEquals(2, modules.size());\r
-\r
- Module m1 = null;\r
- for(Module m : modules) {\r
- if(m.getName().equals("types1")) {\r
- m1 = m;\r
- }\r
- }\r
- assertNotNull(m1);\r
-\r
- ContainerSchemaNode container = (ContainerSchemaNode)m1.getDataChildByName("interfaces");\r
- assertNotNull(container);\r
-\r
- ListSchemaNode list = (ListSchemaNode)container.getDataChildByName("ifEntry");\r
- assertNotNull(list);\r
- assertEquals(1, list.getAvailableAugmentations().size());\r
-\r
- LeafSchemaNode leaf = (LeafSchemaNode)list.getDataChildByName("ds0ChannelNumber");\r
- assertNotNull(leaf);\r
- }\r
-\r
- @Test\r
- public void testTypedefRangesResolving() {\r
- Set<Module> modules = tested.parseYangModels(testFile1, testFile2);\r
- assertEquals(2, modules.size());\r
-\r
- Module m1 = null;\r
- for(Module m : modules) {\r
- if(m.getName().equals("types1")) {\r
- m1 = m;\r
- }\r
- }\r
- assertNotNull(m1);\r
-\r
- LeafSchemaNode testleaf = (LeafSchemaNode)m1.getDataChildByName("testleaf");\r
- TypeDefinition<?> baseType = testleaf.getType().getBaseType();\r
- assertTrue(testleaf.getType().getBaseType() instanceof Int32);\r
- Int32 baseTypeCast = (Int32)baseType;\r
- List<RangeConstraint> ranges = baseTypeCast.getRangeStatements();\r
- assertEquals(2, ranges.size());\r
- RangeConstraint range = ranges.get(0);\r
- assertEquals(2L, range.getMin());\r
- assertEquals(20L, range.getMax());\r
- }\r
-\r
- @Test\r
- public void testTypedefPatternsResolving() {\r
- Set<Module> modules = tested.parseYangModels(testFile1, testFile2);\r
- assertEquals(2, modules.size());\r
-\r
- Module m1 = null;\r
- for(Module m : modules) {\r
- if(m.getName().equals("types1")) {\r
- m1 = m;\r
- }\r
- }\r
- assertNotNull(m1);\r
-\r
- LeafSchemaNode testleaf = (LeafSchemaNode)m1.getDataChildByName("test-string-leaf");\r
- TypeDefinition<?> baseType = testleaf.getType().getBaseType();\r
- assertTrue(testleaf.getType().getBaseType() instanceof StringType);\r
- StringType baseTypeCast = (StringType)baseType;\r
-\r
- Set<String> expectedRegularExpressions = new HashSet<String>();\r
- expectedRegularExpressions.add("[a-k]*");\r
- expectedRegularExpressions.add("[b-u]*");\r
- expectedRegularExpressions.add("[e-z]*");\r
-\r
- Set<String> actualRegularExpressions = new HashSet<String>();\r
- List<PatternConstraint> patterns = baseTypeCast.getPatterns();\r
- for(PatternConstraint pc : patterns) {\r
- actualRegularExpressions.add(pc.getRegularExpression());\r
- }\r
-\r
- assertEquals(expectedRegularExpressions, actualRegularExpressions);\r
- }\r
-\r
- @Test\r
- public void testTypedefLengthsResolving() {\r
- Set<Module> modules = tested.parseYangModels(testFile1, testFile2);\r
- assertEquals(2, modules.size());\r
-\r
- Module m1 = null;\r
- for(Module m : modules) {\r
- if(m.getName().equals("types1")) {\r
- m1 = m;\r
- }\r
- }\r
- assertNotNull(m1);\r
-\r
- LeafSchemaNode testleaf = (LeafSchemaNode)m1.getDataChildByName("test-int-leaf");\r
- TypeDefinition<?> baseType = testleaf.getType().getBaseType();\r
- assertTrue(testleaf.getType().getBaseType() instanceof IntegerTypeDefinition);\r
- Int32 baseTypeCast = (Int32)baseType;\r
-\r
- Long[][] expectedRanges = new Long[3][2];\r
- expectedRanges[0] = new Long[]{10L, 20L};\r
- expectedRanges[1] = new Long[]{12L, 18L};\r
- expectedRanges[2] = new Long[]{14L, 16L};\r
-\r
- List<RangeConstraint> actualRanges = baseTypeCast.getRangeStatements();\r
- assertEquals(3, actualRanges.size());\r
- for(int i = 0; i < actualRanges.size(); i++) {\r
- assertEquals(expectedRanges[i][0], actualRanges.get(i).getMin());\r
- assertEquals(expectedRanges[i][1], actualRanges.get(i).getMax());\r
- }\r
- }\r
-\r
- @Test\r
- public void testTypeDef() {\r
- Set<Module> modules = tested.parseYangModels(testFile1, testFile2);\r
- assertEquals(2, modules.size());\r
-\r
- Module m2 = null;\r
- for(Module m : modules) {\r
- if(m.getName().equals("types2")) {\r
- m2 = m;\r
- }\r
- }\r
- assertNotNull(m2);\r
-\r
- LeafSchemaNode testleaf = (LeafSchemaNode)m2.getDataChildByName("nested-type-leaf");\r
- TypeDefinition<?> baseType = testleaf.getType().getBaseType();\r
- assertTrue(testleaf.getType().getBaseType() instanceof Int32);\r
- Int32 baseTypeCast = (Int32)baseType;\r
- List<RangeConstraint> ranges = baseTypeCast.getRangeStatements();\r
- assertEquals(2, ranges.size());\r
- RangeConstraint range = ranges.get(0);\r
- assertEquals(2L, range.getMin());\r
- assertEquals(20L, range.getMax());\r
- }\r
-\r
- @Test\r
- public void testTypedefDecimal1() {\r
- Set<Module> modules = tested.parseYangModels(testFile1, testFile2);\r
- assertEquals(2, modules.size());\r
-\r
- Module m1 = null;\r
- for(Module m : modules) {\r
- if(m.getName().equals("types1")) {\r
- m1 = m;\r
- }\r
- }\r
- assertNotNull(m1);\r
-\r
- LeafSchemaNode testleaf = (LeafSchemaNode)m1.getDataChildByName("test-decimal-leaf");\r
- TypeDefinition<?> baseType = testleaf.getType().getBaseType();\r
- assertTrue(testleaf.getType().getBaseType() instanceof Decimal64);\r
- Decimal64 baseTypeCast = (Decimal64)baseType;\r
- assertEquals(4, (int)baseTypeCast.getFractionDigits());\r
- }\r
-\r
- @Test\r
- public void testTypedefDecimal2() {\r
- Set<Module> modules = tested.parseYangModels(testFile1, testFile2);\r
- assertEquals(2, modules.size());\r
-\r
- Module m1 = null;\r
- for(Module m : modules) {\r
- if(m.getName().equals("types1")) {\r
- m1 = m;\r
- }\r
- }\r
- assertNotNull(m1);\r
-\r
- LeafSchemaNode testleaf = (LeafSchemaNode)m1.getDataChildByName("test-decimal-leaf2");\r
- TypeDefinition<?> baseType = testleaf.getType().getBaseType();\r
- assertTrue(testleaf.getType().getBaseType() instanceof Decimal64);\r
- Decimal64 baseTypeCast = (Decimal64)baseType;\r
- assertEquals(5, (int)baseTypeCast.getFractionDigits());\r
- }\r
-\r
-}\r
+/*
+ * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Eclipse Public License v1.0 which accompanies this distribution,
+ * and is available at http://www.eclipse.org/legal/epl-v10.html
+ */
+package org.opendaylight.controller.yang.model.parser.impl;
+
+import static org.junit.Assert.*;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.opendaylight.controller.yang.common.QName;
+import org.opendaylight.controller.yang.model.api.AugmentationSchema;
+import org.opendaylight.controller.yang.model.api.ContainerSchemaNode;
+import org.opendaylight.controller.yang.model.api.LeafSchemaNode;
+import org.opendaylight.controller.yang.model.api.ListSchemaNode;
+import org.opendaylight.controller.yang.model.api.Module;
+import org.opendaylight.controller.yang.model.api.TypeDefinition;
+import org.opendaylight.controller.yang.model.api.type.LengthConstraint;
+import org.opendaylight.controller.yang.model.api.type.PatternConstraint;
+import org.opendaylight.controller.yang.model.api.type.RangeConstraint;
+import org.opendaylight.controller.yang.model.parser.api.YangModelParser;
+import org.opendaylight.controller.yang.model.util.Decimal64;
+import org.opendaylight.controller.yang.model.util.ExtendedType;
+import org.opendaylight.controller.yang.model.util.Int16;
+import org.opendaylight.controller.yang.model.util.Int32;
+import org.opendaylight.controller.yang.model.util.StringType;
+import org.opendaylight.controller.yang.model.util.UnionType;
+
+public class YangModelParserTest {
+
+ private static final String TEST_FILE1 = "src/test/resources/model/testfile1.yang";
+ private static final String TEST_FILE2 = "src/test/resources/model/testfile2.yang";
+ private YangModelParser tested;
+
+ @Before
+ public void init() {
+ tested = new YangModelParserImpl();
+ }
+
+ @Test
+ public void testAugment() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types2");
+ assertNotNull(testModule);
+
+ AugmentationSchema augment = testModule.getAugmentations().iterator().next();
+ assertNotNull(augment);
+ }
+
+ @Test
+ public void testAugmentTarget() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types1");
+ assertNotNull(testModule);
+
+ ContainerSchemaNode container = (ContainerSchemaNode)testModule.getDataChildByName("interfaces");
+ assertNotNull(container);
+
+ ListSchemaNode list = (ListSchemaNode)container.getDataChildByName("ifEntry");
+ assertNotNull(list);
+ assertEquals(1, list.getAvailableAugmentations().size());
+
+ LeafSchemaNode leaf = (LeafSchemaNode)list.getDataChildByName("ds0ChannelNumber");
+ assertNotNull(leaf);
+ }
+
+ @Test
+ public void testTypedefRangesResolving() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types1");
+ assertNotNull(testModule);
+
+ LeafSchemaNode testleaf = (LeafSchemaNode)testModule.getDataChildByName("testleaf");
+ ExtendedType leafType = (ExtendedType)testleaf.getType();
+ assertEquals("my-type1", leafType.getQName().getLocalName());
+ assertEquals("t2", leafType.getQName().getPrefix());
+ ExtendedType baseType = (ExtendedType)leafType.getBaseType();
+ assertEquals("my-base-int32-type", baseType.getQName().getLocalName());
+ assertEquals("t2", baseType.getQName().getPrefix());
+
+ List<RangeConstraint> ranges = leafType.getRanges();
+ assertEquals(1, ranges.size());
+ RangeConstraint range = ranges.get(0);
+ assertEquals(11L, range.getMin());
+ assertEquals(20L, range.getMax());
+ }
+
+ @Test
+ public void testTypedefPatternsResolving() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types1");
+ assertNotNull(testModule);
+
+ LeafSchemaNode testleaf = (LeafSchemaNode)testModule.getDataChildByName("test-string-leaf");
+ ExtendedType testleafType = (ExtendedType)testleaf.getType();
+ QName testleafTypeQName = testleafType.getQName();
+ assertEquals("my-string-type-ext", testleafTypeQName.getLocalName());
+ assertEquals("t2", testleafTypeQName.getPrefix());
+
+ Set<String> expectedRegex = new HashSet<String>();
+ expectedRegex.add("[a-k]*");
+ expectedRegex.add("[b-u]*");
+ expectedRegex.add("[e-z]*");
+
+ Set<String> actualRegex = new HashSet<String>();
+ List<PatternConstraint> patterns = testleafType.getPatterns();
+ assertEquals(3, patterns.size());
+ for (PatternConstraint pc : patterns) {
+ actualRegex.add(pc.getRegularExpression());
+ }
+ assertEquals(expectedRegex, actualRegex);
+
+ TypeDefinition<?> baseType = testleafType.getBaseType();
+ assertEquals("my-string-type2", baseType.getQName().getLocalName());
+
+ List<LengthConstraint> lengths = testleafType.getLengths();
+ assertEquals(1, lengths.size());
+
+ LengthConstraint length = lengths.get(0);
+ assertEquals(5L, length.getMin());
+ assertEquals(10L, length.getMax());
+ }
+
+ @Test
+ public void testTypedefLengthsResolving() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types1");
+ assertNotNull(testModule);
+
+ LeafSchemaNode testleaf = (LeafSchemaNode)testModule.getDataChildByName("leaf-with-length");
+ ExtendedType testleafType = (ExtendedType)testleaf.getType();
+ assertEquals("my-string-type", testleafType.getQName().getLocalName());
+
+ List<LengthConstraint> lengths = testleafType.getLengths();
+ assertEquals(1, lengths.size());
+
+ LengthConstraint length = lengths.get(0);
+ assertEquals(7L, length.getMin());
+ assertEquals(10L, length.getMax());
+ }
+
+ @Test
+ public void testTypeDef() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types2");
+ assertNotNull(testModule);
+
+ LeafSchemaNode testleaf = (LeafSchemaNode)testModule.getDataChildByName("nested-type-leaf");
+ ExtendedType testleafType = (ExtendedType)testleaf.getType();
+ assertEquals("my-type1", testleafType.getQName().getLocalName());
+
+ ExtendedType baseType = (ExtendedType)testleafType.getBaseType();
+ assertEquals("my-base-int32-type", baseType.getQName().getLocalName());
+
+ Int32 int32base = (Int32)baseType.getBaseType();
+ List<RangeConstraint> ranges = int32base.getRangeStatements();
+ assertEquals(1, ranges.size());
+ RangeConstraint range = ranges.get(0);
+ assertEquals(2L, range.getMin());
+ assertEquals(20L, range.getMax());
+ }
+
+ @Test
+ public void testTypedefDecimal1() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types1");
+ assertNotNull(testModule);
+
+ LeafSchemaNode testleaf = (LeafSchemaNode)testModule.getDataChildByName("test-decimal-leaf");
+ ExtendedType type = (ExtendedType)testleaf.getType();
+
+ TypeDefinition<?> baseType = type.getBaseType();
+ assertTrue(baseType instanceof Decimal64);
+ Decimal64 baseTypeCast = (Decimal64)baseType;
+ assertEquals(6, (int)baseTypeCast.getFractionDigits());
+ }
+
+ @Test
+ public void testTypedefDecimal2() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types1");
+ assertNotNull(testModule);
+
+ LeafSchemaNode testleaf = (LeafSchemaNode)testModule.getDataChildByName("test-decimal-leaf2");
+ TypeDefinition<?> baseType = testleaf.getType().getBaseType();
+ assertTrue(testleaf.getType().getBaseType() instanceof Decimal64);
+ Decimal64 baseTypeCast = (Decimal64)baseType;
+ assertEquals(5, (int)baseTypeCast.getFractionDigits());
+ }
+
+ @Test
+ public void testTypedefUnion() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types1");
+ assertNotNull(testModule);
+
+ LeafSchemaNode testleaf = (LeafSchemaNode)testModule.getDataChildByName("union-leaf");
+ ExtendedType testleafType = (ExtendedType)testleaf.getType();
+ assertEquals("my-union-ext", testleafType.getQName().getLocalName());
+
+ ExtendedType baseType = (ExtendedType)testleafType.getBaseType();
+ assertEquals("my-union", baseType.getQName().getLocalName());
+
+ UnionType unionBase = (UnionType) baseType.getBaseType();
+
+ List<TypeDefinition<?>> unionTypes = unionBase.getTypes();
+ Int16 unionType1 = (Int16)unionTypes.get(0);
+ List<RangeConstraint> ranges = unionType1.getRangeStatements();
+ assertEquals(1, ranges.size());
+ RangeConstraint range = ranges.get(0);
+ assertEquals(1L, range.getMin());
+ assertEquals(100L, range.getMax());
+
+ assertTrue(unionTypes.get(0) instanceof Int16);
+ assertTrue(unionTypes.get(1) instanceof Int32);
+ }
+
+ @Test
+ public void testNestedUnionResolving() {
+ Set<Module> modules = tested.parseYangModels(TEST_FILE1, TEST_FILE2);
+ assertEquals(2, modules.size());
+
+ Module testModule = findModule(modules, "types1");
+ assertNotNull(testModule);
+
+ LeafSchemaNode testleaf = (LeafSchemaNode)testModule.getDataChildByName("nested-union-leaf");
+
+ ExtendedType nestedUnion1 = (ExtendedType)testleaf.getType();
+ assertEquals("nested-union1", nestedUnion1.getQName().getLocalName());
+
+ ExtendedType nestedUnion2 = (ExtendedType)nestedUnion1.getBaseType();
+ assertEquals("nested-union2", nestedUnion2.getQName().getLocalName());
+
+ UnionType unionType1 = (UnionType)nestedUnion2.getBaseType();
+ List<TypeDefinition<?>> unionTypes = unionType1.getTypes();
+ assertEquals(2, unionTypes.size());
+ assertTrue(unionTypes.get(0) instanceof StringType);
+ assertTrue(unionTypes.get(1) instanceof ExtendedType);
+
+ ExtendedType extendedUnion = (ExtendedType)unionTypes.get(1);
+ ExtendedType extendedUnionBase = (ExtendedType)extendedUnion.getBaseType();
+ assertEquals("my-union", extendedUnionBase.getQName().getLocalName());
+
+ UnionType extendedTargetUnion = (UnionType)extendedUnionBase.getBaseType();
+ List<TypeDefinition<?>> extendedTargetTypes = extendedTargetUnion.getTypes();
+ assertTrue(extendedTargetTypes.get(0) instanceof Int16);
+ assertTrue(extendedTargetTypes.get(1) instanceof Int32);
+
+ Int16 int16 = (Int16) extendedTargetTypes.get(0);
+ List<RangeConstraint> ranges = int16.getRangeStatements();
+ assertEquals(1, ranges.size());
+ RangeConstraint range = ranges.get(0);
+ assertEquals(1L, range.getMin());
+ assertEquals(100L, range.getMax());
+ }
+
+ private Module findModule(Set<Module> modules, String moduleName) {
+ Module result = null;
+ for(Module module : modules) {
+ if(module.getName().equals(moduleName)) {
+ result = module;
+ break;
+ }
+ }
+ return result;
+ }
+
+}
-module types1 {\r
- yang-version 1;\r
- namespace "urn:simple.container.demo";\r
- prefix "t1";\r
- \r
- import types2 {\r
- prefix "data";\r
- revision-date 2013-02-27;\r
- }\r
- \r
- organization "Cisco";\r
- contact "WILL-BE-DEFINED-LATER";\r
- \r
- revision "2013-02-27" {\r
- reference " WILL BE DEFINED LATER";\r
- }\r
- \r
- container interfaces {\r
- list ifEntry {\r
- key "ifIndex";\r
-\r
- leaf ifIndex {\r
- type uint32;\r
- units minutes;\r
- }\r
- \r
- leaf ifMtu {\r
- type int32;\r
- }\r
- }\r
- }\r
- \r
- leaf testleaf {\r
- type data:my-type1 {\r
- range "min..max";\r
- }\r
- }\r
- \r
- leaf test-string-leaf {\r
- type data:my-string-type-ext;\r
- }\r
- \r
- leaf leaf-with-length {\r
- type data:my-string-type {\r
- length "7..max";\r
- }\r
- }\r
- \r
- leaf test-int-leaf {\r
- type data:my-int-type-ext;\r
- }\r
- \r
- leaf test-decimal-leaf {\r
- type data:my-decimal-type {\r
- fraction-digits 4;\r
- }\r
- }\r
- \r
- leaf test-decimal-leaf2 {\r
- type data:my-decimal-type-ext;\r
- }\r
- \r
- container ext {\r
- data:c-define "MY_INTERFACES";\r
- }\r
-\r
-}\r
+module types1 {
+ yang-version 1;
+ namespace "urn:simple.container.demo";
+ prefix "t1";
+
+ import types2 {
+ prefix "data";
+ revision-date 2013-02-27;
+ }
+
+ organization "opendaylight";
+ contact "WILL-BE-DEFINED-LATER";
+
+ revision "2013-02-27" {
+ reference " WILL BE DEFINED LATER";
+ }
+
+ container interfaces {
+ list ifEntry {
+ key "ifIndex";
+
+ leaf ifIndex {
+ type uint32;
+ units minutes;
+ }
+
+ leaf ifMtu {
+ type int32;
+ }
+ }
+ }
+
+ leaf testleaf {
+ type data:my-type1 {
+ range "min..max";
+ }
+ }
+
+ leaf test-string-leaf {
+ type data:my-string-type-ext;
+ }
+
+ leaf leaf-with-length {
+ type data:my-string-type {
+ length "7..max";
+ }
+ }
+
+ leaf test-int-leaf {
+ type data:my-int-type-ext;
+ }
+
+ leaf test-decimal-leaf {
+ type data:my-decimal-type {
+ fraction-digits 4;
+ }
+ }
+
+ leaf test-decimal-leaf2 {
+ type data:my-decimal-type-ext;
+ }
+
+ container ext {
+ data:c-define "MY_INTERFACES";
+ }
+
+ leaf union-leaf {
+ type data:my-union-ext;
+ }
+
+ deviation /data:system/data:user {
+ deviate add {
+ default "admin"; // new users are 'admin' by default
+ }
+ }
+
+ leaf nested-union-leaf {
+ type data:nested-union1;
+ }
+
+}
-module types2 {\r
- yang-version 1;\r
- namespace "urn:simple.types.data.demo";\r
- prefix "t2";\r
- \r
- import types1 {\r
- prefix "if";\r
- revision-date 2013-02-27;\r
- }\r
-\r
- organization "Cisco";\r
- contact "WILL-BE-DEFINED-LATER";\r
- description "This is types-data test description";\r
-\r
- revision "2013-02-27" {\r
- reference " WILL BE DEFINED LATER";\r
- }\r
- \r
- typedef ranges1 {\r
- type int32 {\r
- range "10..20";\r
- }\r
- }\r
- \r
- typedef ranges2 {\r
- type ranges1 {\r
- range "12..max";\r
- }\r
- }\r
- \r
- typedef my-base-int32-type {\r
- type int32 {\r
- range "2..20";\r
- }\r
- }\r
-\r
- typedef my-type1 {\r
- type my-base-int32-type {\r
- range "11..max";\r
- }\r
- }\r
- \r
- typedef my-string-type {\r
- type my-custom-string {\r
- pattern "[a-k]*";\r
- length "5..10";\r
- }\r
- }\r
- \r
- typedef my-custom-string {\r
- type string {\r
- pattern "[a-k]*";\r
- length "5..11";\r
- }\r
- }\r
- \r
- typedef my-string-type2 {\r
- type my-string-type {\r
- pattern "[b-u]*";\r
- }\r
- }\r
- \r
- typedef my-string-type-ext {\r
- type my-string-type2 {\r
- pattern "[e-z]*";\r
- }\r
- }\r
- \r
- typedef my-int-type {\r
- type int32 {\r
- range "10..20";\r
- }\r
- }\r
- \r
- typedef my-int-type2 {\r
- type my-int-type {\r
- range "12..18";\r
- }\r
- }\r
- \r
- typedef my-int-type-ext {\r
- type my-int-type2 {\r
- range "14..16";\r
- }\r
- }\r
- \r
- typedef my-decimal-type {\r
- type decimal64 {\r
- fraction-digits 6;\r
- }\r
- }\r
- \r
- typedef my-decimal-type-ext {\r
- type decimal64 {\r
- fraction-digits 5;\r
- }\r
- }\r
-\r
- augment "/if:interfaces/if:ifEntry" {\r
- when "if:ifType='ds0'";\r
- leaf ds0ChannelNumber {\r
- type string;\r
- }\r
- }\r
-\r
- leaf if-name {\r
- type leafref {\r
- path "/interface/name";\r
- }\r
- }\r
- \r
- leaf name {\r
- type string;\r
- }\r
- \r
- leaf nested-type-leaf {\r
- type my-type1;\r
- }\r
- \r
- extension c-define {\r
- description\r
- "Takes as argument a name string.\r
- Makes the code generator use the given name in the\r
- #define.";\r
- argument "name" {\r
- yin-element "true";\r
- }\r
- }\r
-\r
-}\r
+module types2 {
+ yang-version 1;
+ namespace "urn:simple.types.data.demo";
+ prefix "t2";
+
+ import types1 {
+ prefix "if";
+ revision-date 2013-02-27;
+ }
+
+ organization "opendaylight";
+ contact "WILL-BE-DEFINED-LATER";
+ description "This is types-data test description";
+
+ revision "2013-02-27" {
+ reference " WILL BE DEFINED LATER";
+ }
+
+ typedef my-base-int32-type {
+ type int32 {
+ range "2..20";
+ }
+ }
+
+ typedef my-type1 {
+ type my-base-int32-type {
+ range "11..max";
+ }
+ units "mile";
+ default "11";
+ }
+
+ typedef my-custom-string {
+ type string {
+ pattern "[a-k]*";
+ length "5..11";
+ }
+ }
+
+ typedef my-string-type {
+ type my-custom-string {
+ length "5..10";
+ }
+ }
+
+ typedef my-string-type2 {
+ type my-string-type {
+ pattern "[b-u]*";
+ }
+ }
+
+ typedef my-string-type-ext {
+ type my-string-type2 {
+ pattern "[e-z]*";
+ }
+ }
+
+ typedef my-int-type {
+ type int32 {
+ range "10..20";
+ }
+ }
+
+ typedef my-int-type2 {
+ type my-int-type {
+ range "12..18";
+ }
+ }
+
+ typedef my-int-type-ext {
+ type my-int-type2 {
+ range "14..16";
+ }
+ }
+
+ typedef my-decimal-type {
+ type decimal64 {
+ fraction-digits 6;
+ }
+ }
+
+ typedef my-decimal-type-ext {
+ type decimal64 {
+ fraction-digits 5;
+ }
+ }
+
+ typedef my-union {
+ type union {
+ type int16 {
+ range "1..100";
+ }
+ type int32;
+ }
+ }
+
+ typedef my-union-ext {
+ type my-union;
+ }
+
+ typedef nested-union1 {
+ type nested-union2;
+ }
+
+ typedef nested-union2 {
+ type union {
+ type my-union-ext;
+ type string;
+ }
+ }
+
+ augment "/if:interfaces/if:ifEntry" {
+ when "if:ifType='ds0'";
+ leaf ds0ChannelNumber {
+ type string;
+ }
+ }
+
+ leaf if-name {
+ type leafref {
+ path "/interface/name";
+ }
+ }
+
+ leaf name {
+ type string;
+ }
+
+ leaf nested-type-leaf {
+ type my-type1;
+ }
+
+ extension c-define {
+ description
+ "Takes as argument a name string.
+ Makes the code generator use the given name in the
+ #define.";
+ argument "name" {
+ yin-element "true";
+ }
+ }
+
+ container system {
+ leaf user {
+ type string;
+ }
+ }
+
+}
namespace "urn:simple.container.demo";
prefix "iit";
- organization "Cisco";
+ organization "opendaylight";
contact "WILL-BE-DEFINED-LATER";
+
+
+ typedef access-operations-type {
+ type bits {
+ bit create {
+ description "Any protocol operation that creates a new data node.";
+ }
+ bit read {
+ description "Any protocol operation or notification that returns the value of a data node.";
+ position 500;
+ }
+ bit update {
+ description "Any protocol operation that alters an existing data node.";
+ }
+ bit delete {
+ description "Any protocol operation that removes a data node.";
+ position 365;
+ }
+ bit exec {
+ description "Execution access to the specified protocol operation.";
+ }
+ }
+ description "NETCONF Access Operation.";
+ }
leaf inst-id-leaf1 {
type instance-identifier {
base "crypto-base";
description "crypto-alg description";
}
+
+ leaf mybits {
+ type bits {
+ bit disable-nagle {
+ position 0;
+ }
+ bit auto-sense-speed {
+ position 1;
+ }
+ bit 10-Mb-only {
+ position 2;
+ }
+ }
+ default "auto-sense-speed";
+ }
}
package org.opendaylight.controller.yang.model.api;\r
\r
\r
-public interface TypeDefinition<T extends TypeDefinition<T>> extends SchemaNode {\r
+public interface TypeDefinition<T extends TypeDefinition<?>> extends SchemaNode {\r
\r
T getBaseType();\r
\r
import org.opendaylight.controller.yang.model.api.Status;
import org.opendaylight.controller.yang.model.api.TypeDefinition;
import org.opendaylight.controller.yang.model.api.UnknownSchemaNode;
+import org.opendaylight.controller.yang.model.api.type.LengthConstraint;
+import org.opendaylight.controller.yang.model.api.type.PatternConstraint;
+import org.opendaylight.controller.yang.model.api.type.RangeConstraint;
public class ExtendedType implements TypeDefinition {
private final String reference;
private final List<UnknownSchemaNode> unknownSchemaNodes;
+ private List<RangeConstraint> ranges = Collections.emptyList();
+ private List<LengthConstraint> lengths = Collections.emptyList();
+ private List<PatternConstraint> patterns = Collections.emptyList();
+ private Integer fractionDigits = null;
+
+
private Status status;
private String units;
private Object defaultValue;
private String units = "";
private Object defaultValue = null;
+ private List<RangeConstraint> ranges = Collections.emptyList();
+ private List<LengthConstraint> lengths = Collections.emptyList();
+ private List<PatternConstraint> patterns = Collections.emptyList();
+ private Integer fractionDigits = null;
+
public Builder(final QName typeName, TypeDefinition<?> baseType,
final String description, final String reference) {
this.typeName = typeName;
return this;
}
+ public Builder ranges(final List<RangeConstraint> ranges) {
+ if(ranges != null) {
+ this.ranges = ranges;
+ }
+ return this;
+ }
+
+ public Builder lengths(final List<LengthConstraint> lengths) {
+ if(lengths != null) {
+ this.lengths = lengths;
+ }
+ return this;
+ }
+
+ public Builder patterns(final List<PatternConstraint> patterns) {
+ if(patterns != null) {
+ this.patterns = patterns;
+ }
+ return this;
+ }
+
+ public Builder fractionDigits(final Integer fractionDigits) {
+ this.fractionDigits = fractionDigits;
+ return this;
+ }
+
public ExtendedType build() {
return new ExtendedType(this);
}
this.status = builder.status;
this.units = builder.units;
this.defaultValue = builder.defaultValue;
+
+ this.ranges = builder.ranges;
+ this.lengths = builder.lengths;
+ this.patterns = builder.patterns;
+ this.fractionDigits = builder.fractionDigits;
}
@Override
builder2.append("]");
return builder2.toString();
}
+
+ public List<RangeConstraint> getRanges() {
+ return ranges;
+ }
+
+ public List<LengthConstraint> getLengths() {
+ return lengths;
+ }
+
+ public List<PatternConstraint> getPatterns() {
+ return patterns;
+ }
+
+ public Integer getFractionDigits() {
+ return fractionDigits;
+ }
}
/**\r
* The <code>default</code> implementation of Instance Leafref Type Definition\r
* interface.\r
- * \r
+ *\r
* @see LeafrefTypeDefinition\r
*/\r
public class Leafref implements LeafrefTypeDefinition {\r
private final String units = "";\r
private final LeafrefTypeDefinition baseType;\r
\r
- private Leafref(final RevisionAwareXPath xpath) {\r
+ public Leafref(final RevisionAwareXPath xpath) {\r
this.xpath = xpath;\r
this.path = BaseTypes.schemaPath(name);\r
this.baseType = this;\r
}\r
- \r
+\r
public Leafref(final List<String> actualPath, final URI namespace,\r
final Date revision, final RevisionAwareXPath xpath) {\r
super();\r
this.xpath = xpath;\r
baseType = new Leafref(xpath);\r
}\r
- \r
+\r
public Leafref(final List<String> actualPath, final URI namespace,\r
final Date revision, final LeafrefTypeDefinition baseType,\r
final RevisionAwareXPath xpath) {\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see\r
* org.opendaylight.controller.yang.model.api.TypeDefinition#getBaseType()\r
*/\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see org.opendaylight.controller.yang.model.api.TypeDefinition#getUnits()\r
*/\r
@Override\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see\r
* org.opendaylight.controller.yang.model.api.TypeDefinition#getDefaultValue\r
* ()\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see org.opendaylight.controller.yang.model.api.SchemaNode#getQName()\r
*/\r
@Override\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see org.opendaylight.controller.yang.model.api.SchemaNode#getPath()\r
*/\r
@Override\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see\r
* org.opendaylight.controller.yang.model.api.SchemaNode#getDescription()\r
*/\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see org.opendaylight.controller.yang.model.api.SchemaNode#getReference()\r
*/\r
@Override\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see org.opendaylight.controller.yang.model.api.SchemaNode#getStatus()\r
*/\r
@Override\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see\r
* org.opendaylight.controller.yang.model.api.SchemaNode#getExtensionSchemaNodes\r
* ()\r
\r
/*\r
* (non-Javadoc)\r
- * \r
+ *\r
* @see\r
* org.opendaylight.controller.yang.model.api.type.LeafrefTypeDefinition\r
* #getPathStatement()\r
package org.opendaylight.controller.usermanager.internal;
+import org.opendaylight.controller.sal.utils.Status;
/**
* Configuration Java Object which represents a Local configured Authorization
* for a remote authenticated user for User Manager.
*/
public class AuthorizationConfig extends UserConfig {
- private static final long serialVersionUID = 1L;
-
- public AuthorizationConfig() {
- super();
- }
-
- // Constructor may be needed for autocontainer logic
- public AuthorizationConfig(String user, String role) {
- super();
- this.user = user;
- this.role = role;
- }
-
- @Override
- public boolean isValid() {
- return (user != null && !user.isEmpty() && role != null && !role
- .isEmpty());
- }
-
- public String getRolesData() {
- return (role.replace(",", " "));
- }
-
- public String toString() {
- return "AuthorizationConfig=[user: " + user + ", role: " + role + "]";
- }
+ private static final long serialVersionUID = 1L;
+
+ public AuthorizationConfig() {
+ super();
+ }
+
+ // Constructor may be needed for autocontainer logic
+ public AuthorizationConfig(String user, String role) {
+ super();
+ this.user = user;
+ this.role = role;
+ }
+
+ @Override
+ public Status validate() {
+ return (!isRoleValid().isSuccess() ? isRoleValid() : isUsernameValid());
+ }
+
+ public String getRolesData() {
+ return (role.replace(",", " "));
+ }
+
+ public String toString() {
+ return "AuthorizationConfig=[user: " + user + ", role: " + role + "]";
+ }
}
package org.opendaylight.controller.usermanager.internal;
import java.io.Serializable;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.opendaylight.controller.sal.authorization.AuthResultEnum;
+import org.opendaylight.controller.sal.utils.Status;
+import org.opendaylight.controller.sal.utils.StatusCode;
import org.opendaylight.controller.usermanager.AuthResponse;
/**
- * Configuration Java Object which represents a Local AAA user
- * configuration information for User Manager.
+ * Configuration Java Object which represents a Local AAA user configuration
+ * information for User Manager.
*/
public class UserConfig implements Serializable {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 1L;
- /*
- * Clear text password as we are moving to some MD5 digest
- * for when saving configurations
- */
- protected String user;
- protected String role;
- private String password;
+ /*
+ * Clear text password as we are moving to some MD5 digest for when saving
+ * configurations
+ */
+ protected String user;
+ protected String role;
+ private String password;
+ private static final int USERNAME_MAXLENGTH = 32;
+ private static final int PASSWORD_MINLENGTH = 5;
+ private static final int PASSWORD_MAXLENGTH = 256;
+ private static final Pattern INVALID_USERNAME_CHARACTERS = Pattern
+ .compile("([/\\s\\.\\?#%;\\\\]+)");
- public UserConfig() {
- }
+ public UserConfig() {
+ }
- public UserConfig(String user, String password, String role) {
- this.user = user;
- this.password = password;
- this.role = role;
- }
+ public UserConfig(String user, String password, String role) {
+ this.user = user;
+ this.password = password;
+ this.role = role;
+ }
- public String getUser() {
- return user;
- }
+ public String getUser() {
+ return user;
+ }
- public String getPassword() {
- return password;
- }
+ public String getPassword() {
+ return password;
+ }
- public String getRole() {
- return role;
- }
+ public String getRole() {
+ return role;
+ }
@Override
public int hashCode() {
public boolean equals(Object obj) {
return EqualsBuilder.reflectionEquals(this, obj);
}
-
+
@Override
public String toString() {
- return "UserConfig[user="+ user + ", password=" + password + "]";
+ return "UserConfig[user=" + user + ", password=" + password + "]";
+ }
+
+ public Status validate() {
+ Status validCheck = new Status(StatusCode.SUCCESS, null);
+ validCheck = isRoleValid();
+
+ if (validCheck.isSuccess())
+ validCheck = isUsernameValid();
+ if (validCheck.isSuccess())
+ validCheck = isPasswordValid();
+
+ return validCheck;
+ }
+
+ protected Status isUsernameValid() {
+ if (user == null || user.isEmpty()) {
+ return new Status(StatusCode.BADREQUEST, "Username cannot be empty");
+ }
+
+ Matcher mUser = UserConfig.INVALID_USERNAME_CHARACTERS.matcher(user);
+ if (user.length() > UserConfig.USERNAME_MAXLENGTH
+ || mUser.find() == true) {
+ return new Status(StatusCode.BADREQUEST,
+ "Username can have 1-32 non-whitespace "
+ + "alphanumeric characters and any special "
+ + "characters except ./#%;?\\");
+ }
+
+ return new Status(StatusCode.SUCCESS, null);
+ }
+
+ private Status isPasswordValid() {
+ if (password == null || password.isEmpty()) {
+ return new Status(StatusCode.BADREQUEST, "Password cannot be empty");
+ }
+
+ if (password.length() < UserConfig.PASSWORD_MINLENGTH
+ || password.length() > UserConfig.PASSWORD_MAXLENGTH) {
+ return new Status(StatusCode.BADREQUEST,
+ "Password should have 5-256 characters");
+ }
+ return new Status(StatusCode.SUCCESS, null);
}
- public boolean isValid() {
- return (user != null && !user.isEmpty() && role != null
- && !role.isEmpty() && password != null && !password.isEmpty());
- }
-
- public boolean update(String currentPassword, String newPassword,
- String newRole) {
- // To make any changes to a user configured profile, current password
- // must always be provided
- if (!this.password.equals(currentPassword)) {
- return false;
- }
- if (newPassword != null) {
- this.password = newPassword;
- }
- if (newRole != null) {
- this.role = newRole;
- }
- return true;
- }
-
- public AuthResponse authenticate(String clearTextPass) {
- AuthResponse locResponse = new AuthResponse();
- if (password.equals(clearTextPass)) {
- locResponse.setStatus(AuthResultEnum.AUTH_ACCEPT_LOC);
- locResponse.addData(role.replace(",", " "));
- } else {
- locResponse.setStatus(AuthResultEnum.AUTH_REJECT_LOC);
- }
- return locResponse;
- }
+ protected Status isRoleValid() {
+ if (role == null || role.isEmpty()) {
+ return new Status(StatusCode.BADREQUEST,
+ "Role name cannot be empty");
+ }
+ return new Status(StatusCode.SUCCESS, null);
+ }
+
+ public boolean update(String currentPassword, String newPassword,
+ String newRole) {
+ // To make any changes to a user configured profile, current password
+ // must always be provided
+ if (!this.password.equals(currentPassword)) {
+ return false;
+ }
+ if (newPassword != null) {
+ this.password = newPassword;
+ }
+ if (newRole != null) {
+ this.role = newRole;
+ }
+ return true;
+ }
+
+ public AuthResponse authenticate(String clearTextPass) {
+ AuthResponse locResponse = new AuthResponse();
+ if (password.equals(clearTextPass)) {
+ locResponse.setStatus(AuthResultEnum.AUTH_ACCEPT_LOC);
+ locResponse.addData(role.replace(",", " "));
+ } else {
+ locResponse.setStatus(AuthResultEnum.AUTH_REJECT_LOC);
+ }
+ return locResponse;
+ }
}
private static final String authFileName = ROOT + "authorization.conf";
private ConcurrentMap<String, UserConfig> localUserConfigList;
private ConcurrentMap<String, ServerConfig> remoteServerConfigList;
- private ConcurrentMap<String, AuthorizationConfig> authorizationConfList; // local authorization info for remotely authenticated users
+ private ConcurrentMap<String, AuthorizationConfig> authorizationConfList; // local
+ // authorization
+ // info
+ // for
+ // remotely
+ // authenticated
+ // users
private ConcurrentMap<String, AuthenticatedUser> activeUsers;
private ConcurrentMap<String, IAAAProvider> authProviders;
private ConcurrentMap<Long, String> localUserListSaveConfigEvent,
private ISessionManager sessionMgr = new SessionManager();
public boolean addAAAProvider(IAAAProvider provider) {
- if (provider == null
- || provider.getName() == null
+ if (provider == null || provider.getName() == null
|| provider.getName().trim().isEmpty()) {
return false;
}
this.applicationAuthorizationClients = Collections
.synchronizedSet(new HashSet<IResourceAuthorization>());
if (clusterGlobalService == null) {
- logger
- .error("un-initialized clusterGlobalService, can't create cache");
+ logger.error("un-initialized clusterGlobalService, can't create cache");
return;
}
EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
clusterGlobalService.createCache(
- "usermanager.remoteServerConfigList", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ "usermanager.remoteServerConfigList",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
clusterGlobalService.createCache(
- "usermanager.authorizationConfList", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ "usermanager.authorizationConfList",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
- clusterGlobalService.createCache("usermanager.activeUsers", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ clusterGlobalService.createCache("usermanager.activeUsers",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
clusterGlobalService.createCache(
- "usermanager.localUserSaveConfigEvent", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ "usermanager.localUserSaveConfigEvent",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
clusterGlobalService.createCache(
- "usermanager.remoteServerSaveConfigEvent", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ "usermanager.remoteServerSaveConfigEvent",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
clusterGlobalService.createCache(
- "usermanager.authorizationSaveConfigEvent", EnumSet
- .of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
+ "usermanager.authorizationSaveConfigEvent",
+ EnumSet.of(IClusterServices.cacheMode.NON_TRANSACTIONAL));
} catch (CacheConfigException cce) {
logger.error("\nCache configuration invalid - check cache mode");
} catch (CacheExistException ce) {
- logger
- .error("\nCache already exits - destroy and recreate if needed");
+ logger.error("\nCache already exits - destroy and recreate if needed");
}
}
- @SuppressWarnings( { "unchecked", "deprecation" })
+ @SuppressWarnings({ "unchecked", "deprecation" })
private void retrieveCaches() {
if (clusterGlobalService == null) {
logger.error("un-initialized clusterService, can't retrieve cache");
remoteServerSaveConfigEvent = (ConcurrentMap<Long, String>) clusterGlobalService
.getCache("usermanager.remoteServerSaveConfigEvent");
if (remoteServerSaveConfigEvent == null) {
- logger
- .error("\nFailed to get cache for remoteServerSaveConfigEvent");
+ logger.error("\nFailed to get cache for remoteServerSaveConfigEvent");
}
authorizationSaveConfigEvent = (ConcurrentMap<Long, String>) clusterGlobalService
.getCache("usermanager.authorizationSaveConfigEvent");
if (authorizationSaveConfigEvent == null) {
- logger
- .error("\nFailed to get cache for authorizationSaveConfigEvent");
+ logger.error("\nFailed to get cache for authorizationSaveConfigEvent");
}
}
private void loadConfigurations() {
- // To encode and decode user and server configuration objects
- loadSecurityKeys();
-
+ // To encode and decode user and server configuration objects
+ loadSecurityKeys();
+
/*
- * Do not load local startup file if we already got the
- * configurations synced from another cluster node
+ * Do not load local startup file if we already got the configurations
+ * synced from another cluster node
*/
if (localUserConfigList.isEmpty()) {
loadUserConfig();
}
private void loadSecurityKeys() {
-
- }
- private void checkDefaultNetworkAdmin() {
- // If startup config is not there, it's old or it was deleted,
- // need to add Default Admin
+ }
+
+ private void checkDefaultNetworkAdmin() {
+ // If startup config is not there, it's old or it was deleted,
+ // need to add Default Admin
if (!localUserConfigList.containsKey(defaultAdmin)) {
- localUserConfigList.put(defaultAdmin,
- new UserConfig(defaultAdmin,
- defaultAdminPassword,
- defaultAdminRole));
+ localUserConfigList.put(defaultAdmin, new UserConfig(defaultAdmin,
+ defaultAdminPassword, defaultAdminRole));
}
}
rcResponse = aaaClient.authService(userName, password,
aaaServer.getAddress(), aaaServer.getSecret());
if (rcResponse.getStatus() == AuthResultEnum.AUTH_ACCEPT) {
- logger
- .info(
- "Remote Authentication Succeeded for User: \"{}\", by Server: {}",
- userName, aaaServer.getAddress());
+ logger.info(
+ "Remote Authentication Succeeded for User: \"{}\", by Server: {}",
+ userName, aaaServer.getAddress());
remotelyAuthenticated = true;
break;
} else if (rcResponse.getStatus() == AuthResultEnum.AUTH_REJECT) {
}
rcResponse = localUser.authenticate(password);
if (rcResponse.getStatus() != AuthResultEnum.AUTH_ACCEPT_LOC) {
- logger.info("Local Authentication Failed for User: \"{}\", Reason: {}",
- userName, rcResponse.getStatus().toString());
-
+ logger.info(
+ "Local Authentication Failed for User: \"{}\", Reason: {}",
+ userName, rcResponse.getStatus().toString());
+
return (rcResponse.getStatus());
}
logger.info("Local Authentication Succeeded for User: \"{}\"",
result = new AuthenticatedUser(userName);
/*
- * Extract attributes from response
- * All the information we are interested in is in the first Cisco VSA (vendor specific attribute).
+ * Extract attributes from response All the information we are
+ * interested in is in the first Cisco VSA (vendor specific attribute).
* Just process the first VSA and return
*/
String attributes = (rcResponse.getData() != null && !rcResponse
authorizationInfoIsPresent = checkAuthorizationInfo(attributes);
/*
- * The AAA server was only used to perform the authentication
- * Look for locally stored authorization info for this user
- * If found, add the data to the rcResponse
+ * The AAA server was only used to perform the authentication Look for
+ * locally stored authorization info for this user If found, add the
+ * data to the rcResponse
*/
if (remotelyAuthenticated && !authorizationInfoIsPresent) {
- logger
- .info(
- "No Remote Authorization Info provided by Server for User: \"{}\"",
- userName);
+ logger.info(
+ "No Remote Authorization Info provided by Server for User: \"{}\"",
+ userName);
logger.info(
"Looking for Local Authorization Info for User: \"{}\"",
userName);
}
/*
- * Common response parsing for local & remote authenticated user
- * Looking for authorized resources, detecting attributes' validity
+ * Common response parsing for local & remote authenticated user Looking
+ * for authorized resources, detecting attributes' validity
*/
if (authorizationInfoIsPresent) {
- // Identifying the administrative role
+ // Identifying the administrative role
adminRoles = attributes.split(" ");
result.setRoleList(adminRoles);
authorized = true;
return rcResponse.getStatus();
}
- // Check in the attributes string whether or not authorization information is present
+ // Check in the attributes string whether or not authorization information
+ // is present
private boolean checkAuthorizationInfo(String attributes) {
return (attributes != null && !attributes.isEmpty());
}
private void removeUserFromActiveList(String user) {
if (!activeUsers.containsKey(user)) {
- // as cookie persists in cache, we can get logout for unexisting active users
+ // as cookie persists in cache, we can get logout for unexisting
+ // active users
return;
}
activeUsers.remove(user);
@Override
public Object readObject(ObjectInputStream ois)
throws FileNotFoundException, IOException, ClassNotFoundException {
- // Perform the class deserialization locally, from inside the package where the class is defined
+ // Perform the class deserialization locally, from inside the package
+ // where the class is defined
return ois.readObject();
}
* Interaction with GUI START
*/
public Status addRemoveLocalUser(UserConfig AAAconf, boolean delete) {
- // Validation check
- if (!AAAconf.isValid()) {
- String msg = "Invalid Local User configuration";
- logger.warn(msg);
- return new Status(StatusCode.BADREQUEST, msg);
+ // UserConfig Validation check
+ Status validCheck = AAAconf.validate();
+ if (!validCheck.isSuccess()) {
+ return validCheck;
}
// Update Config database
if (delete) {
- if (AAAconf.getUser().equals(UserManagerImpl.defaultAdmin)) {
- String msg = "Invalid Request: Default Network Admin User " +
- "cannot be deleted";
- logger.debug(msg);
- return new Status(StatusCode.NOTALLOWED, msg);
- }
+ if (AAAconf.getUser().equals(UserManagerImpl.defaultAdmin)) {
+ String msg = "Invalid Request: Default Network Admin User "
+ + "cannot be deleted";
+ logger.debug(msg);
+ return new Status(StatusCode.NOTALLOWED, msg);
+ }
localUserConfigList.remove(AAAconf.getUser());
} else {
- if (AAAconf.getUser().equals(UserManagerImpl.defaultAdmin)) {
- String msg = "Invalid Request: Default Network Admin User " +
- "cannot be added";
- logger.debug(msg);
- return new Status(StatusCode.NOTALLOWED, msg);
- }
+ if (AAAconf.getUser().equals(UserManagerImpl.defaultAdmin)) {
+ String msg = "Invalid Request: Default Network Admin User "
+ + "cannot be added";
+ logger.debug(msg);
+ return new Status(StatusCode.NOTALLOWED, msg);
+ }
localUserConfigList.put(AAAconf.getUser(), AAAconf);
}
private Status addRemoveAAAServer(ServerConfig AAAconf, boolean delete) {
// Validation check
if (!AAAconf.isValid()) {
- String msg = "Invalid Server configuration";
+ String msg = "Invalid Server configuration";
logger.warn(msg);
return new Status(StatusCode.BADREQUEST, msg);
}
return new Status(StatusCode.SUCCESS, null);
}
- private Status addRemoveAuthInfo(AuthorizationConfig AAAconf,
- boolean delete) {
- if (!AAAconf.isValid()) {
- String msg = "Invalid Authorization configuration";
+ private Status addRemoveAuthInfo(AuthorizationConfig AAAconf, boolean delete) {
+ Status configCheck = AAAconf.validate();
+ if (!configCheck.isSuccess()) {
+ String msg = "Invalid Authorization configuration: "
+ + configCheck.getDescription();
logger.warn(msg);
return new Status(StatusCode.BADREQUEST, msg);
}
@Override
public Status removeLocalUser(String userName) {
- if (userName == null || userName.trim().isEmpty()) {
- return new Status(StatusCode.BADREQUEST, "Invalid user name");
- }
- if (!localUserConfigList.containsKey(userName)) {
- return new Status(StatusCode.NOTFOUND, "User does not exist");
- }
+ if (userName == null || userName.trim().isEmpty()) {
+ return new Status(StatusCode.BADREQUEST, "Invalid user name");
+ }
+ if (!localUserConfigList.containsKey(userName)) {
+ return new Status(StatusCode.NOTFOUND, "User does not exist");
+ }
return addRemoveLocalUser(localUserConfigList.get(userName), true);
}
+
@Override
public Status addAAAServer(ServerConfig AAAconf) {
return addRemoveAAAServer(AAAconf, false);
@Override
public List<AuthorizationConfig> getAuthorizationList() {
- return new ArrayList<AuthorizationConfig>(authorizationConfList
- .values());
+ return new ArrayList<AuthorizationConfig>(
+ authorizationConfList.values());
}
@Override
// update configuration entry
targetConfigEntry = localUserConfigList.get(user);
if (targetConfigEntry == null) {
- return new Status(StatusCode.NOTFOUND, "User not found");
+ return new Status(StatusCode.NOTFOUND, "User not found");
}
if (false == targetConfigEntry.update(curPassword, newPassword, null)) {
- return new Status(StatusCode.BADREQUEST, "Current password is incorrect");
+ return new Status(StatusCode.BADREQUEST,
+ "Current password is incorrect");
}
- localUserConfigList.put(user, targetConfigEntry); // trigger cluster update
+ localUserConfigList.put(user, targetConfigEntry); // trigger cluster
+ // update
logger.info("Password changed for User \"{}\"", user);
@Override
public void userLogout(String userName) {
- // TODO: if user was authenticated through AAA server, send Acct-Status-Type=stop message to server with logout as reason
+ // TODO: if user was authenticated through AAA server, send
+ // Acct-Status-Type=stop message to server with logout as reason
removeUserFromActiveList(userName);
logger.info("User \"{}\" logged out", userName);
}
*/
@Override
public void userTimedOut(String userName) {
- // TODO: if user was authenticated through AAA server, send Acct-Status-Type=stop message to server with timeout as reason
+ // TODO: if user was authenticated through AAA server, send
+ // Acct-Status-Type=stop message to server with timeout as reason
removeUserFromActiveList(userName);
logger.info("User \"{}\" timed out", userName);
}
ci.println(conf.getUser() + " " + conf.getRole());
}
}
-
- public void _addAAAServer (CommandInterpreter ci) {
+
+ public void _addAAAServer(CommandInterpreter ci) {
String server = ci.nextArgument();
String secret = ci.nextArgument();
String protocol = ci.nextArgument();
-
+
if (server == null || secret == null || protocol == null) {
- ci.println("Usage : addAAAServer <server> <secret> <protocol>");
- return;
+ ci.println("Usage : addAAAServer <server> <secret> <protocol>");
+ return;
}
ServerConfig s = new ServerConfig(server, secret, protocol);
addAAAServer(s);
}
-
- public void _removeAAAServer (CommandInterpreter ci) {
+
+ public void _removeAAAServer(CommandInterpreter ci) {
String server = ci.nextArgument();
String secret = ci.nextArgument();
String protocol = ci.nextArgument();
-
+
if (server == null || secret == null || protocol == null) {
- ci.println("Usage : addAAAServer <server> <secret> <protocol>");
- return;
+ ci.println("Usage : addAAAServer <server> <secret> <protocol>");
+ return;
}
ServerConfig s = new ServerConfig(server, secret, protocol);
removeAAAServer(s);
}
- public void _printAAAServers (CommandInterpreter ci) {
+ public void _printAAAServers(CommandInterpreter ci) {
for (ServerConfig aaaServer : remoteServerConfigList.values()) {
String protocol = aaaServer.getProtocol();
ci.println(aaaServer.getAddress() + "-" + aaaServer.getProtocol());
/**
* Function called by the dependency manager when all the required
* dependencies are satisfied
- *
+ *
*/
void init() {
}
/**
- * Function called by the dependency manager when at least one
- * dependency become unsatisfied or when the component is shutting
- * down because for example bundle is being stopped.
- *
+ * Function called by the dependency manager when at least one dependency
+ * become unsatisfied or when the component is shutting down because for
+ * example bundle is being stopped.
+ *
*/
void destroy() {
}
/**
- * Function called by dependency manager after "init ()" is called
- * and after the services provided by the class are registered in
- * the service registry
- *
+ * Function called by dependency manager after "init ()" is called and after
+ * the services provided by the class are registered in the service registry
+ *
*/
void start() {
authProviders = new ConcurrentHashMap<String, IAAAProvider>();
}
/**
- * Function called by the dependency manager before the services
- * exported by the component are unregistered, this will be
- * followed by a "destroy ()" calls
- *
+ * Function called by the dependency manager before the services exported by
+ * the component are unregistered, this will be followed by a "destroy ()"
+ * calls
+ *
*/
void stop() {
}
@Override
public UserLevel getUserLevel(String username) {
// Returns the controller well-know user level for the passed user
- String roleName = null;
+ String roleName = null;
- // First check in active users then in local configured users
+ // First check in active users then in local configured users
if (activeUsers.containsKey(username)) {
- roleName = activeUsers.get(username).getUserRoles().get(0);
+ roleName = activeUsers.get(username).getUserRoles().get(0);
} else if (localUserConfigList.containsKey(username)) {
- roleName = localUserConfigList.get(username).getRole();
+ roleName = localUserConfigList.get(username).getRole();
}
-
+
if (roleName == null) {
- return UserLevel.NOUSER;
+ return UserLevel.NOUSER;
}
-
+
// For now only one role per user is allowed
if (roleName.equals(UserLevel.SYSTEMADMIN.toString())) {
return UserLevel.SYSTEMADMIN;
}
return new Status(StatusCode.INTERNALERROR,
- "Failed to save user configurations");
+ "Failed to save user configurations");
}
@Override
return new User(username, localUserConfigList.get(username)
.getPassword(), enabled, accountNonExpired,
- credentialsNonExpired, accountNonLocked, user
- .getGrantedAuthorities(getUserLevel(username)));
+ credentialsNonExpired, accountNonLocked,
+ user.getGrantedAuthorities(getUserLevel(username)));
} else
throw new UsernameNotFoundException("User not found " + username);
}
"Username or credentials did not match");
}
- AuthResultEnum result = authenticate((String) authentication
- .getPrincipal(), (String) authentication.getCredentials());
+ AuthResultEnum result = authenticate(
+ (String) authentication.getPrincipal(),
+ (String) authentication.getCredentials());
if (result.equals(AuthResultEnum.AUTHOR_PASS)
|| result.equals(AuthResultEnum.AUTH_ACCEPT_LOC)
|| result.equals(AuthResultEnum.AUTH_ACCEPT)) {
}
authentication = new UsernamePasswordAuthenticationToken(
- authentication.getPrincipal(), authentication
- .getCredentials(), user
- .getGrantedAuthorities(getUserLevel(authentication
- .getName())));
+ authentication.getPrincipal(),
+ authentication.getCredentials(),
+ user.getGrantedAuthorities(getUserLevel(authentication
+ .getName())));
return authentication;
} else
}
- //following are setters for use in unit testing
+ // following are setters for use in unit testing
void setLocalUserConfigList(ConcurrentMap<String, UserConfig> ucl) {
- if (ucl != null) { this.localUserConfigList = ucl; }
+ if (ucl != null) {
+ this.localUserConfigList = ucl;
+ }
}
- void setRemoteServerConfigList (ConcurrentMap<String, ServerConfig> scl) {
- if (scl != null) { this.remoteServerConfigList = scl; }
+
+ void setRemoteServerConfigList(ConcurrentMap<String, ServerConfig> scl) {
+ if (scl != null) {
+ this.remoteServerConfigList = scl;
+ }
}
- void setAuthorizationConfList (ConcurrentMap<String, AuthorizationConfig> acl) {
- if (acl != null) { this.authorizationConfList = acl; }
+
+ void setAuthorizationConfList(ConcurrentMap<String, AuthorizationConfig> acl) {
+ if (acl != null) {
+ this.authorizationConfList = acl;
+ }
}
- void setActiveUsers (ConcurrentMap<String, AuthenticatedUser> au) {
- if (au != null) { this.activeUsers = au; }
+
+ void setActiveUsers(ConcurrentMap<String, AuthenticatedUser> au) {
+ if (au != null) {
+ this.activeUsers = au;
+ }
}
- void setAuthProviders(ConcurrentMap<String, IAAAProvider> ap ) {
- if (ap != null){
+
+ void setAuthProviders(ConcurrentMap<String, IAAAProvider> ap) {
+ if (ap != null) {
this.authProviders = ap;
}
}
-
+
@Override
public ISessionManager getSessionManager() {
return this.sessionMgr;
}
-
+
public void setSessionMgr(ISessionManager sessionMgr) {
this.sessionMgr = sessionMgr;
}
-
+
public String getPassword(String username) {
return localUserConfigList.get(username).getPassword();
}
*/
public class AuthorizationUserConfigTest {
- @Test
- public void AuthorizationConfigTest() {
- AuthorizationConfig authConfig;
-
- // test isValid
- authConfig = new AuthorizationConfig(null,
- UserLevel.SYSTEMADMIN.toString());
- assertFalse(authConfig.isValid());
- authConfig = new AuthorizationConfig("admin", "");
- assertFalse(authConfig.isValid());
- authConfig = new AuthorizationConfig("admin",
- UserLevel.SYSTEMADMIN.toString());
- assertTrue(authConfig.isValid());
- }
-
- @Test
- public void UserConfigTest() {
- UserConfig userConfig;
-
- userConfig = new UserConfig(null, "cisco",
- UserLevel.NETWORKOPERATOR.toString());
- assertFalse(userConfig.isValid());
-
- userConfig = new UserConfig("uname", "", "cisco");
- assertFalse(userConfig.isValid());
-
- userConfig = new UserConfig("uname", "ciscocisco",
- UserLevel.NETWORKOPERATOR.toString());
- assertTrue(userConfig.isValid());
-
- /* currentPassword mismatch */
- assertFalse(userConfig.update("Cisco", "cisco123",
- UserLevel.NETWORKOPERATOR.toString()));
-
- assertTrue(userConfig.update("ciscocisco", null,
- UserLevel.NETWORKOPERATOR.toString()));
- /* New Password = null, No change in password */
- assertTrue(userConfig.getPassword().equals("ciscocisco"));
-
- /* Password changed successfully, no change in user role */
- assertTrue(userConfig.update("ciscocisco", "cisco123",
- UserLevel.NETWORKOPERATOR.toString()));
- assertTrue(userConfig.getPassword().equals("cisco123"));
- assertTrue(userConfig.getRole().equals(
- UserLevel.NETWORKOPERATOR.toString()));
-
- /* Password not changed, role changed successfully */
- assertTrue(userConfig.update("cisco123", "cisco123",
- UserLevel.SYSTEMADMIN.toString()));
- assertTrue(userConfig.getPassword().equals("cisco123"));
- assertTrue(userConfig.getRole()
- .equals(UserLevel.SYSTEMADMIN.toString()));
-
- /* Password and role changed successfully */
- assertTrue(userConfig.update("cisco123", "ciscocisco",
- UserLevel.SYSTEMADMIN.toString()));
- assertTrue(userConfig.getPassword().equals("ciscocisco"));
- assertTrue(userConfig.getRole()
- .equals(UserLevel.SYSTEMADMIN.toString()));
-
- String username = userConfig.getUser();
- assertTrue(username.equals("uname"));
-
- // test authenticate
- AuthResponse authresp = userConfig.authenticate("ciscocisco");
- assertTrue(authresp.getStatus().equals(AuthResultEnum.AUTH_ACCEPT_LOC));
- authresp = userConfig.authenticate("wrongPassword");
- assertTrue(authresp.getStatus().equals(AuthResultEnum.AUTH_REJECT_LOC));
-
- // test equals()
- userConfig = new UserConfig("uname", "ciscocisco",
- UserLevel.NETWORKOPERATOR.toString());
- assertEquals(userConfig, userConfig);
- UserConfig userConfig2 = new UserConfig("uname",
- "ciscocisco",
- UserLevel.NETWORKOPERATOR.toString());
- assertEquals(userConfig, userConfig2);
- }
+ @Test
+ public void AuthorizationConfigTest() {
+ AuthorizationConfig authConfig;
+
+ // test isValid
+ authConfig = new AuthorizationConfig(null,
+ UserLevel.SYSTEMADMIN.toString());
+ assertFalse(authConfig.validate().isSuccess());
+ authConfig = new AuthorizationConfig("admin", "");
+ assertFalse(authConfig.validate().isSuccess());
+ authConfig = new AuthorizationConfig("admin",
+ UserLevel.SYSTEMADMIN.toString());
+ assertTrue(authConfig.validate().isSuccess());
+ }
+
+ @Test
+ public void UserConfigTest() {
+ UserConfig userConfig;
+
+ userConfig = new UserConfig(null, "cisco",
+ UserLevel.NETWORKOPERATOR.toString());
+ assertFalse(userConfig.validate().isSuccess());
+
+ userConfig = new UserConfig("uname", "", "cisco");
+ assertFalse(userConfig.validate().isSuccess());
+
+ userConfig = new UserConfig("uname", "ciscocisco",
+ UserLevel.NETWORKOPERATOR.toString());
+ assertTrue(userConfig.validate().isSuccess());
+
+ /* currentPassword mismatch */
+ assertFalse(userConfig.update("Cisco", "cisco123",
+ UserLevel.NETWORKOPERATOR.toString()));
+
+ assertTrue(userConfig.update("ciscocisco", null,
+ UserLevel.NETWORKOPERATOR.toString()));
+ /* New Password = null, No change in password */
+ assertTrue(userConfig.getPassword().equals("ciscocisco"));
+
+ /* Password changed successfully, no change in user role */
+ assertTrue(userConfig.update("ciscocisco", "cisco123",
+ UserLevel.NETWORKOPERATOR.toString()));
+ assertTrue(userConfig.getPassword().equals("cisco123"));
+ assertTrue(userConfig.getRole().equals(
+ UserLevel.NETWORKOPERATOR.toString()));
+
+ /* Password not changed, role changed successfully */
+ assertTrue(userConfig.update("cisco123", "cisco123",
+ UserLevel.SYSTEMADMIN.toString()));
+ assertTrue(userConfig.getPassword().equals("cisco123"));
+ assertTrue(userConfig.getRole()
+ .equals(UserLevel.SYSTEMADMIN.toString()));
+
+ /* Password and role changed successfully */
+ assertTrue(userConfig.update("cisco123", "ciscocisco",
+ UserLevel.SYSTEMADMIN.toString()));
+ assertTrue(userConfig.getPassword().equals("ciscocisco"));
+ assertTrue(userConfig.getRole()
+ .equals(UserLevel.SYSTEMADMIN.toString()));
+
+ String username = userConfig.getUser();
+ assertTrue(username.equals("uname"));
+
+ // test authenticate
+ AuthResponse authresp = userConfig.authenticate("ciscocisco");
+ assertTrue(authresp.getStatus().equals(AuthResultEnum.AUTH_ACCEPT_LOC));
+ authresp = userConfig.authenticate("wrongPassword");
+ assertTrue(authresp.getStatus().equals(AuthResultEnum.AUTH_REJECT_LOC));
+
+ // test equals()
+ userConfig = new UserConfig("uname", "ciscocisco",
+ UserLevel.NETWORKOPERATOR.toString());
+ assertEquals(userConfig, userConfig);
+ UserConfig userConfig2 = new UserConfig("uname", "ciscocisco",
+ UserLevel.NETWORKOPERATOR.toString());
+ assertEquals(userConfig, userConfig2);
+ }
}
return $table;
},
validateName: function(name) {
- return name.match(/^[a-zA-Z0-9][a-zA-Z0-9_\-\.]{1,31}$/g) == null ? false : true;
+ return (name.length < 256);
}
};
var result = {};
result['nodeName'] = $('#' + one.f.switchmanager.nodesLearnt.id.modal.form.nodeName, $modal).val();
if(!one.f.switchmanager.validateName(result['nodeName'])) {
- alert("Node name can contain alphabets numbers and characters _ - . upto 32 characters in length");
+ alert("Node name can contain upto 255 characters");
return;
}
result['nodeId'] = $('#' + one.f.switchmanager.nodesLearnt.id.modal.form.nodeId, $modal).val();
var result = {};
result['gatewayName'] = $('#' + one.f.switchmanager.subnetGatewayConfig.id.modal.form.name, $modal).val();
if(!one.f.switchmanager.validateName(result['gatewayName'])) {
- alert("Gateway name can contain alphabets numbers and characters _ - . upto 32 characters in length");
+ alert("Gateway name can contain upto 255 characters");
return;
}
result['gatewayIPAddress'] = $('#' + one.f.switchmanager.subnetGatewayConfig.id.modal.form.gatewayIPAddress, $modal).val();