From 7ef28e701bb30353fe438e97c635e05130f23b84 Mon Sep 17 00:00:00 2001
From: Jozef Behran
Date: Wed, 13 Jan 2016 10:07:47 +0100
Subject: [PATCH] Updated code to match new rules

Here are the components of this change:

https://git.opendaylight.org/gerrit/32476
https://git.opendaylight.org/gerrit/32477
https://git.opendaylight.org/gerrit/32479
https://git.opendaylight.org/gerrit/32499
https://git.opendaylight.org/gerrit/32494
https://git.opendaylight.org/gerrit/32481
https://git.opendaylight.org/gerrit/32482
https://git.opendaylight.org/gerrit/32483
https://git.opendaylight.org/gerrit/32487
https://git.opendaylight.org/gerrit/32488

Change-Id: I8a9b019cf910678ef2e601a4dbc499976e2a7590
Signed-off-by: Jozef Behran
---
 csit/libraries/AuthStandalone.py | 8 +-
 csit/libraries/ClusterStateLibrary.py | 11 +-
 csit/libraries/CrudLibrary.py | 12 +-
 csit/libraries/HsfJson/hsf_json.py | 11 +-
 csit/libraries/HsfJson/hsfod.py | 5 +-
 csit/libraries/SettingsLibrary.py | 11 +-
 csit/libraries/UtilLibrary.py | 13 +-
 csit/libraries/ipaddr.py | 33 +-
 csit/variables/bgpuser/variables.py | 7 +-
 csit/variables/pcepuser/variables.py | 120 ++--
 csit/variables/tcpmd5user/variables.py | 7 +-
 tools/CSIT_Test/base/__init__.py | 6 -
 tools/CSIT_Test/base/modules/arp_handler.py | 56 --
 .../base/modules/container_manager.py | 54 --
 .../base/modules/forwarding_manager.py | 53 --
 .../base/modules/forwarding_rule_manager.py | 67 ---
 tools/CSIT_Test/base/modules/host_tracker.py | 56 --
 .../base/modules/statistics_manager.py | 46 --
 .../CSIT_Test/base/modules/switch_manager.py | 156 -----
 .../base/modules/topology_manager.py | 97 ----
 tools/CSIT_Test/base/restlib.py | 150 -----
 tools/CSIT_Test/base/run.py | 48 --
 tools/CSIT_Test/base/testmodule.py | 123 ----
 tools/OF_Test/action-keywords.csv | 3 -
 tools/OF_Test/keywords.csv | 15 -
 tools/OF_Test/keywords.csv.backup | 50 --
 tools/OF_Test/match-keywords.csv | 32 --
 tools/OF_Test/odl_tests.py | 535 ------
 tools/OF_Test/odl_tests.py.backup | 518 -----
 tools/OF_Test/ofctl/t1 | 1 -
 tools/OF_Test/ofctl/t11 | 1 -
 tools/OF_Test/ofctl/t19 | 1 -
 tools/OF_Test/ofctl/t2 | 1 -
 tools/OF_Test/ofctl/t20 | 1 -
 tools/OF_Test/ofctl/t21 | 1 -
 tools/OF_Test/ofctl/t22 | 1 -
 tools/OF_Test/ofctl/t25 | 1 -
 tools/OF_Test/ofctl/t27 | 1 -
 tools/OF_Test/ofctl/t29 | 1 -
 tools/OF_Test/ofctl/t30 | 1 -
 tools/OF_Test/ofctl/t31 | 1 -
 tools/OF_Test/ofctl/t33 | 1 -
 tools/OF_Test/ofctl/t36 | 1 -
 tools/OF_Test/ofctl/t38 | 1 -
 tools/OF_Test/ofctl/t4 | 1 -
 tools/OF_Test/ofctl/t41 | 1 -
 tools/OF_Test/ofctl/t43 | 1 -
 tools/OF_Test/ofctl/t45 | 1 -
 tools/OF_Test/ofctl/t48 | 1 -
 tools/OF_Test/ofctl/t5 | 1 -
 tools/OF_Test/ofctl/t6 | 1 -
 .../010__Connect_256_Switches.txt | 41 --
 .../030_Config_100k_flows.txt | 50 --
 .../libconfig.py | 157 -----
 tools/OF_Test/test.py | 14 -
 tools/OF_Test/test1.py | 13 -
 tools/OF_Test/xmls/f1.xml | 34 --
 tools/OF_Test/xmls/f10.xml | 48 --
 tools/OF_Test/xmls/f11.xml | 51 --
 tools/OF_Test/xmls/f12.xml | 43 --
 tools/OF_Test/xmls/f13.xml | 46 --
 tools/OF_Test/xmls/f14.xml | 52 --
 tools/OF_Test/xmls/f15.xml | 45 --
 tools/OF_Test/xmls/f16.xml | 35 --
 tools/OF_Test/xmls/f17.xml | 36 --
 tools/OF_Test/xmls/f18.xml | 36 --
 tools/OF_Test/xmls/f19.xml | 36 --
 tools/OF_Test/xmls/f2.xml | 34 --
 tools/OF_Test/xmls/f20.xml | 36 --
 tools/OF_Test/xmls/f21.xml | 36 --
 tools/OF_Test/xmls/f22.xml | 36 --
 tools/OF_Test/xmls/f23.xml | 39 --
 tools/OF_Test/xmls/f24.xml | 36 --
 tools/OF_Test/xmls/f25.xml | 36 --
 tools/OF_Test/xmls/f26.xml | 38 --
 tools/OF_Test/xmls/f27.xml | 34 --
 tools/OF_Test/xmls/f28.xml | 36 --
 tools/OF_Test/xmls/f29.xml | 36 --
 tools/OF_Test/xmls/f3.xml | 33 --
 tools/OF_Test/xmls/f30.xml | 36 --
 tools/OF_Test/xmls/f31.xml | 33 --
 tools/OF_Test/xmls/f32.xml | 38 --
 tools/OF_Test/xmls/f33.xml | 38 --
 tools/OF_Test/xmls/f34.xml | 38 --
 tools/OF_Test/xmls/f35.xml | 38 --
 tools/OF_Test/xmls/f36.xml | 36 --
 tools/OF_Test/xmls/f37.xml | 38 --
 tools/OF_Test/xmls/f38.xml | 38 --
 tools/OF_Test/xmls/f39.xml | 38 --
 tools/OF_Test/xmls/f4.xml | 39 --
 tools/OF_Test/xmls/f40.xml | 38 --
 tools/OF_Test/xmls/f41.xml | 38 --
 tools/OF_Test/xmls/f42.xml | 36 --
 tools/OF_Test/xmls/f43.xml | 42 --
 tools/OF_Test/xmls/f44.xml | 32 --
 tools/OF_Test/xmls/f45.xml | 34 --
 tools/OF_Test/xmls/f46.xml | 44 --
 tools/OF_Test/xmls/f47.xml | 42 --
 tools/OF_Test/xmls/f48.xml | 31 -
 tools/OF_Test/xmls/f49.xml | 34 --
 tools/OF_Test/xmls/f5.xml | 41 --
 tools/OF_Test/xmls/f6.xml | 44 --
 tools/OF_Test/xmls/f7.xml | 46 --
 tools/OF_Test/xmls/f8.xml | 50 --
 tools/OF_Test/xmls/f9.xml | 49 --
 tools/Robot_Tool/README.md | 73 ---
 tools/Robot_Tool/create_docs.py | 70 ---
 tools/Robot_Tool/libraries/ArpHandler.py | 56 --
 tools/Robot_Tool/libraries/Common.py | 41 --
 .../Robot_Tool/libraries/ContainerManager.py | 56 --
 .../Robot_Tool/libraries/ForwardingManager.py | 53 --
 .../libraries/ForwardingRuleManager.py | 69 ---
 tools/Robot_Tool/libraries/HostTracker.py | 57 --
 tools/Robot_Tool/libraries/MininetHandler.py | 51 --
 tools/Robot_Tool/libraries/README.md | 1 -
 tools/Robot_Tool/libraries/RequestsLibrary.py | 205 -------
 .../Robot_Tool/libraries/StatisticsManager.py | 46 --
 tools/Robot_Tool/libraries/SwitchManager.py | 48 --
 tools/Robot_Tool/libraries/Topology.py | 45 --
 tools/Robot_Tool/libraries/TopologyManager.py | 98 ----
 tools/Robot_Tool/libraries/restlib.py | 150 -----
 tools/Robot_Tool/libraries/testmodule.py | 123 ----
 tools/Robot_Tool/resources/README.md | 1 -
 tools/Robot_Tool/suites/base/__init__.txt | 15 -
 tools/Robot_Tool/suites/base/arp_handler.txt | 41 --
 .../suites/base/container_manager.txt | 46 --
 .../suites/base/forwarding_manager.txt | 41 --
 .../suites/base/forwarding_rule_manager.txt | 48 --
 tools/Robot_Tool/suites/base/host_tracker.txt | 45 --
 .../suites/base/statistics_manager.txt | 12 -
 .../Robot_Tool/suites/base/switch_manager.txt | 143 -----
 .../suites/base/topology_manager.txt | 45 --
 .../suites/cluster/010__bridge_topology.txt | 68 ---
 tools/Robot_Tool/suites/cluster/015__FRM.txt | 59 --
 .../suites/cluster/020__c1_fails.txt | 67 ---
 .../suites/cluster/025__c2_fails.txt | 71 ---
 tools/Robot_Tool/suites/cluster/__init__.txt | 27 -
 .../suites/ha/005__two_controller_running.txt | 154 -----
 tools/Robot_Tool/suites/ha/010__c1_fails.txt | 115 ----
 tools/Robot_Tool/suites/ha/015__c2_fails.txt | 113 ----
 .../Robot_Tool/suites/ha/020__c1_recovers.txt | 130 -----
 .../suites/ha/025__two_controllers_fail.txt | 113 ----
 .../ha/030__two_controller_see_flow.txt | 91 ---
 .../suites/ha/035__installed_flow_remains.txt | 142 -----
 tools/Robot_Tool/suites/ha/__init__.txt | 25 -
 tools/Robot_Tool/suites/ha/resource.txt | 38 --
 tools/Robot_Tool/variables/README.md | 1 -
 tools/Robot_Tool/variables/Variables.py | 17 -
 tools/clustering/cluster-deployer/deploy.py | 40 +-
 tools/clustering/cluster-monitor/isolate.py | 14 +-
 tools/clustering/cluster-monitor/rejoin.py | 14 +-
 .../cluster-monitor/timed_isolation.py | 14 +-
 tools/fastbgp/bgp_app_peer.py | 11 +-
 tools/fastbgp/play.py | 28 +-
 tools/mdsal_benchmark/dsbenchmark.py | 11 +-
 tools/mdsal_benchmark/ntfbenchmark.py | 10 +-
 tools/mdsal_benchmark/rpcbenchmark.py | 10 +-
 tools/netconf_tools/getter.py | 20 +-
 .../clustering-functional-test/crud.py | 8 +-
 .../clustering-functional-test/settings.py | 11 +-
 .../clustering-functional-test/util.py | 6 +-
 .../config_cleanup.py | 8 +-
 .../flow_add_delete_test.py | 11 +-
 .../flow_config_blaster.py | 10 +-
 .../flow_config_blaster_fle.py | 11 +-
 .../inventory_crawler.py | 11 +-
 .../inventory_perf.py | 11 +-
 .../inventory_read_blaster.py | 9 +-
 .../pretty_print.py | 6 +-
 .../shard_perf_test.py | 11 +-
 .../scripts/cluster_rest_script.py | 9 +-
 .../ovsdbconfigblaster.py | 32 +-
 tools/pcep_updater/updater.py | 12 +-
 173 files changed, 327 insertions(+), 7424 deletions(-)
 delete mode 100644 tools/CSIT_Test/base/__init__.py
 delete mode 100644 tools/CSIT_Test/base/modules/arp_handler.py
 delete mode 100644 tools/CSIT_Test/base/modules/container_manager.py
 delete mode 100644 tools/CSIT_Test/base/modules/forwarding_manager.py
 delete mode 100644 tools/CSIT_Test/base/modules/forwarding_rule_manager.py
 delete mode 100644 tools/CSIT_Test/base/modules/host_tracker.py
 delete mode 100644 tools/CSIT_Test/base/modules/statistics_manager.py
 delete mode 100644 tools/CSIT_Test/base/modules/switch_manager.py
 delete mode 100644 tools/CSIT_Test/base/modules/topology_manager.py
 delete mode 100644 tools/CSIT_Test/base/restlib.py
 delete mode 100644 tools/CSIT_Test/base/run.py
 delete mode 100644 tools/CSIT_Test/base/testmodule.py
 delete mode 100644 tools/OF_Test/action-keywords.csv
 delete mode 100755 tools/OF_Test/keywords.csv
 delete mode 100755 tools/OF_Test/keywords.csv.backup
 delete mode 100755 tools/OF_Test/match-keywords.csv
 delete mode 100755 tools/OF_Test/odl_tests.py
 delete mode 100755 tools/OF_Test/odl_tests.py.backup
 delete mode 100644 tools/OF_Test/ofctl/t1
 delete mode 100644 tools/OF_Test/ofctl/t11
 delete mode 100644 tools/OF_Test/ofctl/t19
 delete mode 100644 tools/OF_Test/ofctl/t2
 delete mode 100644 tools/OF_Test/ofctl/t20
 delete mode 100644 tools/OF_Test/ofctl/t21
 delete mode 100644 tools/OF_Test/ofctl/t22
 delete mode 100644 tools/OF_Test/ofctl/t25
 delete mode 100644 tools/OF_Test/ofctl/t27
 delete mode 100644 tools/OF_Test/ofctl/t29
 delete mode 100644 tools/OF_Test/ofctl/t30
 delete mode 100644 tools/OF_Test/ofctl/t31
 delete mode 100644 tools/OF_Test/ofctl/t33
 delete mode 100644 tools/OF_Test/ofctl/t36
 delete mode 100644 tools/OF_Test/ofctl/t38
 delete mode 100644 tools/OF_Test/ofctl/t4
 delete mode 100644 tools/OF_Test/ofctl/t41
 delete mode 100644 tools/OF_Test/ofctl/t43
 delete mode 100644 tools/OF_Test/ofctl/t45
 delete mode 100644 tools/OF_Test/ofctl/t48
 delete mode 100644 tools/OF_Test/ofctl/t5
 delete mode 100644 tools/OF_Test/ofctl/t6
 delete mode 100644 tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/010__Connect_256_Switches.txt
 delete mode 100644 tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/030_Config_100k_flows.txt
 delete mode 100644 tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/libconfig.py
 delete mode 100644 tools/OF_Test/test.py
 delete mode 100644 tools/OF_Test/test1.py
 delete mode 100644 tools/OF_Test/xmls/f1.xml
 delete mode 100644 tools/OF_Test/xmls/f10.xml
 delete mode 100644 tools/OF_Test/xmls/f11.xml
 delete mode 100644 tools/OF_Test/xmls/f12.xml
 delete mode 100644 tools/OF_Test/xmls/f13.xml
 delete mode 100644 tools/OF_Test/xmls/f14.xml
 delete mode 100644 tools/OF_Test/xmls/f15.xml
 delete mode 100644 tools/OF_Test/xmls/f16.xml
 delete mode 100644 tools/OF_Test/xmls/f17.xml
 delete mode 100644 tools/OF_Test/xmls/f18.xml
 delete mode 100644 tools/OF_Test/xmls/f19.xml
 delete mode 100644 tools/OF_Test/xmls/f2.xml
 delete mode 100644 tools/OF_Test/xmls/f20.xml
 delete mode 100644 tools/OF_Test/xmls/f21.xml
 delete mode 100644 tools/OF_Test/xmls/f22.xml
 delete mode 100644 tools/OF_Test/xmls/f23.xml
 delete mode 100644 tools/OF_Test/xmls/f24.xml
 delete mode 100644 tools/OF_Test/xmls/f25.xml
 delete mode 100644 tools/OF_Test/xmls/f26.xml
 delete mode 100644 tools/OF_Test/xmls/f27.xml
 delete mode 100644 tools/OF_Test/xmls/f28.xml
 delete mode 100644 tools/OF_Test/xmls/f29.xml
 delete mode 100644 tools/OF_Test/xmls/f3.xml
 delete mode 100644 tools/OF_Test/xmls/f30.xml
 delete mode 100644 tools/OF_Test/xmls/f31.xml
 delete mode 100644 tools/OF_Test/xmls/f32.xml
 delete mode 100644 tools/OF_Test/xmls/f33.xml
 delete mode 100644 tools/OF_Test/xmls/f34.xml
 delete mode 100644 tools/OF_Test/xmls/f35.xml
 delete mode 100644 tools/OF_Test/xmls/f36.xml
 delete mode 100644 tools/OF_Test/xmls/f37.xml
 delete mode 100644 tools/OF_Test/xmls/f38.xml
 delete mode 100644 tools/OF_Test/xmls/f39.xml
 delete mode 100644 tools/OF_Test/xmls/f4.xml
 delete mode 100644 tools/OF_Test/xmls/f40.xml
 delete mode 100644 tools/OF_Test/xmls/f41.xml
 delete mode 100644 tools/OF_Test/xmls/f42.xml
 delete mode 100644 tools/OF_Test/xmls/f43.xml
 delete mode 100644 tools/OF_Test/xmls/f44.xml
 delete mode 100644 tools/OF_Test/xmls/f45.xml
 delete mode 100644 tools/OF_Test/xmls/f46.xml
 delete mode 100644 tools/OF_Test/xmls/f47.xml
 delete mode 100644 tools/OF_Test/xmls/f48.xml
 delete mode 100644 tools/OF_Test/xmls/f49.xml
 delete mode 100644 tools/OF_Test/xmls/f5.xml
 delete mode 100644 tools/OF_Test/xmls/f6.xml
 delete mode 100644 tools/OF_Test/xmls/f7.xml
 delete mode 100644 tools/OF_Test/xmls/f8.xml
 delete mode 100644 tools/OF_Test/xmls/f9.xml
 delete mode 100644 tools/Robot_Tool/README.md
 delete mode 100644 tools/Robot_Tool/create_docs.py
 delete mode 100644 tools/Robot_Tool/libraries/ArpHandler.py
 delete mode 100644 tools/Robot_Tool/libraries/Common.py
 delete mode 100644 tools/Robot_Tool/libraries/ContainerManager.py
 delete mode 100644 tools/Robot_Tool/libraries/ForwardingManager.py
 delete mode 100644 tools/Robot_Tool/libraries/ForwardingRuleManager.py
 delete mode 100644 tools/Robot_Tool/libraries/HostTracker.py
 delete mode 100644 tools/Robot_Tool/libraries/MininetHandler.py
 delete mode 100644 tools/Robot_Tool/libraries/README.md
 delete mode 100644 tools/Robot_Tool/libraries/RequestsLibrary.py
 delete mode 100644 tools/Robot_Tool/libraries/StatisticsManager.py
 delete mode 100644 tools/Robot_Tool/libraries/SwitchManager.py
 delete mode 100644 tools/Robot_Tool/libraries/Topology.py
 delete mode 100644 tools/Robot_Tool/libraries/TopologyManager.py
 delete mode 100644 tools/Robot_Tool/libraries/restlib.py
 delete mode 100644 tools/Robot_Tool/libraries/testmodule.py
 delete mode 100644 tools/Robot_Tool/resources/README.md
 delete mode 100644 tools/Robot_Tool/suites/base/__init__.txt
 delete mode 100644 tools/Robot_Tool/suites/base/arp_handler.txt
 delete mode 100644 tools/Robot_Tool/suites/base/container_manager.txt
 delete mode 100644 tools/Robot_Tool/suites/base/forwarding_manager.txt
 delete mode 100644 tools/Robot_Tool/suites/base/forwarding_rule_manager.txt
 delete mode 100644 tools/Robot_Tool/suites/base/host_tracker.txt
 delete mode 100644 tools/Robot_Tool/suites/base/statistics_manager.txt
 delete mode 100644 tools/Robot_Tool/suites/base/switch_manager.txt
 delete mode 100644 tools/Robot_Tool/suites/base/topology_manager.txt
 delete mode 100644 tools/Robot_Tool/suites/cluster/010__bridge_topology.txt
 delete
mode 100644 tools/Robot_Tool/suites/cluster/015__FRM.txt delete mode 100644 tools/Robot_Tool/suites/cluster/020__c1_fails.txt delete mode 100644 tools/Robot_Tool/suites/cluster/025__c2_fails.txt delete mode 100644 tools/Robot_Tool/suites/cluster/__init__.txt delete mode 100644 tools/Robot_Tool/suites/ha/005__two_controller_running.txt delete mode 100644 tools/Robot_Tool/suites/ha/010__c1_fails.txt delete mode 100644 tools/Robot_Tool/suites/ha/015__c2_fails.txt delete mode 100644 tools/Robot_Tool/suites/ha/020__c1_recovers.txt delete mode 100644 tools/Robot_Tool/suites/ha/025__two_controllers_fail.txt delete mode 100644 tools/Robot_Tool/suites/ha/030__two_controller_see_flow.txt delete mode 100644 tools/Robot_Tool/suites/ha/035__installed_flow_remains.txt delete mode 100644 tools/Robot_Tool/suites/ha/__init__.txt delete mode 100644 tools/Robot_Tool/suites/ha/resource.txt delete mode 100644 tools/Robot_Tool/variables/README.md delete mode 100644 tools/Robot_Tool/variables/Variables.py diff --git a/csit/libraries/AuthStandalone.py b/csit/libraries/AuthStandalone.py index 5e0c0eae4b..2435423bce 100644 --- a/csit/libraries/AuthStandalone.py +++ b/csit/libraries/AuthStandalone.py @@ -32,16 +32,16 @@ as URIs not starting with /restconf/ are not supported yet. # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html +import json +import requests + + __author__ = "Vratko Polak" __copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." __license__ = "Eclipse Public License v1.0" __email__ = "vrpolak@cisco.com" -import json -import requests - - # # Karaf Keyword definitions. # diff --git a/csit/libraries/ClusterStateLibrary.py b/csit/libraries/ClusterStateLibrary.py index 58240e084f..dd006cb56a 100644 --- a/csit/libraries/ClusterStateLibrary.py +++ b/csit/libraries/ClusterStateLibrary.py @@ -1,8 +1,3 @@ -__author__ = "Basheeruddin Ahmed" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "syedbahm@cisco.com" - import SettingsLibrary from time import sleep import UtilLibrary @@ -10,6 +5,12 @@ import json import sys +__author__ = "Basheeruddin Ahmed" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." +__license__ = "New-style BSD" +__email__ = "syedbahm@cisco.com" + + def getClusterRoles(shardName, numOfShards=3, numOfTries=3, sleepBetweenRetriesInSecs=3, port=8181, *ips): """Given a shardname (e.g. shard-inventory-config), number of shards and bunch of ips diff --git a/csit/libraries/CrudLibrary.py b/csit/libraries/CrudLibrary.py index 8194bbd0f2..6198ddd7a0 100644 --- a/csit/libraries/CrudLibrary.py +++ b/csit/libraries/CrudLibrary.py @@ -1,13 +1,15 @@ -__author__ = "Basheeruddin Ahmed" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "syedbahm@cisco.com" import sys import UtilLibrary import SettingsLibrary import time +__author__ = "Basheeruddin Ahmed" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." 
+__license__ = "New-style BSD" +__email__ = "syedbahm@cisco.com" + + def initCar(hostname, port): """Initiales the car shard""" x = 0 @@ -77,7 +79,7 @@ def addPerson(hostname, port, numberOfPersons, *expected): payload = SettingsLibrary.add_person_rpc_payload_template.substitute( personId="user" + strId, gender=genderToggle, age=(20 + x % 100), address=strId + "Way, Some Country, Some Zip " + str(x % 1000), - contactNo= "some number" + strId) + contactNo="some number" + strId) # Send the POST request using RPC resp = UtilLibrary.post(SettingsLibrary.getAddPersonRpcUrl(hostname, port), "admin", "admin", payload) diff --git a/csit/libraries/HsfJson/hsf_json.py b/csit/libraries/HsfJson/hsf_json.py index b3d5d9322a..d478ff0e29 100644 --- a/csit/libraries/HsfJson/hsf_json.py +++ b/csit/libraries/HsfJson/hsf_json.py @@ -5,11 +5,6 @@ # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html -__author__ = "Vratko Polak" -__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." -__license__ = "Eclipse Public License v1.0" -__email__ = "vrpolak@cisco.com" - try: import simplejson as _json except ImportError: # Python2.7 calls it json. @@ -18,6 +13,12 @@ from hsfl import Hsfl as _Hsfl from hsfod import Hsfod as _Hsfod +__author__ = "Vratko Polak" +__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." +__license__ = "Eclipse Public License v1.0" +__email__ = "vrpolak@cisco.com" + + def _hsfl_array(s_and_end, scan_once, **kwargs): """Scan JSON array as usual, but return hsfl instead of list.""" values, end = _json.decoder.JSONArray(s_and_end, scan_once, **kwargs) diff --git a/csit/libraries/HsfJson/hsfod.py b/csit/libraries/HsfJson/hsfod.py index 8b19a9d3af..39fd312b35 100644 --- a/csit/libraries/HsfJson/hsfod.py +++ b/csit/libraries/HsfJson/hsfod.py @@ -5,13 +5,14 @@ # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html +import collections as _collections + + __author__ = "Vratko Polak" __copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." __license__ = "Eclipse Public License v1.0" __email__ = "vrpolak@cisco.com" -import collections as _collections - class Hsfod(_collections.OrderedDict): """ diff --git a/csit/libraries/SettingsLibrary.py b/csit/libraries/SettingsLibrary.py index e47c420707..c80ac87650 100644 --- a/csit/libraries/SettingsLibrary.py +++ b/csit/libraries/SettingsLibrary.py @@ -1,14 +1,15 @@ -__author__ = "Basheeruddin Ahmed" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "syedbahm@cisco.com" - from string import Template # helps in taking the hostname entered by the user # global hostname # global port +__author__ = "Basheeruddin Ahmed" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." +__license__ = "New-style BSD" +__email__ = "syedbahm@cisco.com" + + # def setHostname(host): # hostname=host diff --git a/csit/libraries/UtilLibrary.py b/csit/libraries/UtilLibrary.py index ed2de6b18a..7ded295799 100644 --- a/csit/libraries/UtilLibrary.py +++ b/csit/libraries/UtilLibrary.py @@ -1,9 +1,3 @@ -__author__ = "Basheeruddin Ahmed" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "syedbahm@cisco.com" - - import requests from SSHLibrary import SSHLibrary @@ -12,6 +6,13 @@ import time import re import warnings + +__author__ = "Basheeruddin Ahmed" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." 
+__license__ = "New-style BSD" +__email__ = "syedbahm@cisco.com" + + global _cache diff --git a/csit/libraries/ipaddr.py b/csit/libraries/ipaddr.py index 35bc994abb..8f02607502 100644 --- a/csit/libraries/ipaddr.py +++ b/csit/libraries/ipaddr.py @@ -22,9 +22,11 @@ and networks. """ +import struct + + __version__ = '2.1.11' -import struct IPV4LENGTH = 32 IPV6LENGTH = 128 @@ -455,8 +457,8 @@ class _BaseIP(_IPAddrBase): def __eq__(self, other): try: - return (self._ip == other._ip - and self._version == other._version) + return (self._ip == other._ip and + self._version == other._version) except AttributeError: return NotImplemented @@ -617,13 +619,13 @@ class _BaseNet(_IPAddrBase): def __eq__(self, other): try: - return (self._version == other._version - and self.network == other.network - and int(self.netmask) == int(other.netmask)) + return (self._version == other._version and + self.network == other.network and + int(self.netmask) == int(other.netmask)) except AttributeError: if isinstance(other, _BaseIP): - return (self._version == other._version - and self._ip == other._ip) + return (self._version == other._version and + self._ip == other._ip) def __ne__(self, other): eq = self.__eq__(other) @@ -1362,10 +1364,17 @@ class IPv4Network(_BaseV4, _BaseNet): self.iterhosts = self.__iter__ # backwards compatibility - IsRFC1918 = lambda self: self.is_private - IsMulticast = lambda self: self.is_multicast - IsLoopback = lambda self: self.is_loopback - IsLinkLocal = lambda self: self.is_link_local + def IsRFC1918(self): + return self.is_private + + def IsMulticast(self): + return self.is_multicast + + def IsLoopback(self): + return self.is_loopback + + def IsLinkLocal(self): + return self.is_link_local class _BaseV6(object): diff --git a/csit/variables/bgpuser/variables.py b/csit/variables/bgpuser/variables.py index 9e731b13e6..94360263af 100644 --- a/csit/variables/bgpuser/variables.py +++ b/csit/variables/bgpuser/variables.py @@ -11,14 +11,15 @@ read the file contents and access it as values of variables.""" # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html +import os +import string + + __author__ = "Jozef Behran" __copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." __license__ = "Eclipse Public License v1.0" __email__ = "jbehran@cisco.com" -import os -import string - def get_variables(mininet_ip): """Return dict of variables keyed by the (dot-less) names of files. diff --git a/csit/variables/pcepuser/variables.py b/csit/variables/pcepuser/variables.py index 1db0fbc810..32e1834cb3 100644 --- a/csit/variables/pcepuser/variables.py +++ b/csit/variables/pcepuser/variables.py @@ -12,14 +12,15 @@ than do manipulation in Robot file.""" # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html +import binascii +from string import Template + + __author__ = "Vratko Polak" __copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." 
__license__ = "Eclipse Public License v1.0" __email__ = "vrpolak@cisco.com" -import binascii -from string import Template - def get_variables(mininet_ip): """Return dict of variables for the given IP address of Mininet VM.""" @@ -177,58 +178,67 @@ def get_variables(mininet_ip): # There are three operations, so let us just write templates from information at # https://wiki.opendaylight.org/view/BGP_LS_PCEP:Programmer_Guide#Tunnel_Management_for_draft-ietf-pce-stateful-pce-07_and_draft-ietf-pce-pce-initiated-lsp-00 # _xml describes content type and also distinguishes from similarly named _json strings. - add_xml_templ = Template(''' - pcc://$IP - $NAME - /topo:network-topology/topo:topology''' - + '''[topo:topology-id="pcep-topology"] - - - true - true - - - - $IP - 1.1.1.1 - - - - - false - 1.1.1.1/32 - - - -''') - update_xml_templ = Template(''' - pcc://$IP - $NAME - /topo:network-topology/topo:topology''' - + '''[topo:topology-id="pcep-topology"] - - - true - true - - - - false - 2.2.2.2/32 - - - false - 1.1.1.1/32 - - - -''') - remove_xml_templ = Template(''' - pcc://$IP - $NAME - /topo:network-topology/topo:topology''' - + '''[topo:topology-id="pcep-topology"] -''') + add_xml_templ = Template( + '\n' + ' pcc://$IP\n' + ' $NAME\n' + ' ' + '/topo:network-topology/topo:topology[topo:topology-id="pcep-topology"]' + '\n' + ' \n' + ' \n' + ' true\n' + ' true\n' + ' \n' + ' \n' + ' \n' + ' $IP\n' + ' 1.1.1.1\n' + ' \n' + ' \n' + ' \n' + ' \n' + ' false\n' + ' 1.1.1.1/32\n' + ' \n' + ' \n' + ' \n' + '\n' + ) + update_xml_templ = Template( + '\n' + ' pcc://$IP\n' + ' $NAME\n' + ' ' + '/topo:network-topology/topo:topology[topo:topology-id="pcep-topology"]' + '\n' + ' \n' + ' \n' + ' true\n' + ' true\n' + ' \n' + ' \n' + ' \n' + ' false\n' + ' 2.2.2.2/32\n' + ' \n' + ' \n' + ' false\n' + ' 1.1.1.1/32\n' + ' \n' + ' \n' + ' \n' + '\n' + ) + remove_xml_templ = Template( + '\n' + ' pcc://$IP\n' + ' $NAME\n' + ' ' + '/topo:network-topology/topo:topology[topo:topology-id="pcep-topology"]' + '\n' + '\n' + ) # The operations can be applied to either delegated or instantiated tunnel, NAME is the only distinguishing value. # Also, the final IP substitution can be done here. repl_dict = {'IP': mininet_ip} diff --git a/csit/variables/tcpmd5user/variables.py b/csit/variables/tcpmd5user/variables.py index 02770a65f3..8ccff4f022 100644 --- a/csit/variables/tcpmd5user/variables.py +++ b/csit/variables/tcpmd5user/variables.py @@ -14,14 +14,15 @@ than do manipulation in Robot file. # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html +import binascii +from string import Template + + __author__ = "Vratko Polak" __copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." __license__ = "Eclipse Public License v1.0" __email__ = "vrpolak@cisco.com" -import binascii -from string import Template - # FIXME: Migrate values shared by other suites to separate Python module. diff --git a/tools/CSIT_Test/base/__init__.py b/tools/CSIT_Test/base/__init__.py deleted file mode 100644 index 1a1abc3c83..0000000000 --- a/tools/CSIT_Test/base/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -""" -CSIT test tools. 
-Homepage: https://github.com/yeasy/CSIT_Test -Updated: 2013-11-07 -""" -__all__ = ['restlib', 'testmodule'] diff --git a/tools/CSIT_Test/base/modules/arp_handler.py b/tools/CSIT_Test/base/modules/arp_handler.py deleted file mode 100644 index bbab7c63e8..0000000000 --- a/tools/CSIT_Test/base/modules/arp_handler.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class ArpHandler(TestModule): - """ - Test for the arp handler. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/subnetservice', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_subnets(self): - """ - The name is suggested to match the NB API. - list all subnets and their properties. - """ - return super(self.__class__, self).get_entries('subnets') - - def add_subnet_gateway(self, name, body): - """ - Add a subnet gateway. - """ - super(self.__class__, self).add_entry('subnet', name, body) - - def remove_subnet_gateway(self, name): - """ - Remove a subnet gateway. - """ - super(self.__class__, self).remove_entry('subnet', name) - - def test_subnet_operations(self, name, body): - """ - Test subnet operations, like adding and removeing a subnet. - >>> ArpHandler().test_subnet_operations('test',{'name':'test','subnet':'10.0.0.254/8'}) - True - """ - return super(self.__class__, self).test_add_remove_operations('subnets', 'subnet', name, body, 'subnetConfig') - - -if __name__ == '__main__': - print 'arp handler' diff --git a/tools/CSIT_Test/base/modules/container_manager.py b/tools/CSIT_Test/base/modules/container_manager.py deleted file mode 100644 index 03b0fd8a5d..0000000000 --- a/tools/CSIT_Test/base/modules/container_manager.py +++ /dev/null @@ -1,54 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class ContainerManager(TestModule): - """ - Test for the container manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/containermanager', user=DEFAULT_USER, password=DEFAULT_PWD, - container=None, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_containers(self): - """ - The name is suggested to match the NB API. - Show the containers - """ - return super(self.__class__, self).get_entries('containers') - - def add_container(self, name, body): - """ - Add a container - """ - super(self.__class__, self).add_entry('container', name, body) - - def remove_container(self, name): - """ - Remove a container - """ - super(self.__class__, self).remove_entry('container', name) - - def test_container_operations(self, name, body): - """ - Test subnet operations, like adding and removeing a subnet. 
- >>> ContainerManager().test_container_operations('cont1',{'container':'cont1','flowSpecs': [],'staticVlan':'10', - 'nodeConnectors':["OF|1@OF|00:00:00:00:00:00:00:01","OF|23@OF|00:00:00:00:00:00:20:21"]}) - True - """ - return super(self.__class__, self).test_add_remove_operations('containers', 'container', name, body, - 'containerConfig') diff --git a/tools/CSIT_Test/base/modules/forwarding_manager.py b/tools/CSIT_Test/base/modules/forwarding_manager.py deleted file mode 100644 index f10f9ea382..0000000000 --- a/tools/CSIT_Test/base/modules/forwarding_manager.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class ForwardingManager(TestModule): - """ - Test for the forwarding manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/staticroute', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_routes(self): - """ - The name is suggested to match the NB API. - list all routes - """ - return super(self.__class__, self).get_entries('routes') - - def add_static_route(self, name, body): - """ - Add a static route. - """ - super(self.__class__, self).add_entry('route', name, body) - - def remove_static_route(self, name): - """ - Remove a static route - """ - super(self.__class__, self).remove_entry('route', name) - - def test_static_route_operations(self, name, body): - """ - Test static route operations, like adding and removeing a route. - >>> ForwardingManager().test_static_route_operations('route1',{'name':'route1','prefix':'192.168.1.0/24', - 'nextHop':'10.0.0.2'}) - True - """ - return super(self.__class__, self).test_add_remove_operations('routes', 'route', name, body, 'staticRoute') diff --git a/tools/CSIT_Test/base/modules/forwarding_rule_manager.py b/tools/CSIT_Test/base/modules/forwarding_rule_manager.py deleted file mode 100644 index 2874db9a1d..0000000000 --- a/tools/CSIT_Test/base/modules/forwarding_rule_manager.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-05 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class ForwardingRuleManager(TestModule): - """ - Test for the forwarding rule manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/flowprogrammer', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_flows(self): - """ - The name is suggested to match the NB API. 
- Show the flows - """ - return super(self.__class__, self).get_entries('') - - def add_flow_to_node(self, node_type, node_id, name, body): - suffix = 'node/' + node_type + '/' + node_id + '/staticFlow' - super(self.__class__, self).add_entry(suffix, name, body) - - def remove_flow_from_node(self, node_type, node_id, name): - suffix = 'node/' + node_type + '/' + node_id + '/staticFlow' - super(self.__class__, self).remove_entry(suffix, name) - - def test_flow_operations(self, node_type, node_id, name, body): - """ - Test the add,remove,show actions on flows. - >>> body = {'installInHw':'true','name':'flow1','node':{'id':'00:00:00:00:00:00:00:02','type':'OF'}, - 'priority':'1','etherType':'0x800','nwDst':'10.0.0.1/32','actions':['OUTPUT=1']} - >>> ForwardingRuleManager().test_flow_operations('OF','00:00:00:00:00:00:00:02','flow1',body) - True - >>> body = {'installInHw':'true','name':'flow2','node':{'id':'00:00:00:00:00:00:00:02','type':'OF'}, - 'priority':'1','etherType':'0x800','nwDst':'10.0.0.2/32','actions':['OUTPUT=2']} - >>> ForwardingRuleManager().test_flow_operations('OF','00:00:00:00:00:00:00:02','flow2',body) - True - """ - result = [] - # current flow table should be empty. - r = self.get_flows() - result.append(body not in r['flowConfig']) - # Add a flow - self.add_flow_to_node(node_type, node_id, name, body) - r = self.get_flows() - result.append(body in r['flowConfig']) - # Remove the flow and test if succeed - if result == [True, True]: - self.remove_flow_from_node(node_type, node_id, name) - r = self.get_flows() - result.append(body not in r['flowConfig']) - return result == [True, True, True] diff --git a/tools/CSIT_Test/base/modules/host_tracker.py b/tools/CSIT_Test/base/modules/host_tracker.py deleted file mode 100644 index faadcedea9..0000000000 --- a/tools/CSIT_Test/base/modules/host_tracker.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-06 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class HostTracker(TestModule): - """ - Test for the host tracker.. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/hosttracker', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_hosts(self): - """ - The name is suggested to match the NB API. - list all active hosts, should be done after using h1 ping h2 in mininet - """ - return super(self.__class__, self).get_entries(['hosts/active', 'hosts/inactive'], 'hostConfig') - - def add_host(self, name, body): - """ - Add a host. - """ - super(self.__class__, self).add_entry('address', name, body) - - def remove_host(self, name): - """ - Remove a host. - """ - super(self.__class__, self).remove_entry('address', name) - - def test_host_operations(self, name, body): - """ - Test host operations, like adding and removing. 
- >>> HostTracker().test_host_operations('10.0.1.4',{'nodeType': 'OF', 'dataLayerAddress': '5e:bf:79:84:10:a6', - 'vlan': '1', 'nodeId': '00:00:00:00:00:00:00:03', 'nodeConnectorId': '9', 'networkAddress': '10.0.1.4', - 'staticHost': True, 'nodeConnectorType': 'OF'}) - True - """ - return super(self.__class__, self).test_add_remove_operations(['hosts/active', 'hosts/inactive'], 'address', - name, body, - 'hostConfig') diff --git a/tools/CSIT_Test/base/modules/statistics_manager.py b/tools/CSIT_Test/base/modules/statistics_manager.py deleted file mode 100644 index e24721636e..0000000000 --- a/tools/CSIT_Test/base/modules/statistics_manager.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class StatisticsManager(TestModule): - """ - Test for the statistics manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/statistics', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_flow_stats(self): - """ - The name is suggested to match the NB API. - Show the flow statistics - """ - return super(self.__class__, self).get_entries('flow') - - def get_port_stats(self): - """ - The name is suggested to match the NB API. - Show the port statistics - """ - return super(self.__class__, self).get_entries('port') - - def get_table_stats(self): - """ - The name is suggested to match the NB API. - Show the table statistics - """ - return super(self.__class__, self).get_entries('table') diff --git a/tools/CSIT_Test/base/modules/switch_manager.py b/tools/CSIT_Test/base/modules/switch_manager.py deleted file mode 100644 index d4bd142bc8..0000000000 --- a/tools/CSIT_Test/base/modules/switch_manager.py +++ /dev/null @@ -1,156 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class SwitchManager(TestModule): - """ - Test for the switch manager, including read switch nodes. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/switchmanager', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_nodes(self): - """ - The name is suggested to match the NB API. - list all nodes and their properties - """ - suffix = 'nodes' - r = super(self.__class__, self).read(suffix) - if r: - return r - - def get_node(self, suffix): - """ - The name is suggested to match the NB API. - list nodeconnector and properties of a node. - """ - r = super(self.__class__, self).read(suffix) - if r: - return r - - def add_property_to_node(self, node_type, node_id, property, value): - """ - Add a property to given node. 
- """ - suffix = 'node/' + node_type + '/' + node_id + '/property' - super(self.__class__, self).update(suffix + '/' + property + '/' + str(value)) - - def remove_property_from_node(self, node_type, node_id, property): - """ - Remove a property from given node. - """ - suffix = 'node/' + node_type + '/' + node_id + '/property' - super(self.__class__, self).delete(suffix + '/' + property) - - def add_property_to_nodeconnector(self, node_type, node_id, nc_type, nc_id, property, value): - """ - Add a property to given node. - """ - suffix = 'nodeconnector/' + node_type + '/' + node_id + '/' + nc_type + '/' + nc_id + '/property' - super(self.__class__, self).update(suffix + '/' + property + '/' + str(value)) - - def remove_property_from_nodeconnector(self, node_type, node_id, nc_type, nc_id, property): - """ - Add a property to given node. - """ - suffix = 'nodeconnector/' + node_type + '/' + node_id + '/' + nc_type + '/' + nc_id + '/property' - super(self.__class__, self).delete(suffix + '/' + property) - - def test_list_nodes(self): - """ - The name is suggested to match the NB API. - list all nodes and their properties - >>> SwitchManager().test_list_nodes() - True - """ - result = [] - r = self.get_nodes() - t = super(self.__class__, self).extract_properties(r, 'nodeProperties', 'node') - if t: - result.append({u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'} in t) - result.append({u'type': u'OF', u'id': u'00:00:00:00:00:00:00:02'} in t) - result.append({u'type': u'OF', u'id': u'00:00:00:00:00:00:00:03'} in t) - return result == [True, True, True] - - def test_node_property_operations(self, node_type, node_id, property, value): - """ - Test the add,remove,show actions on node properties. - - >>> SwitchManager().test_node_property_operations('OF','00:00:00:00:00:00:00:01','description','Switch1') - True - >>> SwitchManager().test_node_property_operations('OF','00:00:00:00:00:00:00:02','description','Switch2') - True - >>> SwitchManager().test_node_property_operations('OF','00:00:00:00:00:00:00:03','description','Switch3') - True - """ - result = [] - # current node properties should not include description - r = self.get_nodes() - v = [e['properties'].get(property) for e in r['nodeProperties'] if - e['node'] == {u'type': node_type, u'id': node_id}] - result.append(v == [{u'value': u'None'}] or v == [None]) - # After adding, current node properties should include description - self.add_property_to_node(node_type, node_id, property, value) - r = self.get_nodes() - v = [e['properties'].get(property) for e in r['nodeProperties'] if - e['node'] == {u'type': node_type, u'id': node_id}] - result.append(v == [{u'value': value}]) - # After removing, current node properties should not include description - self.remove_property_from_node(node_type, node_id, property) - r = self.get_nodes() - v = [e['properties'].get(property) for e in r['nodeProperties'] if - e['node'] == {u'type': node_type, u'id': node_id}] - result.append(v == [{u'value': u'None'}] or v == [None]) - return result == [True, True, True] - - def test_nodeconnector_property_operations(self, node_type, node_id, nc_type, nc_id, property, value): - """ - Test the add,remove,show actions on nodeconnector properties. 
- - >>> SwitchManager().test_nodeconnector_property_operations( - 'OF','00:00:00:00:00:00:00:01','OF','1','bandwidth',1000) - True - """ - result = [] - node_suffix = 'node/' + node_type + '/' + node_id - # default bw should be 10000000000L - r = self.get_node(node_suffix) - default_value = [e['properties'][property] for e in r['nodeConnectorProperties'] if - property in e['properties'] and e['nodeconnector'] == { - u'node': {u'type': node_type, u'id': node_id}, u'type': nc_type, u'id': nc_id}] - # After setting, the value should be the value - self.add_property_to_nodeconnector(node_type, node_id, nc_type, nc_id, property, value) - r = self.get_node(node_suffix) - current_value = [e['properties'][property] for e in r['nodeConnectorProperties'] if - property in e['properties'] and e['nodeconnector'] == { - u'node': {u'type': node_type, u'id': node_id}, u'type': nc_type, u'id': nc_id}] - result.append(current_value == [{'value': value}]) - # After removing, and restoring the default value, the bandwidth property should be default - self.remove_property_from_nodeconnector(node_type, node_id, nc_type, nc_id, property) - r = self.get_node(node_suffix) - v = [e['properties'][property] for e in r['nodeConnectorProperties'] if - property in e['properties'] and e['nodeconnector'] == {u'node': {u'type': node_type, u'id': node_id}, - u'type': nc_type, u'id': nc_id}] - result.append(v == []) - self.add_property_to_nodeconnector(node_type, node_id, nc_type, nc_id, property, default_value[0]['value']) - r = self.get_node(node_suffix) - current_value = [e['properties'][property] for e in r['nodeConnectorProperties'] if - property in e['properties'] and e['nodeconnector'] == { - u'node': {u'type': node_type, u'id': node_id}, u'type': nc_type, u'id': nc_id}] - result.append(current_value == default_value) - return result == [True, True, True] diff --git a/tools/CSIT_Test/base/modules/topology_manager.py b/tools/CSIT_Test/base/modules/topology_manager.py deleted file mode 100644 index 7d9d26d17b..0000000000 --- a/tools/CSIT_Test/base/modules/topology_manager.py +++ /dev/null @@ -1,97 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class TopologyManager(TestModule): - """ - Test for the topology manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/topology', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_topology(self): - """ - The name is suggested to match the NB API. - Get the topology - """ - return super(self.__class__, self).get_entries() - - def get_userlinks(self): - """ - The name is suggested to match the NB API. - Show the userlinks. - """ - suffix = 'userLinks' - r = super(self.__class__, self).read(suffix) - if r: - return r - - def add_userlink(self, name, body): - """ - Add a userlink. - """ - suffix = 'userLink' - r = super(self.__class__, self).update(suffix + '/' + name, body) - return r - - def remove_userlink(self, name): - """ - Remove a userlink. 
- """ - suffix = 'userLink' - r = super(self.__class__, self).delete(suffix + '/' + name) - return r - - def test_get_topology(self): - """ - The name is suggested to match the NB API. - Test the topology - >>> TopologyManager().test_get_topology() - True - """ - result = [] - r = self.get_topology() - if r: - v = [e['edge'] for e in r['edgeProperties']] - result.append({u'tailNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}, - u'type': u'OF', u'id': u'2'}, - u'headNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:03'}, - u'type': u'OF', u'id': u'3'}} in v) - result.append({u'tailNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:03'}, - u'type': u'OF', u'id': u'3'}, - u'headNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}, - u'type': u'OF', u'id': u'2'}} in v) - result.append({u'tailNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:02'}, - u'type': u'OF', u'id': u'3'}, - u'headNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}, - u'type': u'OF', u'id': u'1'}} in v) - result.append({u'tailNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}, - u'type': u'OF', u'id': u'1'}, - u'headNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:02'}, - u'type': u'OF', u'id': u'3'}} in v) - print result == [True, True, True, True] - - def test_userlink_operations(self, name, body): - """ - Test userlink operations, like adding and removing. - >>> TopologyManager().test_userlink_operations('link1', {'status':'Success','name':'link1', - 'srcNodeConnector':'OF|1@OF|00:00:00:00:00:00:00:02', - 'dstNodeConnector':'OF|1@OF|00:00:00:00:00:00:00:03'}) - True - """ - return super(self.__class__, self).test_add_remove_operations('userLinks', 'userLink', name, body, 'userLinks') diff --git a/tools/CSIT_Test/base/restlib.py b/tools/CSIT_Test/base/restlib.py deleted file mode 100644 index 76cf1caecc..0000000000 --- a/tools/CSIT_Test/base/restlib.py +++ /dev/null @@ -1,150 +0,0 @@ -""" -CSIT test tools. -Authors: Denghui Huang@IBM, Baohua Yang@IBM -Updated: 2013-11-06 -""" -import json - -import requests - - -# Global variables -DEFAULT_CONTROLLER_IP = '127.0.0.1' -# DEFAULT_CONTROLLER_IP = '9.186.105.113' #just for temp test -DEFAULT_PORT = '8080' -DEFAULT_PREFIX = 'http://' + DEFAULT_CONTROLLER_IP + ':' + DEFAULT_PORT -DEFAULT_CONTAINER = 'default' -DEFAULT_USER = 'admin' -DEFAULT_PWD = 'admin' -MODULES_DIR = 'modules' -TIMEOUTS = 2 - -''' -Send a POST request. -''' - - -def do_post_request(url, content_type, payload=None, user=DEFAULT_USER, password=DEFAULT_PWD): - data = payload - headers = {} - if content_type == 'json': - headers = {'Content-type': 'application/json', 'Accept': 'application/json'} - if payload is not None: - data = json.dumps(payload) - elif content_type == 'xml': - headers = {'Content-type': 'application/xml', 'Accept': 'application/xml'} - else: - print 'unsupported content-type' - try: - r = requests.post(url, data, headers=headers, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except(requests.exceptions.HTTPError, requests.exceptions.Timeout): - return 400 - else: - return r.status_code - - -def do_get_request_with_status_code(url, content_type, user=DEFAULT_USER, password=DEFAULT_PWD): - ''' - Send a GET request. - @return The status code. 
- ''' - r = None - try: - r = requests.get(url, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except (requests.exceptions.HTTPError, requests.exceptions.Timeout) as e: - print e - return r.status_code - finally: - return r.status_code - - -def do_put_request(url, content_type, payload=None, user=DEFAULT_USER, password=DEFAULT_PWD): - ''' - Send a PUT request. - @return The status code. - ''' - data = payload - headers = {} - if content_type == 'json': - headers = {'Content-type': 'application/json', 'Accept': 'application/json'} - if payload is not None: - data = json.dumps(payload) - elif content_type == 'xml': - headers = {'Content-type': 'application/xml', 'Accept': 'application/xml'} - else: - print 'unsupported content-type' - try: - r = requests.put(url, data, headers=headers, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except(requests.exceptions.HTTPError, requests.exceptions.Timeout): - return 400 - else: - return r.status_code - - -def do_delete_request(url, user=DEFAULT_USER, password=DEFAULT_PWD): - ''' - Send a DELETE request. - @return The status code. - ''' - r = None - try: - r = requests.delete(url, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except (requests.exceptions.HTTPError, requests.exceptions.Timeout) as e: - print e - finally: - if r: - return r.status_code - - -def convert_result_to_list(result): - ''' - Convert the result content to list. - ''' - list2 = [] - # print result - content = result.values() - for list1 in content: - list2 = [dict1.values() for dict1 in list1] - # print list2 - list3 = [] - for list4 in list2: - for element in list4: - list3.append(element) - # print list3 - return list3 - - -def do_get_request_with_response_content(url, content_type, user=DEFAULT_USER, password=DEFAULT_PWD, - convert_to_list=False): - ''' - Send a GET request and get the response. - @return response content as list. - ''' - try: - r = requests.get(url, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except (requests.exceptions.HTTPError, requests.exceptions.Timeout) as e: - print e - return None - else: - if r is not None: - if content_type == 'json': - content = r.json() - return convert_result_to_list(content) if convert_to_list else content - elif content_type == 'xml': # TODO: add parser to xml - return None - - -if __name__ == '__main__': - # example - # Note: in json body, all field name and value (if it is string type) must be enclosed in double quotes. - # This constraint maybe cause by json parser. - body = {"status": "Success", "dstNodeConnector": "OF|1@OF|00:00:00:00:00:00:00:01", "name": "link3", - "srcNodeConnector": "OF|1@OF|00:00:00:00:00:00:00:03"} - url = 'http://127.0.0.1:8080/controller/nb/v2/topology/default/userLink/link3' - content_type = 'json' - print do_put_request(url, content_type, body) diff --git a/tools/CSIT_Test/base/run.py b/tools/CSIT_Test/base/run.py deleted file mode 100644 index 28aab41df1..0000000000 --- a/tools/CSIT_Test/base/run.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env python -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-07 - -Usage: Before running the test tool, should - 1. Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2'. - 2. Configure gateway in the controller web GUI, name = 'gateway', subnet = '10.0.0.254/24'. - 3. In Mininet, run 'h1 ping h2' to make sure the network is connected. 
-""" -import doctest -import os -from restlib import * # noqa - - -def test_module(module_name): - ''' - Run single test on given module. - ''' - print "#Test case: " + module_name.replace('_', ' ') - cmd = 'python -m doctest ' + module_name + '.py' - os.system(cmd) - - -def run(modules=None): - ''' - Run test cases according to the given modules. - If no parameter is given, then will scan the case directory, - and try to run all cases. - ''' - backup_dir = os.getcwd() - if not modules: - modules = [e[:-3] for e in os.listdir(MODULES_DIR) if e.endswith('.py')] - os.chdir(backup_dir + '/' + MODULES_DIR) - for name in modules: - test_module(name) - os.chdir(backup_dir) - - -if __name__ == '__main__': - doctest.testmod() - test_modules = ['switch_manager', 'topology_manager', 'forwarding_rule_manager', 'statistics_manager', - 'host_tracker', 'arp_handler', 'forwarding_manager', 'container_manager'] - # test_modules = ['topology_manager'] - run(test_modules) - # run() diff --git a/tools/CSIT_Test/base/testmodule.py b/tools/CSIT_Test/base/testmodule.py deleted file mode 100644 index 6ac3478451..0000000000 --- a/tools/CSIT_Test/base/testmodule.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-10-30 -""" - -from restlib import * # noqa - - -class TestModule(object): - """ - Basic module class for test restful APIS. - Support the standard Create, Read, Update, Delete (CRUD) actions. - """ - - def __init__(self, restSubContext, user=DEFAULT_USER, password=DEFAULT_PWD, container=DEFAULT_CONTAINER, - contentType='json', prefix=DEFAULT_PREFIX): - self.restSubContext = restSubContext - self.container = container - self.user = user - self.password = password - self.contentType = contentType - self.prefix = prefix - - def extract_properties(self, content, key, property): - """ - Return all nodes. - """ - if not isinstance(content, dict) or key not in content: - return None - else: - return [e.get(property) for e in content[key]] - - def get_entries(self, suffix=None, key=None): - """ - Get the existed entries in the service. - """ - if isinstance(suffix, list) and key: - result = {} - result[key] = [] - for s in suffix: - result[key].extend(self.get_entries(s).get(key)) - return result - elif isinstance(suffix, str): - return self.read(suffix) - elif not suffix: - return self.read() - else: - return None - - def add_entry(self, suffix, name, body): - """ - Add entry to the service. - """ - self.update(suffix + '/' + name, body) - - def remove_entry(self, suffix, name): - """ - Remove entry from the service. - """ - self.delete(suffix + '/' + name) - - def test_add_remove_operations(self, suffix_entries, suffix_entry, name, body, key): - result = [] - # Add an entry - self.add_entry(suffix_entry, name, body) - r = self.get_entries(suffix_entries, key) - if r: - v = r.get(key) - result.append(body in v if v else False) - # Remove the added entry - if result == [True]: - self.remove_entry(suffix_entry, name) - r = self.get_entries(suffix_entries, key) - v = r.get(key) - result.append(body not in v if v else True) - return result == [True, True] - - def create(self, suffix, body=None): - """ - POST to given suffix url. - TODO: complete - """ - url = self.prefix + self.restSubContext - if self.container: - url += '/' + self.container - if suffix: - url += '/' + suffix - return do_post_request(url, self.contentType, body, self.user, self.password) - - def read(self, suffix=None): - """ - GET from given suffix url. 
- """ - url = self.prefix + self.restSubContext - if self.container: - url += '/' + self.container - if suffix: - url += '/' + suffix - return do_get_request_with_response_content(url, self.contentType, self.user, self.password) - - def update(self, suffix, body=None): - """ - PUT to given suffix url. - """ - url = self.prefix + self.restSubContext - if self.container: - url += '/' + self.container - if suffix: - url += '/' + suffix - return do_put_request(url, self.contentType, body, self.user, self.password) - - def delete(self, suffix): - """ - DELETE to given suffix url. - TODO: complete - """ - url = self.prefix + self.restSubContext - if self.container: - url += '/' + self.container - if suffix: - url += '/' + suffix - return do_delete_request(url, self.user, self.password) diff --git a/tools/OF_Test/action-keywords.csv b/tools/OF_Test/action-keywords.csv deleted file mode 100644 index e61030339f..0000000000 --- a/tools/OF_Test/action-keywords.csv +++ /dev/null @@ -1,3 +0,0 @@ -dec-nw-ttl;dec_ttl -dec-mpls-ttl;dec_mpls_ttl -drop-action;drop diff --git a/tools/OF_Test/keywords.csv b/tools/OF_Test/keywords.csv deleted file mode 100755 index 5ff10e3cb3..0000000000 --- a/tools/OF_Test/keywords.csv +++ /dev/null @@ -1,15 +0,0 @@ -cookie;cookie -duration;duration -exit;exit -idle-timeout;idle_timeout -in_port;in_port -instructions;instructions -hard-timeout;hard_timeout -out_port;out_port -match;matches -n_packets;n_packets -n_bytes;n_bytes -priority;priority -reg;reg -table;table -table_id;table diff --git a/tools/OF_Test/keywords.csv.backup b/tools/OF_Test/keywords.csv.backup deleted file mode 100755 index 7cd9ee08ad..0000000000 --- a/tools/OF_Test/keywords.csv.backup +++ /dev/null @@ -1,50 +0,0 @@ -in_port;in_port -dl_vlan;dl_vlan -dl_vlan_pcp;dl_vlan_pcp -dl_src;dl_src -dl_dst;dl_dst -dl_type;dl_type -ipv4-source;nw_src -ipv4-destination;nw_dst -nw_proto;nw_proto -nw_tos;nw_tos -nw_ecn;nw_ecn -nw_ttl;nw_ttl -tp_src;tp_src -tp_dst;tp_dst -icmp_type;icmp_type -icmp_code;icmp_code -table;table -ip;ip -icmp;icmp -tcp;tcp -udp;udp -arp;arp -vlan_tci;vlan_tci -ip_frag;ip_frag -arp_sha;arp_sha -arp_tha;arp_tha -ipv6_src;ipv6_src -ipv6_dst;ipv6_dst -ipv6_label;ipv6_label -nd_target;nd_target -nd_sll;nd_sll -nd_tll;nd_tll -tun_id;tun_id -reg;reg -ipv6;ipv6 -tcp6;tcp6 -udp6;udp6 -icmp6;icmp6 -apply_actions;actions -exit;exit -cookie;cookie -priority;priority -idle-timeout;idle_timeout -hard-timeout;hard_timeout -out_port;out_port -duration;duration -table_id;table -priority;priority -n_packets;n_packets -n_bytes;n_bytes diff --git a/tools/OF_Test/match-keywords.csv b/tools/OF_Test/match-keywords.csv deleted file mode 100755 index 7dad3ebf3b..0000000000 --- a/tools/OF_Test/match-keywords.csv +++ /dev/null @@ -1,32 +0,0 @@ -arp-op;arp_op -arp-source-transport-address;arp_spa -arp-target-transport-address;arp_tpa -arp-source-hardware-address;arp_sha -arp-target-hardware-address;arp_tha -ethernet-source;dl_src -ethernet-destination;dl_dst -ethernet-type;dl_type -icmpv4-type;icmp_type -icmpv4-code;icmp_code -in-port;in_port -in-phy-port;in_phy_port -ip-dscp;nw_tos -ip-ecn;nw_ecn -ip-protocol;nw_proto -ipv4-source;nw_src -ipv4-destination;nw_dst -ipv6_source;ipv6_src -ipv6_destination;ipv6_dst -ipv6_label;ipv6_label -sctp-destination-port;tp_dst -sctp-source-port;tp_src -tcp-destination-port;tp_dst -tcp-source-port;tp_src -udp-destination-port;tp_dst -udp-source-port;tp_src -vlan-id;dl_vlan -vlan_tci;vlan_tci -nd_target;nd_target -nd_sll;nd_sll -nd_tll;nd_tll -tun_id;tun_id diff --git 
a/tools/OF_Test/odl_tests.py b/tools/OF_Test/odl_tests.py deleted file mode 100755 index e529dece1f..0000000000 --- a/tools/OF_Test/odl_tests.py +++ /dev/null @@ -1,535 +0,0 @@ -#!/usr/bin/python2.7 - -import os -import sys -import time -import logging -import argparse -import unittest -import requests -import xml.dom.minidom as md -from xml.etree import ElementTree as ET -from netaddr import IPNetwork -from string import lower - -import mininet.node -import mininet.topo -import mininet.net -import mininet.util - -from mininet.node import RemoteController -from mininet.node import OVSKernelSwitch - - -def create_network(controller_ip, controller_port): - """Create topology and mininet network.""" - topo = mininet.topo.Topo() - - topo.addSwitch('s1') - topo.addHost('h1') - topo.addHost('h2') - - topo.addLink('h1', 's1') - topo.addLink('h2', 's1') - - switch = mininet.util.customConstructor( - {'ovsk': OVSKernelSwitch}, 'ovsk,protocols=OpenFlow13') - - controller = mininet.util.customConstructor( - {'remote': RemoteController}, 'remote,ip=%s:%s' % (controller_ip, - controller_port)) - - net = mininet.net.Mininet(topo=topo, switch=switch, controller=controller) - - return net - - -def get_flows(net): - """Get list of flows from network's first switch. - - Return list of all flows on switch, sorted by duration (newest first) - One flow is a dictionary with all flow's attribute:value pairs. Matches - are stored under 'matches' key as another dictionary. - Example: - - { - 'actions': 'drop', - 'cookie': '0xa,', - 'duration': '3.434s,', - 'hard_timeout': '12,', - 'idle_timeout': '34,', - 'matches': { - 'ip': None, - 'nw_dst': '10.0.0.0/24' - }, - 'n_bytes': '0,', - 'n_packets': '0,', - 'priority': '2', - 'table': '1,' - } - - """ - log = logging.getLogger(__name__) - - def parse_matches(flow, matches): - flow['matches'] = {} - - for match in matches: - split_match = match.split('=', 1) - if len(split_match) == 1: - flow['matches'][split_match[0]] = None - else: - flow['matches'][split_match[0]] = split_match[1].rstrip(',') - - switch = net.switches[0] - output = switch.cmdPrint( - 'ovs-ofctl -O OpenFlow13 dump-flows %s' % switch.name) -# output = switch.cmdPrint( -# 'ovs-ofctl -F openflow10 dump-flows %s' % switch.name) - - log.debug('switch flow table: {}'.format(output)) - - flows = [] - - for line in output.splitlines()[1:]: - flow = {} - for word in line.split(): - word.rstrip(',') - try: - key, value = word.split('=', 1) - except ValueError: - # TODO: need to figure out what to do here? - continue - - if key == 'priority': - values = value.split(',') - flow[key] = values[0] - parse_matches(flow, values[1:]) - else: - flow[key] = value.rstrip(',') - - flows.append(flow) - - # sort by duration - return sorted(flows, key=lambda x: x['duration'].rstrip('s')) - - -def translate_to_flow(flow, name, dictionary): - switch_flow_name = dictionary[name] - - key_err = '{} needs to be present in flow definition. 
Flow definition ' \ - 'was: {}.'.format(switch_flow_name, flow) - assert switch_flow_name in flow, key_err - return switch_flow_name - - -def get_text_value(element): - return element.childNodes[0].nodeValue - - -def compare_elements(expected_match, actual_match, kw, comparators, default): - for child in expected_match.childNodes: - if child.nodeType is expected_match.TEXT_NODE: - continue - - comparator = comparators.get(child.nodeName, default) - comparator(child, actual_match, kw) - - -def fallback_comparator(xml_element, switch_flow, kw): - # print 'fallback_comparator-xml_element', xml_element.toxml() - # print 'fallback_comparator: switch_flow', switch_flow - # print 'fallback_comparator: kw', kws - - name = translate_to_flow(switch_flow, xml_element.nodeName, kw) - - actual = switch_flow[name] - expected = xml_element.childNodes[0].nodeValue - - data = xml_element.toxml(), name, actual - # print 'fallback_comparator: data', data - - assert expected == actual, 'xml part: %s && switch %s=%s' % data - - -def default_comparator(xml_element, switch_flow): - fallback_comparator(xml_element, switch_flow, keywords) - - -def integer_comparator(expected, actual, kw, base): - expected_value = int(expected.childNodes[0].data) - - name = kw.get(expected.nodeName) - actual_value = int(actual[name], base) - - data = expected.toxml(), name, actual - assert expected_value == actual_value, \ - 'xml value: %s && actual value %s=%s' % data - - -def cookie_comparator(cookie, switch_flow): - integer_comparator(cookie, switch_flow, keywords, 16) - - -def ethernet_address_comparator(child, actual_match, kw): - expected_address = child.getElementsByTagName("address")[0].childNodes[0].data - actual_address = actual_match[kw.get(child.nodeName)] - - data = child.toxml(), kw.get(child.nodeName), actual_address - - assert lower(expected_address) == lower(actual_address), \ - 'xml address: %s && actual address %s=%s' % data - - -def vlan_match_comparator(expected_match, actual_match, kw): - - def compare_vlan_pcp(expected_match, actual_match, kw): - integer_comparator(expected_match, actual_match, kw, 10) - - def compare_vlan_id(expected_match, actual_match, kw): - integer_comparator(expected_match.getElementsByTagName('vlan-id')[0], - actual_match, kw, 10) - - VLAN_COMPARATORS = { - 'vlan-pcp': compare_vlan_pcp, - 'vlan-id': compare_vlan_id, - } - - # print 'ethernet_match_comparator-expected_match:', expected_match.toxml() - # print 'ethernet_match_comparator-actual_match:', actual_match - - compare_elements(expected_match, actual_match, kw, - VLAN_COMPARATORS, fallback_comparator) - - -def ethernet_match_comparator(expected_match, actual_match, kw): - def compare_etype(child, actual_match, kw): - expected_etype = int(child.getElementsByTagName("type")[0].childNodes[0].data) - name = kw.get(child.nodeName) - data = child.toxml(), name, actual_match - - if expected_etype == 2048: # IP - assert ((actual_match.get('ip', 'IP Not-present') is None) or - (actual_match.get('tcp', 'TCP Not-present') is None) or - (actual_match.get('sctp', 'SCTP Not-present') is None) or - (actual_match.get('udp', 'UDP Not-present') is None)), 'Expected etype %s && actual etype %s=%s' % data # noqa - - elif expected_etype == 2054: # ARP - assert actual_match.get('arp', 'ARP Not-present') is None, \ - 'Expected etype %s && actual etype %s=%s' % data - - else: - actual_etype = int(actual_match[name], 16) - - assert expected_etype == actual_etype, 'xml etype: %s && actual etype %s=%s' % data - - ETH_COMPARATORS = { - 'ethernet-type': 
compare_etype, - 'ethernet-source': ethernet_address_comparator, - 'ethernet-destination': ethernet_address_comparator, - } - - # print 'ethernet_match_comparator-expected_match:', expected_match.toxml() - # print 'ethernet_match_comparator-actual_match:', actual_match - - compare_elements(expected_match, actual_match, kw, - ETH_COMPARATORS, fallback_comparator) - - -def ipv4_comparator(expected_match, actual_match, kw): - # print 'ip_v4_comparator:', expected_match.toxml(), actual_match - # print 'ip_v4_comparator-actual_match:', actual_match - - expected_value = expected_match.childNodes[0].data - actual_value = actual_match[kw.get(expected_match.nodeName)] - - data = expected_match.toxml(), kw.get(expected_match.nodeName), actual_value - - assert IPNetwork(expected_value) == IPNetwork(actual_value),\ - 'xml part: %s && address %s=%s' % data - - -def ip_match_comparator(expected_match, actual_match, kw): - def compare_proto(child, actual_match, kw): - print 'compare_proto:', child.toxml(), actual_match - expected_proto = int(child.childNodes[0].data) - - name = child.nodeName - data = expected_match.toxml(), name, actual_match - - if expected_proto == 6: # TCP - assert actual_match.get('tcp', 'TCP Not-present') is None, 'ip protocol type: expected %s, actual %s=%s' % data # noqa - - elif expected_proto == 17: # UDP - assert actual_match.get('udp', 'UDP Not-present') is None, 'ip protocol type: expected %s, actual %s=%s' % data # noqa - - elif expected_proto == 132: # SCTP - assert actual_match.get('sctp', 'SCTP Not-present') is None, 'ip protocol type: expected %s, actual %s=%s' % data # noqa - - else: - fallback_comparator(child, actual_match, kw) - - def compare_dscp(child, actual_match, kw): - # print 'compare_dscp:', child.toxml(), actual_match - - expected_dscp = int(child.childNodes[0].data) - name = kw.get(child.nodeName) - actual_dscp = int(actual_match[name]) - - data = child.toxml(), name, actual_match - - assert (expected_dscp * 4) == actual_dscp, 'dscp: expected %s, actual %s=%s' % data - - IP_MATCH_COMPARATORS = { - 'ip-protocol': compare_proto, - 'ip-dscp': compare_dscp, - } - - # print 'ip_match_comparator:', expected_match.toxml(), actual_match - compare_elements(expected_match, actual_match, kw, - IP_MATCH_COMPARATORS, fallback_comparator) - - -def match_comparator(expected_match, switch_flow): - MATCH_COMPARATORS = { - 'arp-source-hardware-address': ethernet_address_comparator, - 'arp-target-hardware-address': ethernet_address_comparator, - 'vlan-match': vlan_match_comparator, - 'ethernet-match': ethernet_match_comparator, - 'ip-match': ip_match_comparator, - 'ipv4-destination': ipv4_comparator, - 'ipv4-source': ipv4_comparator, - } - - actual_match = switch_flow['matches'] - - # print 'match_comparator-expected_match:', expected_match.toxml() - # print 'match_comparator-actual_match:', actual_match - # print 'match_comparator: keywords', keywords - - compare_elements(expected_match, actual_match, match_keywords, - MATCH_COMPARATORS, fallback_comparator) - - -def actions_comparator(actions, switch_flow): - # print 'actions_comparator:', actions, switch_flow - - actual_actions = switch_flow['actions'].split(",") - # print 'actions_comparator:', actual_actions - - for action in actions.childNodes: - if action.nodeType is actions.TEXT_NODE: - continue - - action_name = action.childNodes[3].nodeName - expected_action = action_keywords.get(action_name) - - data = action.toxml(), expected_action - # print 'actions_comparator:', data - - assert expected_action in 
actual_actions, 'xml part:\n%s\n expected action: %s' % data - - -def null_comparator(element, switch_flow): - pass - - -def instructions_comparator(instructions_element, switch_flow): - INSTRUCTION_COMPARATORS = { - 'apply-actions': actions_comparator, - 'default': null_comparator, - } - # print 'instructions_comparator:', instructions_element, switch_flow - - instructions = instructions_element.childNodes # noqa - - for instruction in instructions_element.childNodes: - if instruction.nodeType is instructions_element.TEXT_NODE: - continue - - for itype in instruction.childNodes: - if itype.nodeType is itype.TEXT_NODE: - continue - - comparator = INSTRUCTION_COMPARATORS.get(itype.nodeName, - INSTRUCTION_COMPARATORS['default']) - comparator(itype, switch_flow) - - -COMPARATORS = { - 'cookie': cookie_comparator, - 'instructions': instructions_comparator, - 'match': match_comparator, - 'default': default_comparator, -} - - -def all_nodes(xml_root): - """ - Generates every non-text nodes. - """ - current_nodes = [xml_root] - next_nodes = [] - - while len(current_nodes) > 0: - for node in current_nodes: - if node.nodeType != xml_root.TEXT_NODE: - yield node - next_nodes.extend(node.childNodes) - - current_nodes, next_nodes = next_nodes, [] - - -def check_elements(xmlstr, keywords): - # namespace = 'urn:opendaylight:flow:inventory' - tree = md.parseString(xmlstr) - - for element in all_nodes(tree.documentElement): - # switch flow object contains only some data from xml - if element.nodeName not in keywords: - # print 'check_elements: element.nodeName', element.nodeName, 'NOT in keywords' - continue - - yield element - - raise StopIteration() - - -class TestOpenFlowXMLs(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.net = create_network(cls.host, cls.mn_port) - cls.net.start() - time.sleep(15) - - @classmethod - def tearDownClass(cls): - cls.net.stop() - - -def get_values(node, *tags): - result = dict((tag, None) for tag in tags) - for node in all_nodes(node): - if node.nodeName in result and len(node.childNodes) > 0: - result[node.nodeName] = node.childNodes[0].nodeValue - return result - - -def generate_tests_from_xmls(path, xmls=None): - # generate test function from path to request xml - def generate_test(path_to_xml): - xml_string = '' - with open(path_to_xml) as f: - xml_string = f.read() - - tree = md.parseString(xml_string) - ids = get_values(tree.documentElement, 'table_id', 'id') - - def new_test(self): - log = logging.getLogger(__name__) - # send request throught RESTCONF - data = (self.host, self.port, ids['table_id'], ids['id']) - url = 'http://%s:%d/restconf/config/opendaylight-inventory:nodes' \ - '/node/openflow:1/table/%s/flow/%s' % data - headers = { - 'Content-Type': 'application/xml', - 'Accept': 'application/xml', - } - log.info('sending request to url: {}'.format(url)) - rsp = requests.put(url, auth=('admin', 'admin'), data=xml_string, - headers=headers) - log.info('received status code: {}'.format(rsp.status_code)) - log.debug('received content: {}'.format(rsp.text)) - assert rsp.status_code == 204 or rsp.status_code == 200, 'Status' \ - ' code returned %d' % rsp.status_code - - # check request content against restconf's datastore - response = requests.get(url, auth=('admin', 'admin'), - headers={'Accept': 'application/xml'}) - assert response.status_code == 200 - req = ET.tostring(ET.fromstring(xml_string)) - res = ET.tostring(ET.fromstring(response.text)) - assert req == res, 'uploaded and stored xml, are not the same\n' \ - 'uploaded: %s\nstored:%s' % 
(req, res) - - # collect flow table state on switch - switch_flows = get_flows(self.net) - assert len(switch_flows) > 0 - - # compare requested object and flow table state - for important_element in check_elements(xml_string, keywords): - # log.info('important element: {}'.format(important_element.nodeName)) - comparator = COMPARATORS.get(important_element.nodeName, - COMPARATORS['default']) - - comparator(important_element, switch_flows[0]) - - return new_test - - # generate list of available xml requests - xmlfiles = None - if xmls is not None: - xmlfiles = ('f%d.xml' % fid for fid in xmls) - else: - xmlfiles = (xml for xml in os.listdir(path) if xml.endswith('.xml')) - - # define key getter for sorting - def get_test_number(test_name): - return int(test_name[1:-4]) - - for xmlfile in xmlfiles: - test_name = 'test_xml_%04d' % get_test_number(xmlfile) - setattr(TestOpenFlowXMLs, - test_name, - generate_test(os.path.join(path, xmlfile))) - - -if __name__ == '__main__': - # set up logging - logging.basicConfig(level=logging.DEBUG) - - # parse cmdline arguments - parser = argparse.ArgumentParser(description='Run switch <-> ODL tests ' - 'defined by xmls.') - parser.add_argument('--odlhost', default='127.0.0.1', help='host where ' - 'odl controller is running') - parser.add_argument('--odlport', type=int, default=8080, help='port on ' - 'which odl\'s RESTCONF is listening') - parser.add_argument('--mnport', type=int, default=6653, help='port on ' - 'which odl\'s controller is listening') - parser.add_argument('--xmls', default=None, help='generete tests only ' - 'from some xmls (i.e. 1,3,34) ') - args = parser.parse_args() - - # set host and port of ODL controller for test cases - TestOpenFlowXMLs.port = args.odlport - TestOpenFlowXMLs.host = args.odlhost - TestOpenFlowXMLs.mn_port = args.mnport - - keywords = None - with open('keywords.csv') as f: - keywords = dict(line.strip().split(';') for line in f - if not line.startswith('#')) - - match_keywords = None - with open('match-keywords.csv') as f: - match_keywords = dict(line.strip().split(';') for line in f - if not line.startswith('#')) - - action_keywords = None - with open('action-keywords.csv') as f: - action_keywords = dict(line.strip().split(';') for line in f - if not line.startswith('#')) - - # fix arguments for unittest - del sys.argv[1:] - - # generate tests for TestOpenFlowXMLs - if args.xmls is not None: - xmls = map(int, args.xmls.split(',')) - generate_tests_from_xmls('xmls', xmls) - else: - generate_tests_from_xmls('xmls') - - # run all tests - unittest.main() diff --git a/tools/OF_Test/odl_tests.py.backup b/tools/OF_Test/odl_tests.py.backup deleted file mode 100755 index 73ad84a8bf..0000000000 --- a/tools/OF_Test/odl_tests.py.backup +++ /dev/null @@ -1,518 +0,0 @@ -import os -import sys -import time -import logging -import argparse -import unittest -import requests -import xml.dom.minidom as md -from xml.etree import ElementTree as ET -from netaddr import IPNetwork -from string import lower - -import mininet.node -import mininet.topo -import mininet.net -import mininet.util - -from mininet.node import RemoteController -from mininet.node import OVSKernelSwitch - -def create_network(controller_ip, controller_port): - """Create topology and mininet network.""" - topo = mininet.topo.Topo() - - topo.addSwitch('s1') - topo.addHost('h1') - topo.addHost('h2') - - topo.addLink('h1', 's1') - topo.addLink('h2', 's1') - - switch=mininet.util.customConstructor( - {'ovsk':OVSKernelSwitch}, 'ovsk,protocols=OpenFlow13') - - 
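get_flows above turns every line of 'ovs-ofctl -O OpenFlow13 dump-flows' into a dictionary, collecting the bare match tokens that trail the priority value under 'matches'. A condensed, standalone sketch of that parsing (parse_flow_line is an illustrative name, not part of the suite):

    def parse_flow_line(line):
        """Parse one dump-flows line into the flow dict shape used by the comparators."""
        flow = {'matches': {}}
        for word in line.split():
            word = word.rstrip(',')
            if '=' not in word:
                continue
            key, value = word.split('=', 1)
            if key == 'priority':
                # 'priority=2,ip,nw_dst=10.0.0.0/24' carries the match list after the priority
                parts = value.split(',')
                flow['priority'] = parts[0]
                for match in parts[1:]:
                    name, _, mval = match.partition('=')
                    flow['matches'][name] = mval or None
            else:
                flow[key] = value
        return flow

    # parse_flow_line('cookie=0xa, duration=3.434s, table=1, n_packets=0, n_bytes=0, '
    #                 'priority=2,ip,nw_dst=10.0.0.0/24 actions=drop')
    # -> {'cookie': '0xa', 'table': '1', ..., 'matches': {'ip': None, 'nw_dst': '10.0.0.0/24'}, 'actions': 'drop'}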
controller=mininet.util.customConstructor( - {'remote': RemoteController}, 'remote,ip=%s:%s' % (controller_ip, - controller_port)) - - - net = mininet.net.Mininet(topo=topo, switch=switch, controller=controller) - - return net - - -def get_flows(net): - """Get list of flows from network's first switch. - - Return list of all flows on switch, sorted by duration (newest first) - One flow is a dictionary with all flow's attribute:value pairs. Matches - are stored under 'matches' key as another dictionary. - Example: - - { - 'actions': 'drop', - 'cookie': '0xa,', - 'duration': '3.434s,', - 'hard_timeout': '12,', - 'idle_timeout': '34,', - 'matches': { - 'ip': None, - 'nw_dst': '10.0.0.0/24' - }, - 'n_bytes': '0,', - 'n_packets': '0,', - 'priority': '2', - 'table': '1,' - } - - """ - log = logging.getLogger(__name__) - def parse_matches(flow, matches): - flow['matches'] = {} - - for match in matches: - split_match = match.split('=', 1) - if len(split_match) == 1: - flow['matches'][split_match[0]] = None - else: - flow['matches'][split_match[0]] = split_match[1].rstrip(',') - - switch = net.switches[0] - output = switch.cmdPrint( - 'ovs-ofctl -O OpenFlow13 dump-flows %s' % switch.name) -# output = switch.cmdPrint( -# 'ovs-ofctl -F openflow10 dump-flows %s' % switch.name) - - log.debug('switch flow table: {}'.format(output)) - - flows = [] - - for line in output.splitlines()[1:]: - flow = {} - for word in line.split(): - word.rstrip(',') - try: - key, value = word.split('=', 1) - except ValueError: - #TODO: need to figure out what to do here? - continue - - if key == 'priority': - values = value.split(',') - flow[key] = values[0] - parse_matches(flow, values[1:]) - else: - flow[key] = value.rstrip(',') - - flows.append(flow) - - # sort by duration - return sorted(flows, key=lambda x: x['duration'].rstrip('s')) - - -def translate_to_flow(flow, name, dictionary): - switch_flow_name = dictionary[name] - - key_err = '{} needs to be present in flow definition. 
Flow definition ' \ - 'was: {}.'.format(switch_flow_name, flow) - assert switch_flow_name in flow, key_err - return switch_flow_name - - -def get_text_value(element): - return element.childNodes[0].nodeValue - - -def fallback_comparator(xml_element, switch_flow, kw): - # print 'fallback_comparator-xml_element', xml_element.toxml() - # print 'fallback_comparator: switch_flow', switch_flow - # print 'fallback_comparator: kw', kws - - name = translate_to_flow(switch_flow, xml_element.nodeName, kw) - - actual = switch_flow[name] - expected = xml_element.childNodes[0].nodeValue - - data = xml_element.toxml(), name, actual - # print 'fallback_comparator: data', data - - assert expected == actual, 'xml part: %s && switch %s=%s' % data - - -def default_comparator(xml_element, switch_flow): - fallback_comparator(xml_element, switch_flow, keywords) - - -def cookie_comparator(cookie, switch_flow): - name = translate_to_flow(switch_flow, cookie.nodeName, keywords) - - actual = int(switch_flow[name], 0) - expected = int(cookie.childNodes[0].nodeValue) - data = cookie.toxml(), name, actual - - assert expected == actual, 'xml part: %s && switch %s=%s' % data - - -def ethernet_address_comparator(child, actual_match, kw): - expected_address = child.getElementsByTagName("address")[0].childNodes[0].data - actual_address = actual_match[kw.get(child.nodeName)] - - data = child.toxml(), kw.get(child.nodeName), actual_address - - assert lower(expected_address) == lower(actual_address), \ - 'xml address: %s && actual address %s=%s' % data - - -def ethernet_match_comparator(expected_match, actual_match, kw): - def compare_etype(child, actual_match, kw): - expected_etype = int(child.getElementsByTagName("type")[0].childNodes[0].data) - name = kw.get(child.nodeName) - data = child.toxml(), name, actual_match - - if expected_etype == 2048: # IP - assert ((actual_match.get('ip', 'IP Not-present') is None) or \ - (actual_match.get('tcp', 'TCP Not-present') is None) or \ - (actual_match.get('sctp', 'SCTP Not-present') is None) or \ - (actual_match.get('udp', 'UDP Not-present') is None)), \ - 'Expected etype %s && actual etype %s=%s' % data - - elif expected_etype == 2054: #ARP - assert actual_match.get('arp', 'ARP Not-present') is None, \ - 'Expected etype %s && actual etype %s=%s' % data - - else: - actual_etype = int(actual_match[name], 16) - - assert expected_etype == actual_etype, 'xml etype: %s && actual etype %s=%s' % data - - - ETH_COMPARATORS = { - 'ethernet-type': compare_etype, - 'ethernet-source': ethernet_address_comparator, - 'ethernet-destination': ethernet_address_comparator, - } - - # print 'ethernet_match_comparator-expected_match:', expected_match.toxml() - # print 'ethernet_match_comparator-actual_match:', actual_match - # print 'ethernet_match_comparator-keywords:', keywords - - for child in expected_match.childNodes: - if child.nodeType is expected_match.TEXT_NODE: - continue - - comparator = ETH_COMPARATORS.get(child.nodeName) - comparator(child, actual_match, kw) - - -def ip_v4_comparator(expected_match, actual_match, kw): - # print 'ip_v4_comparator:', expected_match.toxml(), actual_match - # print 'ip_v4_comparator-actual_match:', actual_match - - expected_value = expected_match.childNodes[0].data - actual_value = actual_match[kw.get(expected_match.nodeName)] - - data = expected_match.toxml(), kw.get(expected_match.nodeName), actual_value - - assert IPNetwork(expected_value) == IPNetwork(actual_value), 'xml part: %s && address %s=%s' % data - - -def ip_match_comparator(expected_match, 
actual_match, kw): - def compare_proto(child, actual_match, kw): - print 'compare_proto:', child.toxml(), actual_match - expected_proto = int(child.childNodes[0].data) - - name = child.nodeName - data = expected_match.toxml(), name, actual_match - - if expected_proto == 6: # TCP - assert actual_match.get('tcp', 'TCP Not-present') is None, \ - 'ip protocol type: expected %s, actual %s=%s' % data - - elif expected_proto == 17: #UDP - assert actual_match.get('udp', 'UDP Not-present') is None, \ - 'ip protocol type: expected %s, actual %s=%s' % data - - elif expected_proto == 132: #SCTP - assert actual_match.get('sctp', 'SCTP Not-present') is None, \ - 'ip protocol type: expected %s, actual %s=%s' % data - - else: - fallback_comparator(child, actual_match, kw) - - - def compare_dscp(child, actual_match, kw): - # print 'compare_dscp:', child.toxml(), actual_match - - expected_dscp = int(child.childNodes[0].data) - name = kw.get(child.nodeName) - actual_dscp = int(actual_match[name]) - - data = child.toxml(), name, actual_match - - assert (expected_dscp * 4) == actual_dscp, 'dscp: expected %s, actual %s=%s' % data - - - IP_MATCH_COMPARATORS = { - 'ip-protocol': compare_proto, - 'ip-dscp': compare_dscp, - 'ip-ecn': fallback_comparator, - } - - # print 'ip_match_comparator:', expected_match.toxml(), actual_match - - for child in expected_match.childNodes: - if child.nodeType is expected_match.TEXT_NODE: - continue - - comparator = IP_MATCH_COMPARATORS.get(child.nodeName) - comparator(child, actual_match, kw) - - -def match_comparator(expected_match, switch_flow): - MATCH_COMPARATORS = { - 'arp-source-hardware-address': ethernet_address_comparator, - 'arp-target-hardware-address': ethernet_address_comparator, - 'ethernet-match': ethernet_match_comparator, - 'ip-match': ip_match_comparator, - 'ipv4-destination': ip_v4_comparator, - 'ipv4-source': ip_v4_comparator, - 'default': fallback_comparator, - } - - actual_match = switch_flow['matches'] - - # print 'match_comparator-expected_match:', expected_match.toxml() - # print 'match_comparator-actual_match:', actual_match - # print 'match_comparator: keywords', keywords - - for child in expected_match.childNodes: - if child.nodeType is expected_match.TEXT_NODE: - continue - - comparator = MATCH_COMPARATORS.get(child.nodeName, - MATCH_COMPARATORS['default']) - comparator(child, actual_match, match_keywords) - - -def actions_comparator(actions, switch_flow): - # print 'actions_comparator:', actions, switch_flow - - actual_actions = switch_flow['actions'].split(",") - # print 'actions_comparator:', actual_actions - - for action in actions.childNodes: - if action.nodeType is actions.TEXT_NODE: - continue - - action_name = action.childNodes[3].nodeName - expected_action = action_keywords.get(action_name) - - data = action.toxml(), expected_action - # print 'actions_comparator:', data - - assert expected_action in actual_actions, 'xml part:\n%s\n expected action: %s' % data - - -def null_comparator(element, switch_flow): - pass - - -def instructions_comparator(instructions_element, switch_flow): - INSTRUCTION_COMPARATORS = { - 'apply-actions': actions_comparator, - 'default': null_comparator, - } - # print 'instructions_comparator:', instructions_element, switch_flow - - instructions = instructions_element.childNodes - - for instruction in instructions_element.childNodes: - if instruction.nodeType is instructions_element.TEXT_NODE: - continue - - for itype in instruction.childNodes: - if itype.nodeType is itype.TEXT_NODE: - continue - - comparator = 
INSTRUCTION_COMPARATORS.get(itype.nodeName, - INSTRUCTION_COMPARATORS['default']) - comparator(itype, switch_flow) - - -COMPARATORS = { - 'cookie': cookie_comparator, - 'instructions': instructions_comparator, - 'match': match_comparator, - 'default': default_comparator, -} - -def all_nodes(xml_root): - """ - Generates every non-text nodes. - """ - current_nodes = [xml_root] - next_nodes = [] - - while len(current_nodes) > 0: - for node in current_nodes: - if node.nodeType != xml_root.TEXT_NODE: - yield node - next_nodes.extend(node.childNodes) - - current_nodes, next_nodes = next_nodes, [] - - -def check_elements(xmlstr, keywords): - # namespace = 'urn:opendaylight:flow:inventory' - tree = md.parseString(xmlstr) - - for element in all_nodes(tree.documentElement): - # switch flow object contains only some data from xml - if element.nodeName not in keywords: - # print 'check_elements: element.nodeName', element.nodeName, 'NOT in keywords' - continue - - yield element - - raise StopIteration() - - -class TestOpenFlowXMLs(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.net = create_network(cls.host, cls.mn_port) - cls.net.start() - time.sleep(15) - - @classmethod - def tearDownClass(cls): - cls.net.stop() - - -def get_values(node, *tags): - result = {tag: None for tag in tags} - for node in all_nodes(node): - if node.nodeName in result and len(node.childNodes) > 0: - result[node.nodeName] = node.childNodes[0].nodeValue - return result - - -def generate_tests_from_xmls(path, xmls=None): - # generate test function from path to request xml - def generate_test(path_to_xml): - xml_string = '' - with open(path_to_xml) as f: - xml_string = f.read() - - tree = md.parseString(xml_string) - ids = get_values(tree.documentElement, 'table_id', 'id') - - def new_test(self): - log = logging.getLogger(__name__) - # send request throught RESTCONF - data = (self.host, self.port, ids['table_id'], ids['id']) - url = 'http://%s:%d/restconf/config/opendaylight-inventory:nodes' \ - '/node/openflow:1/table/%s/flow/%s' % data - headers = { - 'Content-Type': 'application/xml', - 'Accept': 'application/xml', - } - log.info('sending request to url: {}'.format(url)) - rsp = requests.put(url, auth=('admin', 'admin'), data=xml_string, - headers=headers) - log.info('received status code: {}'.format(rsp.status_code)) - log.debug('received content: {}'.format(rsp.text)) - assert rsp.status_code == 204 or rsp.status_code == 200, 'Status' \ - ' code returned %d' % rsp.status_code - - # check request content against restconf's datastore - response = requests.get(url, auth=('admin', 'admin'), - headers={'Accept': 'application/xml'}) - assert response.status_code == 200 - req = ET.tostring(ET.fromstring(xml_string)) - res = ET.tostring(ET.fromstring(response.text)) - assert req == res, 'uploaded and stored xml, are not the same\n' \ - 'uploaded: %s\nstored:%s' % (req, res) - - # collect flow table state on switch - switch_flows = get_flows(self.net) - assert len(switch_flows) > 0 - - # compare requested object and flow table state - for important_element in check_elements(xml_string, keywords): - # log.info('important element: {}'.format(important_element.nodeName)) - comparator = COMPARATORS.get(important_element.nodeName, - COMPARATORS['default']) - - comparator(important_element, switch_flows[0]) - - return new_test - - # generate list of available xml requests - xmlfiles = None - if xmls is not None: - xmlfiles = ('f%d.xml' % fid for fid in xmls) - else: - xmlfiles = (xml for xml in os.listdir(path) if 
xml.endswith('.xml')) - - # define key getter for sorting - def get_test_number(test_name): - return int(test_name[1:-4]) - - for xmlfile in xmlfiles: - test_name = 'test_xml_%04d' % get_test_number(xmlfile) - setattr(TestOpenFlowXMLs, - test_name, - generate_test(os.path.join(path, xmlfile))) - - -if __name__ == '__main__': - # set up logging - logging.basicConfig(level=logging.DEBUG) - - # parse cmdline arguments - parser = argparse.ArgumentParser(description='Run switch <-> ODL tests ' - 'defined by xmls.') - parser.add_argument('--odlhost', default='127.0.0.1', help='host where ' - 'odl controller is running') - parser.add_argument('--odlport', type=int, default=8080, help='port on ' - 'which odl\'s RESTCONF is listening') - parser.add_argument('--mnport', type=int, default=6653, help='port on ' - 'which odl\'s controller is listening') - parser.add_argument('--xmls', default=None, help='generete tests only ' - 'from some xmls (i.e. 1,3,34) ') - args = parser.parse_args() - - # set host and port of ODL controller for test cases - TestOpenFlowXMLs.port = args.odlport - TestOpenFlowXMLs.host = args.odlhost - TestOpenFlowXMLs.mn_port = args.mnport - - keywords = None - with open('keywords.csv') as f: - keywords = dict(line.strip().split(';') for line in f - if not line.startswith('#')) - - match_keywords = None - with open('match-keywords.csv') as f: - match_keywords = dict(line.strip().split(';') for line in f - if not line.startswith('#')) - - action_keywords = None - with open('action-keywords.csv') as f: - action_keywords = dict(line.strip().split(';') for line in f - if not line.startswith('#')) - - # fix arguments for unittest - del sys.argv[1:] - - # generate tests for TestOpenFlowXMLs - if args.xmls is not None: - xmls = map(int, args.xmls.split(',')) - generate_tests_from_xmls('xmls', xmls) - else: - generate_tests_from_xmls('xmls') - - # run all tests - unittest.main() diff --git a/tools/OF_Test/ofctl/t1 b/tools/OF_Test/ofctl/t1 deleted file mode 100644 index 23f204b49c..0000000000 --- a/tools/OF_Test/ofctl/t1 +++ /dev/null @@ -1 +0,0 @@ -cookie=0xa, duration=1.451s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=dec_ttl diff --git a/tools/OF_Test/ofctl/t11 b/tools/OF_Test/ofctl/t11 deleted file mode 100644 index 1ceaf006e6..0000000000 --- a/tools/OF_Test/ofctl/t11 +++ /dev/null @@ -1 +0,0 @@ -cookie=0xa, duration=1.754s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=push_vlan diff --git a/tools/OF_Test/ofctl/t19 b/tools/OF_Test/ofctl/t19 deleted file mode 100644 index 43044613ce..0000000000 --- a/tools/OF_Test/ofctl/t19 +++ /dev/null @@ -1 +0,0 @@ -cookie=0xa, duration=1.753s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=dec_ttl diff --git a/tools/OF_Test/ofctl/t2 b/tools/OF_Test/ofctl/t2 deleted file mode 100644 index 6f39f37d53..0000000000 --- a/tools/OF_Test/ofctl/t2 +++ /dev/null @@ -1 +0,0 @@ -cookie=0xa, duration=1.557s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_src=10.0.0.1 actions=drop diff --git a/tools/OF_Test/ofctl/t20 b/tools/OF_Test/ofctl/t20 deleted file mode 100644 index 33d4c2e286..0000000000 --- a/tools/OF_Test/ofctl/t20 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=17.581s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t21 b/tools/OF_Test/ofctl/t21 deleted file mode 100644 index 0c97c64306..0000000000 --- a/tools/OF_Test/ofctl/t21 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=14.319s, table=2, 
n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t22 b/tools/OF_Test/ofctl/t22 deleted file mode 100644 index 3a4b0d16b3..0000000000 --- a/tools/OF_Test/ofctl/t22 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=12.954s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t25 b/tools/OF_Test/ofctl/t25 deleted file mode 100644 index a7a3eadb3b..0000000000 --- a/tools/OF_Test/ofctl/t25 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=16.708s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t27 b/tools/OF_Test/ofctl/t27 deleted file mode 100644 index 1008ca52dd..0000000000 --- a/tools/OF_Test/ofctl/t27 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=38.123s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=write_metadata:0xa/0xc diff --git a/tools/OF_Test/ofctl/t29 b/tools/OF_Test/ofctl/t29 deleted file mode 100644 index 2ab911c7a9..0000000000 --- a/tools/OF_Test/ofctl/t29 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=20.251s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=strip_vlan diff --git a/tools/OF_Test/ofctl/t30 b/tools/OF_Test/ofctl/t30 deleted file mode 100644 index 44fe1ec18b..0000000000 --- a/tools/OF_Test/ofctl/t30 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=17.729s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t31 b/tools/OF_Test/ofctl/t31 deleted file mode 100644 index 3e9dbca2f8..0000000000 --- a/tools/OF_Test/ofctl/t31 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=14.874s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t33 b/tools/OF_Test/ofctl/t33 deleted file mode 100644 index 1b74b0c9c8..0000000000 --- a/tools/OF_Test/ofctl/t33 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=23.141s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t36 b/tools/OF_Test/ofctl/t36 deleted file mode 100644 index 03b530c925..0000000000 --- a/tools/OF_Test/ofctl/t36 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=35.125s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t38 b/tools/OF_Test/ofctl/t38 deleted file mode 100644 index 0109e4244b..0000000000 --- a/tools/OF_Test/ofctl/t38 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=69.848s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=set_queue:1 diff --git a/tools/OF_Test/ofctl/t4 b/tools/OF_Test/ofctl/t4 deleted file mode 100644 index c11ad89725..0000000000 --- a/tools/OF_Test/ofctl/t4 +++ /dev/null @@ -1 +0,0 @@ -cookie=0xa, duration=3.098s, table=2, n_packets=0, n_bytes=0, priority=2,ip,dl_src=00 diff --git a/tools/OF_Test/ofctl/t41 b/tools/OF_Test/ofctl/t41 deleted file mode 100644 index ede432ea4f..0000000000 --- a/tools/OF_Test/ofctl/t41 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=19.763s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t43 b/tools/OF_Test/ofctl/t43 deleted file mode 100644 index 313dc26986..0000000000 --- a/tools/OF_Test/ofctl/t43 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=93.115s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t45 
b/tools/OF_Test/ofctl/t45 deleted file mode 100644 index 6e7ba483bb..0000000000 --- a/tools/OF_Test/ofctl/t45 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=19.913s, table=2, n_packets=0, n_bytes=0, priority=2,metadata=0x100/0xffffff0000000101 actions=drop diff --git a/tools/OF_Test/ofctl/t48 b/tools/OF_Test/ofctl/t48 deleted file mode 100644 index 5fc3381029..0000000000 --- a/tools/OF_Test/ofctl/t48 +++ /dev/null @@ -1 +0,0 @@ - cookie=0xa, duration=45.249s, table=2, n_packets=0, n_bytes=0, priority=2,tun_id=0x100/0xffffff0000000101 actions=goto_table:2 diff --git a/tools/OF_Test/ofctl/t5 b/tools/OF_Test/ofctl/t5 deleted file mode 100644 index f0a6dd6e7b..0000000000 --- a/tools/OF_Test/ofctl/t5 +++ /dev/null @@ -1 +0,0 @@ -cookie=0xa, duration=2.095s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop diff --git a/tools/OF_Test/ofctl/t6 b/tools/OF_Test/ofctl/t6 deleted file mode 100644 index 9b9cb261cf..0000000000 --- a/tools/OF_Test/ofctl/t6 +++ /dev/null @@ -1 +0,0 @@ -cookie=0xa, duration=4.08s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=goto_table diff --git a/tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/010__Connect_256_Switches.txt b/tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/010__Connect_256_Switches.txt deleted file mode 100644 index 95eec94392..0000000000 --- a/tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/010__Connect_256_Switches.txt +++ /dev/null @@ -1,41 +0,0 @@ -*** Settings *** -Documentation Test suite with connection of multiple switches -Library OperatingSystem -Library Collections -Library XML -Library SSHLibrary -Variables ../../../../csit/variables/Variables.py -Library RequestsLibrary -Library ../../../../csit/libraries/Common.py - -*** Test Cases *** -Are 256 Switches Connected - [Setup] Start Mininet 256 - [Teardown] Stop Mininet - [Documentation] Checks wheather switches are connected to controller - ${resp}= RequestsLibrary.Get session /restconf/operational/network-topology:network-topology/topology/flow:1 headers=${ACCEPT_XML} - Log ${resp.content} - ${count}= Get Element Count ${resp.content} xpath=node - Should Be Equal As Numbers ${count} 256 - -*** Keywords *** -Start Mininet [Arguments] ${swnr} - [Documentation] Starts mininet with requested number of switches (${swnr}) - Log Starting mininet with ${swnr} switches - Open Connection ${MININET} prompt=> timeout=600 - Login With Public Key ${MININET_USER} ${USER_HOME}/.ssh/id_rsa any - Write sudo ovs-vsctl set-manager ptcp:6644 - Write sudo mn -c - Read Until > - Write sudo mn --controller=remote,ip=${CONTROLLER} --topo linear,${swnr} --switch ovsk,protocols=OpenFlow13 - Read Until mininet> - Sleep 3s - Create Session session http://${CONTROLLER}:${PORT} auth=${AUTH} headers=${HEADERS_XML} -Stop Mininet - [Documentation] Stops mininet - Log Stopping mininet - Delete All Sessions - Read - Write exit - Read Until > - Close Connection diff --git a/tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/030_Config_100k_flows.txt b/tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/030_Config_100k_flows.txt deleted file mode 100644 index c5931e16f3..0000000000 --- a/tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/030_Config_100k_flows.txt +++ /dev/null @@ -1,50 +0,0 @@ -*** Settings *** -Documentation Test suite for Stats Manager flows collection -Library libconfig.py -Suite Teardown Delete Flows - - -*** Variables *** -${expdur}= 660 - -*** Test Cases *** -Configure 100k Flows - ${task1}= Configure 
Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 1 10000 - ${task2}= Configure Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 10001 20000 - ${task3}= Configure Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 20001 30000 - ${task4}= Configure Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 30001 40000 - ${task5}= Configure Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 40001 50000 - ${task6}= Configure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 1 10000 - ${task7}= Configure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 10001 20000 - ${task8}= Configure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 20001 30000 - ${task9}= Configure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 30001 40000 - ${task10}= Configure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 40001 50000 - ${dur}= Wait Until ${task1} ${task2} ${task3} ${task4} ${task5} ${task6} ${task7} ${task8} ${task9} ${task10} timeout=${expdur} - ${added1}= Call Method ${task1} result - ${added2}= Call Method ${task2} result - ${added3}= Call Method ${task3} result - ${added4}= Call Method ${task4} result - ${added5}= Call Method ${task5} result - ${added6}= Call Method ${task6} result - ${added7}= Call Method ${task7} result - ${added8}= Call Method ${task8} result - ${added9}= Call Method ${task9} result - ${added10}= Call Method ${task10} result - ${count}= Evaluate ${added1}+${added2}+${added3}+${added4}+${added5}+${added6}+${added7}+${added8}+${added9}+${added10} - Should Be Equal As Strings ${count} 100000 ${count} flows added in ${dur} seconds - - -*** Keywords **** -Delete Flows - ${task1}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 1 10000 - ${task2}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 10001 20000 - ${task3}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 20001 30000 - ${task4}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 30001 40000 - ${task5}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 1 40001 50000 - ${task6}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 1 10000 - ${task7}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 10001 20000 - ${task8}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 20001 30000 - ${task9}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 30001 40000 - ${task10}= Deconfigure Flows ${CONTROLLER} ${RESTCONFPORT} 1 2 40001 50000 - Wait Until ${task1} ${task2} ${task3} ${task4} ${task5} ${task6} ${task7} ${task8} ${task9} ${task10} timeout=${expdur} - diff --git a/tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/libconfig.py b/tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/libconfig.py deleted file mode 100644 index 591a835ebc..0000000000 --- a/tools/OF_Test/robot_suites/998__Independent_OF_Tests_ovs/libconfig.py +++ /dev/null @@ -1,157 +0,0 @@ -import requests -import time -from threading import Thread -from functools import wraps -# from multiprocessing import Process - -__all__ = ['configure_flows', 'wait_until', 'deconfigure_flows'] - - -# class KeyWord(Process): -class KeyWord(Thread): - def __init__(self, *args, **kwargs): - super(KeyWord, self).__init__(*args, **kwargs) - self._stop = False - self._kw_result = None - - def stop(self): - self._stop = True - - def result(self): - return self._kw_result - - -def async_task(func): - """Taken from http://code.activestate.com/recipes/576684-simple-threading-decorator/ - and modified - """ - @wraps(func) - def async_func(*args, **kwargs): - func_hl = KeyWord(target=func, args=args, kwargs=kwargs) - func_hl._Thread__args = (func_hl,) + func_hl._Thread__args - # func_hl._args = (func_hl,) + func_hl._args - func_hl.start() - return func_hl - - 
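libconfig.py in this hunk makes the Robot keywords asynchronous: each keyword body runs in a Thread subclass that carries a stop flag and a result, and Wait Until joins the workers and asks stragglers to stop. A stripped-down sketch of that mechanism; StoppableTask, wait_until_done and count_to are illustrative names, not the keywords the suite exposes:

    import time
    from threading import Thread

    class StoppableTask(Thread):
        """Worker thread with a cooperative stop flag and a stored result."""
        def __init__(self, func, *args):
            super(StoppableTask, self).__init__()
            self.stopped = False
            self.result = None
            self._func = func
            self._args = args

        def run(self):
            # the thread object is passed in so the body can poll the stop flag
            self.result = self._func(self, *self._args)

        def stop(self):
            self.stopped = True

    def wait_until_done(tasks, timeout=30):
        """Join all tasks, asking stragglers to stop once the timeout expires."""
        deadline = time.time() + timeout
        for t in tasks:
            t.join(max(0, deadline - time.time()))
        for t in tasks:
            if t.is_alive():
                t.stop()
                t.join()

    def count_to(task, limit):
        # a keyword body checks task.stopped between steps so it can be cancelled
        n = 0
        while n < limit and not task.stopped:
            n += 1
        return n

    task = StoppableTask(count_to, 1000000)
    task.start()
    wait_until_done([task], timeout=5)
    print(task.result)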
return async_func - - -def wait_until(*tasks, **kwargs): - tstart = time.time() - - timeout = 30 - if 'timeout' in kwargs: - timeout = int(kwargs['timeout']) - - cnt = len(tasks) - while time.time() < (timeout + tstart): - tfinished = 0 - for t in tasks: - if t.is_alive() is False: - tfinished += 1 - continue - t.join(timeout=0.2) - if tfinished == cnt: - return (time.time()-tstart) - - for t in tasks: - if t.is_alive() is True: - t.stop() - # t.terminate() - t.join() - - return (time.time()-tstart) - - -@async_task -def Example_of_robot_keyword(self, a, b, c): - """be carefull, when calling this kw from robot, - do not count on self, it is a thread object itself - injected by decorator. The purpose is to make - possibility to exit from thread on demand by - wait until keywork which makes thread.stop() - if needed. In your fw you should use self._stop - variable. - - - robot sample: - ${thread}= Example Of Robot Keyword a b c - """ - while True: - if self._stop is True: - break - - -@async_task -def configure_flows(self, host, port, switchid, tableid, minid, maxid): - flow_template = ''' - - false - - - 0 - - - 0 - - - - - - {} - {} - 255 - false - - - - 2048 - - - 10.0.1.0/24 - - 1 - FooXf{} - {} - false -''' - - self._kw_result = 0 - - ses = requests.Session() - - for i in range(int(minid), int(maxid) + 1): - if self._stop is True: - break - fid = str(i) - flow = flow_template.format(tableid, fid, fid, fid) - url = 'http://{}:{}/restconf/config/opendaylight-inventory:nodes/node/openflow:{}/table/{}/flow/{}'.format( - host, port, switchid, tableid, fid) - - try: - rsp = ses.put(url, headers={'Content-Type': 'application/xml'}, data=flow, timeout=3) - if rsp.status_code == 200: - self._kw_result += 1 - - except Exception: - pass - - -@async_task -def deconfigure_flows(self, host, port, switchid, tableid, minid, maxid): - """Result will be the number of status code 200 returned""" - self._kw_result = 0 - ses = requests.Session() - - for fid in range(int(minid), int(maxid)): - if self._stop is True: - break - url = 'http://{}:{}/restconf/config/opendaylight-inventory:nodes/node/openflow:{}/table/{}/flow/{}'.format( - host, port, switchid, tableid, fid) - - try: - rsp = ses.delete(url, headers={'Content-Type': 'application/xml'}, timeout=3) - if rsp.status_code == 200: - self._kw_result += 1 - except Exception: - pass diff --git a/tools/OF_Test/test.py b/tools/OF_Test/test.py deleted file mode 100644 index ba7baf14ff..0000000000 --- a/tools/OF_Test/test.py +++ /dev/null @@ -1,14 +0,0 @@ -from xml.dom import minidom - -xmldoc = minidom.parse('./xmls/f1.xml') -flow = xmldoc.childNodes[0] -match = flow.childNodes[15] - -ipv4dst = match.getElementsByTagName("ipv4-destination") -print ipv4dst[0].childNodes[0].data - -cvok = match.getElementsByTagName("cvok") -print cvok - -print "ethernet type", match.getElementsByTagName("type")[0].childNodes[0].data -# print "ethernet cvok", match.getElementsByTagName("cvok")[0].childNodes[0].data diff --git a/tools/OF_Test/test1.py b/tools/OF_Test/test1.py deleted file mode 100644 index 8e3b06831f..0000000000 --- a/tools/OF_Test/test1.py +++ /dev/null @@ -1,13 +0,0 @@ -from xml.dom import minidom - -xmldoc = minidom.parse('./xmls/f7.xml') -flow = xmldoc.childNodes[0] -match = flow.childNodes[11] - -print match.toxml() - -ip_match = match.getElementsByTagName("ip-match") -print 'ip-match:', ip_match - -# print "ethernet type", match.getElementsByTagName("type")[0].childNodes[0].data -# print "ethernet cvok", 
match.getElementsByTagName("cvok")[0].childNodes[0].data diff --git a/tools/OF_Test/xmls/f1.xml b/tools/OF_Test/xmls/f1.xml deleted file mode 100644 index 7afe8fe792..0000000000 --- a/tools/OF_Test/xmls/f1.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 124 - 255 - false - - - - 2048 - - - 10.0.1.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 1 - 34 - FooXf1 - 2 - false - diff --git a/tools/OF_Test/xmls/f10.xml b/tools/OF_Test/xmls/f10.xml deleted file mode 100644 index 2587391008..0000000000 --- a/tools/OF_Test/xmls/f10.xml +++ /dev/null @@ -1,48 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 133 - 255 - - - - 2048 - - -
ff:ff:29:01:19:61
-
- -
00:00:00:11:23:ae
-
-
- 17.1.2.3/8 - 172.168.5.6/16 - - 132 - 0 - 0 - - 768 - 384 - 0 -
- 1200 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 3400 - FooXf10 - 2 - false -
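Each of these xmls/f*.xml payloads is pushed by odl_tests.py with a single RESTCONF PUT to /restconf/config/opendaylight-inventory:nodes/node/openflow:1/table/<table_id>/flow/<id>, then read back and compared against the switch. A small sketch of that round trip, assuming a local controller and the admin/admin credentials used above; the table/2/flow/10 path is illustrative, since the real test pulls table_id and id out of the XML:

    import requests

    URL = ('http://127.0.0.1:8080/restconf/config/opendaylight-inventory:nodes'
           '/node/openflow:1/table/2/flow/10')
    HEADERS = {'Content-Type': 'application/xml', 'Accept': 'application/xml'}

    with open('xmls/f10.xml') as f:
        payload = f.read()

    # push the flow into the config datastore, then read it back for comparison
    put_rsp = requests.put(URL, auth=('admin', 'admin'), data=payload, headers=HEADERS)
    get_rsp = requests.get(URL, auth=('admin', 'admin'), headers={'Accept': 'application/xml'})
    print('PUT {} GET {}'.format(put_rsp.status_code, get_rsp.status_code))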
diff --git a/tools/OF_Test/xmls/f11.xml b/tools/OF_Test/xmls/f11.xml deleted file mode 100644 index 8384da29eb..0000000000 --- a/tools/OF_Test/xmls/f11.xml +++ /dev/null @@ -1,51 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 134 - 255 - - - - 2048 - - -
ff:ff:29:01:19:61
-
- -
00:00:00:11:23:ae
-
-
- 17.1.2.3/8 - 172.168.5.6/16 - - 1 - 3f - 3 - - - 6 - 3 - - 0 -
- 1200 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 11 - 3400 - FooXf11 - 2 - false -
- diff --git a/tools/OF_Test/xmls/f12.xml b/tools/OF_Test/xmls/f12.xml deleted file mode 100644 index 1eb7559d0f..0000000000 --- a/tools/OF_Test/xmls/f12.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - false - - - 0 - - - 0 - - - - 1 - - - - - - 2 - 135 - 255 - - - - 2054 - - -
ff:ff:ff:ff:ff:ff
-
- -
00:00:00:01:23:ae
-
-
- 1 -
- 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 12 - 34 - FooXf12 - 2 - false -
diff --git a/tools/OF_Test/xmls/f13.xml b/tools/OF_Test/xmls/f13.xml deleted file mode 100644 index e93b571366..0000000000 --- a/tools/OF_Test/xmls/f13.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - false - - - 0 - - - 0 - - - - 1 - - - - - - 2 - 136 - 255 - - - - 2054 - - -
ff:ff:ff:ff:ff:ff
-
- -
00:00:00:01:23:ae
-
-
- 1 - 192.168.4.1 - 10.21.22.23 -
- 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 13 - 34 - FooXf13 - 2 - false -
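Flows like FooXf13 carry an ARP match (ethernet type 2054 with ARP transport addresses), and ethernet_match_comparator checks such ethertypes indirectly: the ovs dump shows a bare protocol token (arp, ip, tcp, udp, sctp) instead of an explicit dl_type. A compact restatement of that check; etype_satisfied is an illustrative name:

    def etype_satisfied(expected_etype, actual_matches):
        """Check an expected ethernet-type against the match dict parsed from the ovs dump."""
        if expected_etype == 2048:
            # IPv4 appears as a bare ip/tcp/udp/sctp token rather than a dl_type value
            return any(proto in actual_matches and actual_matches[proto] is None
                       for proto in ('ip', 'tcp', 'udp', 'sctp'))
        if expected_etype == 2054:
            # ARP likewise appears as a bare arp token
            return actual_matches.get('arp', 'missing') is None
        # anything else is compared numerically against dl_type (hex in the dump)
        return expected_etype == int(actual_matches.get('dl_type', '0x0'), 16)

    # etype_satisfied(2054, {'arp': None, 'arp_spa': '192.168.4.1'})  -> True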
- diff --git a/tools/OF_Test/xmls/f14.xml b/tools/OF_Test/xmls/f14.xml deleted file mode 100644 index 6c075ae7d9..0000000000 --- a/tools/OF_Test/xmls/f14.xml +++ /dev/null @@ -1,52 +0,0 @@ - - - false - - - 0 - - - 0 - - - - 1 - - - - - - 2 - 137 - 255 - - - - 2054 - - -
ff:ff:ff:ff:FF:ff
-
- -
00:00:FC:01:23:ae
-
-
- 1 - 192.168.4.1 - 10.21.22.23 - -
12:34:56:78:98:AB
-
- -
FE:DC:BA:98:76:54
-
-
- 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 14 - 34 - FooXf14 - 2 - false -
- diff --git a/tools/OF_Test/xmls/f15.xml b/tools/OF_Test/xmls/f15.xml deleted file mode 100644 index 99d3eeddda..0000000000 --- a/tools/OF_Test/xmls/f15.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 138 - 255 - - - - 2048 - - -
ff:ff:29:01:19:61
-
- -
00:00:00:11:23:ae
-
-
- - - 78 - true - - -
- 1200 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 15 - 3400 - FooXf15 - 2 - false -
- diff --git a/tools/OF_Test/xmls/f16.xml b/tools/OF_Test/xmls/f16.xml deleted file mode 100644 index 2c70f7428c..0000000000 --- a/tools/OF_Test/xmls/f16.xml +++ /dev/null @@ -1,35 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 139 - 266 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 16 - 34 - FooXf16 - 2 - false - diff --git a/tools/OF_Test/xmls/f17.xml b/tools/OF_Test/xmls/f17.xml deleted file mode 100644 index 7f69eaeb72..0000000000 --- a/tools/OF_Test/xmls/f17.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 140 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf17 - 2 - false - diff --git a/tools/OF_Test/xmls/f18.xml b/tools/OF_Test/xmls/f18.xml deleted file mode 100644 index 252075a1bc..0000000000 --- a/tools/OF_Test/xmls/f18.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 141 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf18 - 2 - false - diff --git a/tools/OF_Test/xmls/f19.xml b/tools/OF_Test/xmls/f19.xml deleted file mode 100644 index 8c21a76163..0000000000 --- a/tools/OF_Test/xmls/f19.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 142 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf19 - 2 - false - diff --git a/tools/OF_Test/xmls/f2.xml b/tools/OF_Test/xmls/f2.xml deleted file mode 100644 index 46c1885bec..0000000000 --- a/tools/OF_Test/xmls/f2.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 125 - 255 - false - - - - 2048 - - - 10.0.0.1 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 2 - 34 - FooXf2 - 2 - false - diff --git a/tools/OF_Test/xmls/f20.xml b/tools/OF_Test/xmls/f20.xml deleted file mode 100644 index b26b4085f2..0000000000 --- a/tools/OF_Test/xmls/f20.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 143 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf20 - 2 - false - diff --git a/tools/OF_Test/xmls/f21.xml b/tools/OF_Test/xmls/f21.xml deleted file mode 100644 index cc5aca0722..0000000000 --- a/tools/OF_Test/xmls/f21.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 144 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf21 - 2 - false - diff --git a/tools/OF_Test/xmls/f22.xml b/tools/OF_Test/xmls/f22.xml deleted file mode 100644 index 9175388409..0000000000 --- a/tools/OF_Test/xmls/f22.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 145 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf22 - 2 - false - diff 
--git a/tools/OF_Test/xmls/f23.xml b/tools/OF_Test/xmls/f23.xml deleted file mode 100644 index 8d1dbcc16f..0000000000 --- a/tools/OF_Test/xmls/f23.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - false - - - 0 - - - 0 - - 0 - 1 - - - - - - 2 - 146 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf23 - 2 - false - diff --git a/tools/OF_Test/xmls/f24.xml b/tools/OF_Test/xmls/f24.xml deleted file mode 100644 index f80ccaa7be..0000000000 --- a/tools/OF_Test/xmls/f24.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 147 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf24 - 2 - false - diff --git a/tools/OF_Test/xmls/f25.xml b/tools/OF_Test/xmls/f25.xml deleted file mode 100644 index 3a30568302..0000000000 --- a/tools/OF_Test/xmls/f25.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 148 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf25 - 2 - false - diff --git a/tools/OF_Test/xmls/f26.xml b/tools/OF_Test/xmls/f26.xml deleted file mode 100644 index b2786c3eed..0000000000 --- a/tools/OF_Test/xmls/f26.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 11 - - - - - - 2 - 149 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf26 - 2 - false - diff --git a/tools/OF_Test/xmls/f27.xml b/tools/OF_Test/xmls/f27.xml deleted file mode 100644 index 069278261f..0000000000 --- a/tools/OF_Test/xmls/f27.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - false - - - 0 - - 12 - 10 - - - - 2 - 150 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf27 - 2 - false - diff --git a/tools/OF_Test/xmls/f28.xml b/tools/OF_Test/xmls/f28.xml deleted file mode 100644 index 08e61e9e13..0000000000 --- a/tools/OF_Test/xmls/f28.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 151 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf28 - 2 - false - diff --git a/tools/OF_Test/xmls/f29.xml b/tools/OF_Test/xmls/f29.xml deleted file mode 100644 index bbdba6d50d..0000000000 --- a/tools/OF_Test/xmls/f29.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 152 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf29 - 2 - false - diff --git a/tools/OF_Test/xmls/f3.xml b/tools/OF_Test/xmls/f3.xml deleted file mode 100644 index 07e4ebe591..0000000000 --- a/tools/OF_Test/xmls/f3.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 126 - 255 - false - - - -
00:00:00:00:00:01
-
-
-
- 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 3 - 34 - FooXf3 - 2 - false -
diff --git a/tools/OF_Test/xmls/f30.xml b/tools/OF_Test/xmls/f30.xml deleted file mode 100644 index 96125c9b84..0000000000 --- a/tools/OF_Test/xmls/f30.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - 8 - - - - - - 2 - 153 - 255 - false - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 30 - 34 - FooXf30 - 2 - false - diff --git a/tools/OF_Test/xmls/f31.xml b/tools/OF_Test/xmls/f31.xml deleted file mode 100644 index 81cb38c087..0000000000 --- a/tools/OF_Test/xmls/f31.xml +++ /dev/null @@ -1,33 +0,0 @@ - - - false - - - 0 - - - - - - 2 - 154 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf31 - 2 - false - diff --git a/tools/OF_Test/xmls/f32.xml b/tools/OF_Test/xmls/f32.xml deleted file mode 100644 index 3c837225db..0000000000 --- a/tools/OF_Test/xmls/f32.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 1 - - - - - - 2 - 155 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf32 - 2 - false - diff --git a/tools/OF_Test/xmls/f33.xml b/tools/OF_Test/xmls/f33.xml deleted file mode 100644 index 0294b10209..0000000000 --- a/tools/OF_Test/xmls/f33.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 10.0.0.1/24 - - - - - - 2 - 156 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf33 - 2 - false - diff --git a/tools/OF_Test/xmls/f34.xml b/tools/OF_Test/xmls/f34.xml deleted file mode 100644 index 27a0e7dffc..0000000000 --- a/tools/OF_Test/xmls/f34.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 10.0.0.21/24 - - - - - - 2 - 157 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf34 - 2 - false - diff --git a/tools/OF_Test/xmls/f35.xml b/tools/OF_Test/xmls/f35.xml deleted file mode 100644 index 60d3eaa413..0000000000 --- a/tools/OF_Test/xmls/f35.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 10.0.23.21/24 - - - - - - 2 - 158 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf35 - 2 - false - diff --git a/tools/OF_Test/xmls/f36.xml b/tools/OF_Test/xmls/f36.xml deleted file mode 100644 index 8942668bfa..0000000000 --- a/tools/OF_Test/xmls/f36.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - 1 - - - - - - 2 - 159 - 255 - false - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 36 - 34 - FooXf36 - 2 - false - diff --git a/tools/OF_Test/xmls/f37.xml b/tools/OF_Test/xmls/f37.xml deleted file mode 100644 index 6b82df960a..0000000000 --- a/tools/OF_Test/xmls/f37.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 1 - - - - - - 2 - 160 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, 
_nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf37 - 2 - false - diff --git a/tools/OF_Test/xmls/f38.xml b/tools/OF_Test/xmls/f38.xml deleted file mode 100644 index faa2fe8e4a..0000000000 --- a/tools/OF_Test/xmls/f38.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 1 - - - - - - 2 - 161 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf38 - 2 - false - diff --git a/tools/OF_Test/xmls/f39.xml b/tools/OF_Test/xmls/f39.xml deleted file mode 100644 index 5f2130ec67..0000000000 --- a/tools/OF_Test/xmls/f39.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 109 - - - - - - 2 - 162 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf39 - 2 - false - diff --git a/tools/OF_Test/xmls/f4.xml b/tools/OF_Test/xmls/f4.xml deleted file mode 100644 index 0f08362ee3..0000000000 --- a/tools/OF_Test/xmls/f4.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 127 - 255 - false - - - - 45 - - -
ff:ff:ff:ff:ff:ff
-
- -
00:00:00:00:23:ae
-
-
-
- 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 4 - 34 - FooXf4 - 2 - false -
diff --git a/tools/OF_Test/xmls/f40.xml b/tools/OF_Test/xmls/f40.xml deleted file mode 100644 index 3eacd6ee1d..0000000000 --- a/tools/OF_Test/xmls/f40.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 109 - - - - - - 2 - 163 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf40 - 2 - false - diff --git a/tools/OF_Test/xmls/f41.xml b/tools/OF_Test/xmls/f41.xml deleted file mode 100644 index a1c4539202..0000000000 --- a/tools/OF_Test/xmls/f41.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - false - - - 0 - - - 0 - - 2 - - - - - - 2 - 164 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf41 - 2 - false - diff --git a/tools/OF_Test/xmls/f42.xml b/tools/OF_Test/xmls/f42.xml deleted file mode 100644 index 538ff0012f..0000000000 --- a/tools/OF_Test/xmls/f42.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 165 - 10 - 10 - false - 2 - - - - 2048 - - - 10.0.0.1/24 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf42 - 2 - false - diff --git a/tools/OF_Test/xmls/f43.xml b/tools/OF_Test/xmls/f43.xml deleted file mode 100644 index b720bdcc79..0000000000 --- a/tools/OF_Test/xmls/f43.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 166 - 10 - 10 - false - 2 - - - - 34525 - - - - 135 - 1 - - - 58 - - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf43 - 2 - false - diff --git a/tools/OF_Test/xmls/f44.xml b/tools/OF_Test/xmls/f44.xml deleted file mode 100644 index ed37acc8b5..0000000000 --- a/tools/OF_Test/xmls/f44.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 167 - 10 - 10 - false - 2 - - 202 - 10122 - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf44 - 2 - false - diff --git a/tools/OF_Test/xmls/f45.xml b/tools/OF_Test/xmls/f45.xml deleted file mode 100644 index 00d934e756..0000000000 --- a/tools/OF_Test/xmls/f45.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 168 - 10 - 10 - false - 2 - - - [B@46645a66 - 500 - - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf45 - 2 - false - diff --git a/tools/OF_Test/xmls/f46.xml b/tools/OF_Test/xmls/f46.xml deleted file mode 100644 index 59d482c0f1..0000000000 --- a/tools/OF_Test/xmls/f46.xml +++ /dev/null @@ -1,44 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 169 - 10 - 10 - false - 2 - - - - 34525 - - - - 10028 - - 00:0c:29:0e:4c:67 - c2:00:54:f5:00:00 - - 135 - 0 - - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf46 - 2 - false - diff --git a/tools/OF_Test/xmls/f47.xml b/tools/OF_Test/xmls/f47.xml deleted file mode 100644 index ba6fa1384c..0000000000 --- a/tools/OF_Test/xmls/f47.xml +++ /dev/null @@ -1,42 +0,0 @@ - - - false - - - 0 - - - 0 - - 5 - - - - - - 2 - 170 - 10 - 10 - false - 2 - - - - 2048 - - - 22 - 1435 - - 132 - - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, 
_nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf47 - 2 - false - diff --git a/tools/OF_Test/xmls/f48.xml b/tools/OF_Test/xmls/f48.xml deleted file mode 100644 index f20d5d791d..0000000000 --- a/tools/OF_Test/xmls/f48.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - false - - - 0 - - 2 - - - - 2 - 171 - 10 - 10 - false - 2 - - - [B@f7d120c - 10668 - - - 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 34 - FooXf48 - 2 - false - diff --git a/tools/OF_Test/xmls/f49.xml b/tools/OF_Test/xmls/f49.xml deleted file mode 100644 index 1f8c5d7c80..0000000000 --- a/tools/OF_Test/xmls/f49.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - false - - - - 1 - - - - 2 - 172 - 10 - 10 - false - 2 - - - - 10 - true - - 3 - - - 0 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 10 - 0 - FooXf49 - 2 - false - diff --git a/tools/OF_Test/xmls/f5.xml b/tools/OF_Test/xmls/f5.xml deleted file mode 100644 index 9b1a870379..0000000000 --- a/tools/OF_Test/xmls/f5.xml +++ /dev/null @@ -1,41 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 128 - 255 - - - - 2048 - - -
ff:ff:ff:ff:ff:ff
-
- -
00:00:00:00:23:ae
-
-
- 10.1.2.3/24 - 20.4.5.6/16 - 0 -
- 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 5 - 34 - FooXf5 - 2 - false -
diff --git a/tools/OF_Test/xmls/f6.xml b/tools/OF_Test/xmls/f6.xml deleted file mode 100644 index a220752942..0000000000 --- a/tools/OF_Test/xmls/f6.xml +++ /dev/null @@ -1,44 +0,0 @@ - - - false - - - 0 - - - 0 - - - - 1 - - - - - - 2 - 129 - 255 - - - - 2048 - - -
ff:ff:ff:ff:ff:ff
-
- -
00:00:00:01:23:ae
-
-
- 10.1.2.3/24 - 40.4.5.6/16 -
- 12 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 5 - 34 - FooXf5 - 2 - false -
diff --git a/tools/OF_Test/xmls/f7.xml b/tools/OF_Test/xmls/f7.xml deleted file mode 100644 index 64047dd9ca..0000000000 --- a/tools/OF_Test/xmls/f7.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 130 - 255 - - - - 2048 - - -
ff:ff:ff:ff:ff:aa
-
- -
00:00:00:11:23:ae
-
-
- 10.1.2.3/24 - 20.4.5.6/16 - - 56 - 15 - 1 - - 0 -
- 12000 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 7 - 12000 - FooXf7 - 2 - false -
diff --git a/tools/OF_Test/xmls/f8.xml b/tools/OF_Test/xmls/f8.xml deleted file mode 100644 index 8c19936b5c..0000000000 --- a/tools/OF_Test/xmls/f8.xml +++ /dev/null @@ -1,50 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 131 - 255 - - - - 2048 - - -
ff:ff:29:01:19:61
-
- -
00:00:00:11:23:ae
-
-
- 17.1.2.3/8 - 172.168.5.6/16 - - 6 - 2 - 2 - - 25364 - 8080 - 0 -
- 1200 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 8 - 3400 - FooXf8 - 2 - false -
- - diff --git a/tools/OF_Test/xmls/f9.xml b/tools/OF_Test/xmls/f9.xml deleted file mode 100644 index b32c398302..0000000000 --- a/tools/OF_Test/xmls/f9.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - false - - - 0 - - - 0 - - - - - - 2 - 132 - 255 - - - - 2048 - - -
20:14:29:01:19:61
-
- -
00:00:00:11:23:ae
-
-
- 19.1.2.3/10 - 172.168.5.6/18 - - 17 - 8 - 3 - - 25364 - 8080 - 0 -
- 1200 - FlowModFlags [_cHECKOVERLAP=false, _rESETCOUNTS=false, _nOPKTCOUNTS=false, _nOBYTCOUNTS=false, _sENDFLOWREM=false] - 9 - 3400 - FooXf9 - 2 - false -
- diff --git a/tools/Robot_Tool/README.md b/tools/Robot_Tool/README.md deleted file mode 100644 index 999946e68c..0000000000 --- a/tools/Robot_Tool/README.md +++ /dev/null @@ -1,73 +0,0 @@ -robot_tool -========== -*robot test tool for OpenDaylight Project.* - -* Version: 0.1 -* Authors: [Baohua Yang](mailto:yangbaohua@gmail.com), [Denghui Huang](mailto:huangdenghui@gmail.com) -* Homepage: - -##Get Code - -`git clone https://github.com/yeasy/robot_tool.git` - - -##Usage -###Prerequisites -* Python 2.6/2.7 -* Python [Roboframework-requests library](https://github.com/bulkan/robotframework-requests/) - - pip install -U robotframework-requests - -* [OpenDaylight Controller](https://wiki.opendaylight.org/view/GettingStarted:Developer_Main) - ``` - # Download and build OpenDaylight Controller - git clone https://git.opendaylight.org/gerrit/p/controller.git - cd controller/opendaylight/distribution/opendaylight - mvn clean install -DskipTests -Dmaven.compile.fork=true -U - ``` -* [Mininet](http://mininet.org/walkthrough/) -* [Robotframework](http://robotframework.org/) - -###Run Test -* Start the [OpenDaylight Controller](https://wiki.opendaylight.org/view/GettingStarted:Developer_Main) - - ``` - cd controller/target/distribution.opendaylight-0.1.0-SNAPSHOT-osgipackage/opendaylight/ - ./run.sh - ``` -* Start mininet, and make sure mininet has all switches connected to the controller, for example, - `sudo mn --controller=remote,ip=your_controller_ip --topo tree,2` -* Go to the suites directory, executing the suite such as `pybot --variable topo_tree_level:2 base` which will run all tests in the base edition or `pybot --variable topo_tree_level:2 switch_manager.txt` to test the switch manager module. - -##Code Structure - - robot_tool - \---------suites # all robot test suites - | \-----base # all test suites for the base edition - | | - | \-----service_provider # all test suites for the service provider edition - | | - | \-----virtualization # all test suites for the service provider edition - | - \---------libraries # all keywords - | - \---------resources # resources related files - | - \---------variables # all variables - - -##Development Plan -* Finish test suites for the base edition. - -##About OpenDaylight -OpenDaylight is the first production-quality open-source SDN management platform sponsored by Linux Foundation. -Lead SDN enterprises (Ericsson, IBM, Microsoft, Redhat, Cisco, Juniper, NEC, VMWare etc.) are involved to develop and support the project. -Please go to the official [homepage](http://www.opendaylight.org) page to find more information. - - -##Robot framework user guide. - http://robotframework.googlecode.com/hg/doc/userguide/RobotFrameworkUserGuide.html?r=2.8.1 - -##Testlibraries references. - 3.1 A list of available test libraries for Robot Framework - http://code.google.com/p/robotframework/wiki/TestLibraries diff --git a/tools/Robot_Tool/create_docs.py b/tools/Robot_Tool/create_docs.py deleted file mode 100644 index bca2d82330..0000000000 --- a/tools/Robot_Tool/create_docs.py +++ /dev/null @@ -1,70 +0,0 @@ -""" -Robot testdoc and testlib generator -Authors: Kailash Khalasi (kkhalasi@iix.net) -Created: 2015-07-21 -This script will issue a command to all the robot suites and libraries in the given directories -to generate documentation using Robot's "testdoc" and "libdoc" tool. 
-ex usage: python create_docs.py suitelocation librarylocation suitedocoutputlocation libdocoutputlocation -ex values: -suitelocation:$HOME/integration/test/csit/suites -librarylocation:$HOME/integration/test/csit/libraries -suitedocoutputlocation:/tmp/RobotDocs -libdocoutputlocation: /tmp/RobotLibs -""" - -import os -from sys import argv -import robot.testdoc -import robot.libdoc - -if len(argv) != 5: - suiteRoot = os.getenv("HOME")+'/integration/test/csit/suites' - libraryRoot = os.getenv("HOME")+'/integration/test/csit/libraries' - tmpSuite = '/tmp/RobotDocs/' - tmpLib = '/tmp/RobotLibs/' - print "All arguments are not passed....Using default arguments:" - print 'Suite Location: ' + suiteRoot - print 'Library Location: ' + libraryRoot - print 'Suite Doc Output Location: ' + tmpSuite - print 'Library Doc Output Location: ' + tmpLib -else: - script, suiteRoot, libraryRoot, tmpSuite, tmpLib = argv - - -def generate_docs(testDir, outputFolder, debug=False): - """ - Generate Robot Documentation - - Args: - testDir: The directory in which your robot files live (can be suites or libraries) - outputFolder: The directory where you want your generated docs to be placed. - - This function will "walk" through each robot file in the given "suitelocation" - and "librarylocation" and will issue a python -m robot.testdoc|libdoc on each - of those files. The script will first determine if you've passed in a robot - suite location or robot library location. The outcome generates an HTML file - (our expected documents) to the given "suitedocoutputlocation"|"libdocoutputlocation". - - :param debug: Default is false. Setting debug to true will print the output of each - command entered to generate a robot doc - """ - - if testDir == suiteRoot: - docFunction = robot.testdoc.testdoc - else: - docFunction = robot.libdoc.libdoc - - for root, dirs, files, in os.walk(testDir): - for file in files: - if file.endswith(".robot"): - inputFile = os.path.join(root, file) - outputFile = os.path.join(outputFolder, file + ".html") - docFunction(inputFile, outputFile) - -tmpDirs = [tmpSuite, tmpLib] -for dirs in tmpDirs: - if not os.path.exists(dirs): - os.makedirs(dirs) - -generate_docs(suiteRoot, tmpSuite) -generate_docs(libraryRoot, tmpLib) diff --git a/tools/Robot_Tool/libraries/ArpHandler.py b/tools/Robot_Tool/libraries/ArpHandler.py deleted file mode 100644 index bbab7c63e8..0000000000 --- a/tools/Robot_Tool/libraries/ArpHandler.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class ArpHandler(TestModule): - """ - Test for the arp handler. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/subnetservice', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_subnets(self): - """ - The name is suggested to match the NB API. - list all subnets and their properties. - """ - return super(self.__class__, self).get_entries('subnets') - - def add_subnet_gateway(self, name, body): - """ - Add a subnet gateway. 
- """ - super(self.__class__, self).add_entry('subnet', name, body) - - def remove_subnet_gateway(self, name): - """ - Remove a subnet gateway. - """ - super(self.__class__, self).remove_entry('subnet', name) - - def test_subnet_operations(self, name, body): - """ - Test subnet operations, like adding and removeing a subnet. - >>> ArpHandler().test_subnet_operations('test',{'name':'test','subnet':'10.0.0.254/8'}) - True - """ - return super(self.__class__, self).test_add_remove_operations('subnets', 'subnet', name, body, 'subnetConfig') - - -if __name__ == '__main__': - print 'arp handler' diff --git a/tools/Robot_Tool/libraries/Common.py b/tools/Robot_Tool/libraries/Common.py deleted file mode 100644 index 6eaa5a4ed5..0000000000 --- a/tools/Robot_Tool/libraries/Common.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Library for the robot based system test tool of the OpenDaylight project. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-14 -""" -import collections - -''' -Common constants and functions for the robot framework. -''' - - -def collection_should_contain(collection, *members): - """ - Fail if not every members is in the collection. - """ - if not isinstance(collection, collections.Iterable): - return False - for m in members: - if m not in collection: - return False - else: - return True - - -def combine_strings(*strings): - """ - Combines the given `strings` together and returns the result. - The given strings are not altered by this keyword. - """ - result = '' - for s in strings: - if isinstance(s, str) or isinstance(s, unicode): - result += s - if result == '': - return None - else: - return result - -if __name__ == '__main__': - pass diff --git a/tools/Robot_Tool/libraries/ContainerManager.py b/tools/Robot_Tool/libraries/ContainerManager.py deleted file mode 100644 index e40a422b1d..0000000000 --- a/tools/Robot_Tool/libraries/ContainerManager.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class ContainerManager(TestModule): - """ - Test for the container manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/containermanager', user=DEFAULT_USER, password=DEFAULT_PWD, - container=None, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_containers(self): - """ - The name is suggested to match the NB API. - Show the containers - """ - return super(self.__class__, self).get_entries('containers') - - def add_container(self, name, body): - """ - Add a container - """ - super(self.__class__, self).add_entry('container', name, body) - - def remove_container(self, name): - """ - Remove a container - """ - super(self.__class__, self).remove_entry('container', name) - - def test_container_operations(self, name, body): - """ - Test subnet operations, like adding and removeing a subnet. 
- >>> ContainerManager().test_container_operations('cont1', - {'container':'cont1','flowSpecs': [], 'staticVlan':'10','nodeConnectors':[ - "OF|1@OF|00:00:00:00:00:00:00:01", - "OF|23@OF|00:00:00:00:00:00:20:21"]}) - True - """ - return super(self.__class__, self).test_add_remove_operations('containers', 'container', name, body, - 'containerConfig') diff --git a/tools/Robot_Tool/libraries/ForwardingManager.py b/tools/Robot_Tool/libraries/ForwardingManager.py deleted file mode 100644 index 837a2bf5fc..0000000000 --- a/tools/Robot_Tool/libraries/ForwardingManager.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class ForwardingManager(TestModule): - """ - Test for the forwarding manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/staticroute', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_routes(self): - """ - The name is suggested to match the NB API. - list all routes - """ - return super(self.__class__, self).get_entries('routes') - - def add_static_route(self, name, body): - """ - Add a static route. - """ - super(self.__class__, self).add_entry('route', name, body) - - def remove_static_route(self, name): - """ - Remove a static route - """ - super(self.__class__, self).remove_entry('route', name) - - def test_static_route_operations(self, name, body): - """ - Test static route operations, like adding and removeing a route. - >>> ForwardingManager().test_static_route_operations('route1', - {'name':'route1','prefix':'192.168.1.0/24','nextHop':'10.0.0.2'}) - True - """ - return super(self.__class__, self).test_add_remove_operations('routes', 'route', name, body, 'staticRoute') diff --git a/tools/Robot_Tool/libraries/ForwardingRuleManager.py b/tools/Robot_Tool/libraries/ForwardingRuleManager.py deleted file mode 100644 index b80fb14cfa..0000000000 --- a/tools/Robot_Tool/libraries/ForwardingRuleManager.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-05 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class ForwardingRuleManager(TestModule): - """ - Test for the forwarding rule manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/flowprogrammer', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_flows(self): - """ - The name is suggested to match the NB API. 
- Show the flows - """ - return super(self.__class__, self).get_entries('') - - def add_flow_to_node(self, node_type, node_id, name, body): - suffix = 'node/' + node_type + '/' + node_id + '/staticFlow' - super(self.__class__, self).add_entry(suffix, name, body) - - def remove_flow_from_node(self, node_type, node_id, name): - suffix = 'node/' + node_type + '/' + node_id + '/staticFlow' - super(self.__class__, self).remove_entry(suffix, name) - - def test_flow_operations(self, node_type, node_id, name, body): - """ - Test the add,remove,show actions on flows. - >>> body = {'installInHw':'true','name':'flow1','node': - {'id':'00:00:00:00:00:00:00:02','type':'OF'}, - 'priority':'1','etherType':'0x800','nwDst':'10.0.0.1/32','actions':['OUTPUT=1']} - >>> ForwardingRuleManager().test_flow_operations('OF','00:00:00:00:00:00:00:02','flow1',body) - True - >>> body = {'installInHw':'true','name':'flow2','node': - {'id':'00:00:00:00:00:00:00:02','type':'OF'}, - 'priority':'1','etherType':'0x800','nwDst':'10.0.0.2/32','actions':['OUTPUT=2']} - >>> ForwardingRuleManager().test_flow_operations('OF','00:00:00:00:00:00:00:02','flow2',body) - True - """ - result = [] - # current flow table should be empty. - r = self.get_flows() - result.append(body not in r['flowConfig']) - # Add a flow - self.add_flow_to_node(node_type, node_id, name, body) - r = self.get_flows() - result.append(body in r['flowConfig']) - # Remove the flow and test if succeed - if result == [True, True]: - self.remove_flow_from_node(node_type, node_id, name) - r = self.get_flows() - result.append(body not in r['flowConfig']) - return result == [True, True, True] diff --git a/tools/Robot_Tool/libraries/HostTracker.py b/tools/Robot_Tool/libraries/HostTracker.py deleted file mode 100644 index cb353a5023..0000000000 --- a/tools/Robot_Tool/libraries/HostTracker.py +++ /dev/null @@ -1,57 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-06 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class HostTracker(TestModule): - """ - Test for the host tracker.. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/hosttracker', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_hosts(self): - """ - The name is suggested to match the NB API. - list all active hosts, should be done after using h1 ping h2 in mininet - """ - return super(self.__class__, self).get_entries(['hosts/active', 'hosts/inactive'], 'hostConfig') - - def add_host(self, name, body): - """ - Add a host. - """ - super(self.__class__, self).add_entry('address', name, body) - - def remove_host(self, name): - """ - Remove a host. - """ - super(self.__class__, self).remove_entry('address', name) - - def test_host_operations(self, name, body): - """ - Test host operations, like adding and removing. 
- >>> HostTracker().test_host_operations('10.0.1.4', - {'nodeType': 'OF', 'dataLayerAddress': '5e:bf:79:84:10:a6', 'vlan': '1', - 'nodeId': '00:00:00:00:00:00:00:03', 'nodeConnectorId': '9', - 'networkAddress': '10.0.1.4', 'staticHost': True, 'nodeConnectorType': 'OF'}) - True - """ - return super(self.__class__, self).test_add_remove_operations(['hosts/active', 'hosts/inactive'], 'address', - name, body, - 'hostConfig') diff --git a/tools/Robot_Tool/libraries/MininetHandler.py b/tools/Robot_Tool/libraries/MininetHandler.py deleted file mode 100644 index 95f33db2de..0000000000 --- a/tools/Robot_Tool/libraries/MininetHandler.py +++ /dev/null @@ -1,51 +0,0 @@ -""" -Library for the robot based system test tool of the OpenDaylight project. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-18 -""" -from mininet.net import Mininet - - -class MininetHandler(object): - ''' - MininetHandler class will provide all operations about Mininet, such as config controller_ip, start or stop net. - ''' - def __init__(self, controller_ip='127.0.0.1'): - self.controller_ip = controller_ip - self.net = None - - def set_controller_ip(self, controller_ip): - self.controller_ip = controller_ip - - def config_net(self): - net = Mininet(switch=OVSKernelSwitch, controller=RemoteController) # noqa - - print '*** Adding controller' - net.addController('c0', ip=self.controller_ip) - - print '*** Adding hosts' - h1 = net.addHost('h1', mac='00:00:00:00:00:01') - h2 = net.addHost('h2', mac='00:00:00:00:00:02') - h3 = net.addHost('h3', mac='00:00:00:00:00:03') - h4 = net.addHost('h4', mac='00:00:00:00:00:04') - - print '*** Adding switch' - s1 = net.addSwitch('s1') - s2 = net.addSwitch('s2') - s3 = net.addSwitch('s3') - - print '*** Creating links' - net.addLink(h1, s2) - net.addLink(h2, s2) - net.addLink(h3, s3) - net.addLink(h4, s3) - net.addLink(s1, s2) - net.addLink(s1, s3) - - self.net = net - - def start_net(self): - self.net.start() - - def stop_net(self): - self.net.stop() diff --git a/tools/Robot_Tool/libraries/README.md b/tools/Robot_Tool/libraries/README.md deleted file mode 100644 index ffaa7cff71..0000000000 --- a/tools/Robot_Tool/libraries/README.md +++ /dev/null @@ -1 +0,0 @@ -This directory stores all keywords. 
diff --git a/tools/Robot_Tool/libraries/RequestsLibrary.py b/tools/Robot_Tool/libraries/RequestsLibrary.py deleted file mode 100644 index 7b1629ea48..0000000000 --- a/tools/Robot_Tool/libraries/RequestsLibrary.py +++ /dev/null @@ -1,205 +0,0 @@ -import requests -import json - -from urllib import urlencode - -import robot - -from robot.libraries.BuiltIn import BuiltIn - - -class RequestsLibrary(object): - ROBOT_LIBRARY_SCOPE = 'Global' - - def __init__(self): - self._cache = robot.utils.ConnectionCache('No sessions created') - self.builtin = BuiltIn() - - def _utf8_urlencode(self, data): - if not type(data) is dict: - return data - - utf8_data = {} - for k, v in data.iteritems(): - utf8_data[k] = unicode(v).encode('utf-8') - return urlencode(utf8_data) - - def create_session(self, alias, url, headers={}, cookies=None, - auth=None, timeout=None, proxies=None, - verify=False): - - """ Create Session: create a HTTP session to a server - - `url` Base url of the server - - `alias` Robot Framework alias to identify the session - - `headers` Dictionary of default headers - - `auth` Dictionary of username & password for HTTP Basic Auth - - `timeout` connection timeout - - `proxies` proxy server url - - `verify` set to True if Requests should verify the certificate - """ - - self.builtin.log('Creating session: %s' % alias, 'DEBUG') - auth = requests.auth.HTTPBasicAuth(*auth) if auth else None - s = session = requests.Session() - s.headers.update(headers) - s.auth = auth if auth else s.auth - s.proxies = proxies if proxies else s.proxies - - s.verify = self.builtin.convert_to_boolean(verify) - - # cant pass these into the Session anymore - self.timeout = timeout - self.cookies = cookies - self.verify = verify - - # cant use hooks :( - s.url = url - - self._cache.register(session, alias=alias) - return session - - def delete_all_sessions(self): - """ Removes all the session objects """ - - self._cache.empty_cache() - - def to_json(self, content): - """ Convert a string to a JSON object - - `content` String content to convert into JSON - """ - return json.loads(content) - - def _get_url(self, session, uri): - ''' Helpere method to get the full url - ''' - url = session.url - if uri: - slash = '' if uri.startswith('/') else '/' - url = "%s%s%s" % (session.url, slash, uri) - return url - - def get(self, alias, uri, headers=None): - """ Send a GET request on the session object found using the - given `alias` - - `alias` that will be used to identify the Session object in the cache - - `uri` to send the GET request to - - `headers` a dictionary of headers to use with the request - """ - - session = self._cache.switch(alias) - resp = session.get(self._get_url(session, uri), - headers=headers, - cookies=self.cookies, timeout=self.timeout) - - # store the last response object - session.last_resp = resp - return resp - - def post(self, alias, uri, data={}, headers=None, files={}): - """ Send a POST request on the session object found using the - given `alias` - - `alias` that will be used to identify the Session object in the cache - - `uri` to send the GET request to - - `data` a dictionary of key-value pairs that will be urlencoded - and sent as POST data - or binary data that is sent as the raw body content - - `headers` a dictionary of headers to use with the request - - `files` a dictionary of file names containing file data to POST to the server - """ - - session = self._cache.switch(alias) - data = self._utf8_urlencode(data) - - resp = session.post(self._get_url(session, uri), - data=data, 
headers=headers, - files=files, - cookies=self.cookies, timeout=self.timeout) - - # store the last response object - session.last_resp = resp - self.builtin.log("Post response: " + resp.content, 'DEBUG') - return resp - - def put(self, alias, uri, data=None, headers=None): - """ Send a PUT request on the session object found using the - given `alias` - - `alias` that will be used to identify the Session object in the cache - - `uri` to send the PUT request to - - `headers` a dictionary of headers to use with the request - - """ - - session = self._cache.switch(alias) - # data = json.dumps(self._utf8_urlencode(data)) - data = json.dumps(data) - - resp = session.put(self._get_url(session, uri), - data=data, headers=headers, - cookies=self.cookies, timeout=self.timeout) - - self.builtin.log("PUT response: %s DEBUG" % resp.content) - - # store the last response object - session.last_resp = resp - return resp - - def delete(self, alias, uri, data=(), headers=None): - """ Send a DELETE request on the session object found using the - given `alias` - - `alias` that will be used to identify the Session object in the cache - - `uri` to send the DELETE request to - - `headers` a dictionary of headers to use with the request - - """ - - session = self._cache.switch(alias) - args = "?%s" % urlencode(data) if data else '' - resp = session.delete("%s%s" % (self._get_url(session, uri), args), - headers=headers, cookies=self.cookies, - timeout=self.timeout) - - # store the last response object - session.last_resp = resp - return resp - - def head(self, alias, uri, headers=None): - """ Send a HEAD request on the session object found using the - given `alias` - - `alias` that will be used to identify the Session object in the cache - - `uri` to send the HEAD request to - - `headers` a dictionary of headers to use with the request - - """ - - session = self._cache.switch(alias) - resp = session.head(self._get_url(session, uri), headers=headers, - cookies=self.cookies, timeout=self.timeout) - - # store the last response object - session.last_resp = resp - return resp diff --git a/tools/Robot_Tool/libraries/StatisticsManager.py b/tools/Robot_Tool/libraries/StatisticsManager.py deleted file mode 100644 index e24721636e..0000000000 --- a/tools/Robot_Tool/libraries/StatisticsManager.py +++ /dev/null @@ -1,46 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class StatisticsManager(TestModule): - """ - Test for the statistics manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/statistics', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_flow_stats(self): - """ - The name is suggested to match the NB API. - Show the flow statistics - """ - return super(self.__class__, self).get_entries('flow') - - def get_port_stats(self): - """ - The name is suggested to match the NB API. - Show the port statistics - """ - return super(self.__class__, self).get_entries('port') - - def get_table_stats(self): - """ - The name is suggested to match the NB API. 
- Show the table statistics - """ - return super(self.__class__, self).get_entries('table') diff --git a/tools/Robot_Tool/libraries/SwitchManager.py b/tools/Robot_Tool/libraries/SwitchManager.py deleted file mode 100644 index d0cab65006..0000000000 --- a/tools/Robot_Tool/libraries/SwitchManager.py +++ /dev/null @@ -1,48 +0,0 @@ -""" -Library for the robot based system test tool of the OpenDaylight project. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-10 -""" -from robot.libraries.BuiltIn import BuiltIn - - -class SwitchManager(object): - def __init__(self): - self.builtin = BuiltIn() - - def extract_all_nodes(self, content): - """ - Return all nodes. - """ - if isinstance(content, dict) and 'nodeProperties' in content: - self.builtin.log("18") - return [e.get('node') for e in content['nodeProperties']] - else: - self.builtin.log("21") - return None - - def extract_all_properties(self, content, property_type): - if isinstance(content, dict) and property_type in content: - self.builtin.log("26") - list1 = [e.get('properties') for e in content[property_type]] - self.builtin.log(list1) - return [e.get('properties') for e in content[property_type]] - else: - self.builtin.log("29") - return None - - def extract_property_value(self, content, property, property_type): - res = self.extract_all_properties(content, property_type) - return [e.get(property) for e in res] - - def extract_all_node_properties(self, content): - return self.extract_all_properties(content, 'nodeProperties') - - def extract_node_property_values(self, content, property): - return self.extract_property_value(content, property, 'nodeProperties') - - def extract_all_nodeconnector_properties(self, content): - return self.extract_all_properties(content, 'nodeConnectorProperties') - - def extract_nodeconnector_property_values(self, content, property): - return self.extract_property_value(content, property, 'nodeConnectorProperties') diff --git a/tools/Robot_Tool/libraries/Topology.py b/tools/Robot_Tool/libraries/Topology.py deleted file mode 100644 index 131dc667f4..0000000000 --- a/tools/Robot_Tool/libraries/Topology.py +++ /dev/null @@ -1,45 +0,0 @@ -""" -Library for the robot based system test tool of the OpenDaylight project. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-10 -""" -from robot.libraries.BuiltIn import BuiltIn - - -class Topology(object): - ''' - Topology class provide topology database and provide many method to get property of topology. 
- ''' - topo_nodes_db = [ - [], - [{u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}], - [{u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}, - {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:02'}, - {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:03'}]] - - def __init__(self): - self.builtin = BuiltIn() - - def get_nodes_from_topology(self, topo_level): - ''' - get nodes from topology database by topology tree level - ''' - if isinstance(topo_level, str) or isinstance(topo_level, unicode): - if topo_level.isdigit(): - topo_level = int(topo_level) - if topo_level <= 0: - return None - return self.topo_nodes_db[topo_level] - else: - return None - elif isinstance(topo_level, int): - if topo_level <= 0: - return None - return self.topo_nodes_db[topo_level] - else: - return None - -if __name__ == '__main__': - topology = Topology() - print topology.get_nodes_from_topology(2) - print topology.get_nodes_from_topology('2') diff --git a/tools/Robot_Tool/libraries/TopologyManager.py b/tools/Robot_Tool/libraries/TopologyManager.py deleted file mode 100644 index a001677b05..0000000000 --- a/tools/Robot_Tool/libraries/TopologyManager.py +++ /dev/null @@ -1,98 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-01 -""" - -import sys - -sys.path.append('..') -from restlib import * # noqa -from testmodule import TestModule - -sys.path.remove('..') - - -class TopologyManager(TestModule): - """ - Test for the topology manager. - Start 2-layer tree topology network. e.g., in Mininet, run - 'sudo mn --controller=remote,ip=127.0.0.1 --mac --topo tree,2' - """ - - def __init__(self, restSubContext='/controller/nb/v2/topology', user=DEFAULT_USER, password=DEFAULT_PWD, - container=DEFAULT_CONTAINER, contentType='json', prefix=DEFAULT_PREFIX): - super(self.__class__, self).__init__(restSubContext, user, password, container, contentType, prefix) - - def get_topology(self): - """ - The name is suggested to match the NB API. - Get the topology - """ - return super(self.__class__, self).get_entries() - - def get_userlinks(self): - """ - The name is suggested to match the NB API. - Show the userlinks. - """ - suffix = 'userLinks' - r = super(self.__class__, self).read(suffix) - if r: - return r - - def add_userlink(self, name, body): - """ - Add a userlink. - """ - suffix = 'userLink' - r = super(self.__class__, self).update(suffix + '/' + name, body) - return r - - def remove_userlink(self, name): - """ - Remove a userlink. - """ - suffix = 'userLink' - r = super(self.__class__, self).delete(suffix + '/' + name) - return r - - def test_get_topology(self): - """ - The name is suggested to match the NB API. 
- Test the topology - >>> TopologyManager().test_get_topology() - True - """ - result = [] - r = self.get_topology() - if r: - v = [e['edge'] for e in r['edgeProperties']] - result.append({u'tailNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}, - u'type': u'OF', u'id': u'2'}, - u'headNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:03'}, - u'type': u'OF', u'id': u'3'}} in v) - result.append({u'tailNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:03'}, - u'type': u'OF', u'id': u'3'}, - u'headNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}, - u'type': u'OF', u'id': u'2'}} in v) - result.append({u'tailNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:02'}, - u'type': u'OF', u'id': u'3'}, - u'headNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}, - u'type': u'OF', u'id': u'1'}} in v) - result.append({u'tailNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:01'}, - u'type': u'OF', u'id': u'1'}, - u'headNodeConnector': {u'node': {u'type': u'OF', u'id': u'00:00:00:00:00:00:00:02'}, - u'type': u'OF', u'id': u'3'}} in v) - print result == [True, True, True, True] - - def test_userlink_operations(self, name, body): - """ - Test userlink operations, like adding and removing. - >>> TopologyManager().test_userlink_operations('link1', - {'status':'Success','name':'link1', - 'srcNodeConnector':'OF|1@OF|00:00:00:00:00:00:00:02', - 'dstNodeConnector':'OF|1@OF|00:00:00:00:00:00:00:03'}) - True - """ - return super(self.__class__, self).test_add_remove_operations('userLinks', 'userLink', name, body, 'userLinks') diff --git a/tools/Robot_Tool/libraries/restlib.py b/tools/Robot_Tool/libraries/restlib.py deleted file mode 100644 index 11a8dd35f2..0000000000 --- a/tools/Robot_Tool/libraries/restlib.py +++ /dev/null @@ -1,150 +0,0 @@ -""" -CSIT test tools. -Authors: Denghui Huang@IBM, Baohua Yang@IBM -Updated: 2013-11-06 -""" -import json - -import requests - - -# Global variables -DEFAULT_CONTROLLER_IP = '127.0.0.1' -# DEFAULT_CONTROLLER_IP = '9.186.105.113' #just for temp test -DEFAULT_PORT = '8080' -DEFAULT_PREFIX = 'http://' + DEFAULT_CONTROLLER_IP + ':' + DEFAULT_PORT -DEFAULT_CONTAINER = 'default' -DEFAULT_USER = 'admin' -DEFAULT_PWD = 'admin' -MODULES_DIR = 'modules' -TIMEOUTS = 2 - -''' -Send a POST request. -''' - - -def do_post_request(url, content_type, payload=None, user=DEFAULT_USER, password=DEFAULT_PWD): - data = payload - headers = {} - if content_type == 'json': - headers = {'Content-type': 'application/json', 'Accept': 'application/json'} - if payload is not None: - data = json.dumps(payload) - elif content_type == 'xml': - headers = {'Content-type': 'application/xml', 'Accept': 'application/xml'} - else: - print 'unsupported content-type' - try: - r = requests.post(url, data, headers=headers, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except (requests.exceptions.HTTPError, requests.exceptions.Timeout): - return 400 - else: - return r.status_code - - -def do_get_request_with_status_code(url, content_type, user=DEFAULT_USER, password=DEFAULT_PWD): - ''' - Send a GET request. - @return The status code. 
- ''' - r = None - try: - r = requests.get(url, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except (requests.exceptions.HTTPError, requests.exceptions.Timeout) as e: - print e - return r.status_code - finally: - return r.status_code - - -def do_put_request(url, content_type, payload=None, user=DEFAULT_USER, password=DEFAULT_PWD): - ''' - Send a PUT request. - @return The status code. - ''' - data = payload - headers = {} - if content_type == 'json': - headers = {'Content-type': 'application/json', 'Accept': 'application/json'} - if payload is not None: - data = json.dumps(payload) - elif content_type == 'xml': - headers = {'Content-type': 'application/xml', 'Accept': 'application/xml'} - else: - print 'unsupported content-type' - try: - r = requests.put(url, data, headers=headers, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except (requests.exceptions.HTTPError, requests.exceptions.Timeout): - return 400 - else: - return r.status_code - - -def do_delete_request(url, user=DEFAULT_USER, password=DEFAULT_PWD): - ''' - Send a DELETE request. - @return The status code. - ''' - r = None - try: - r = requests.delete(url, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except (requests.exceptions.HTTPError, requests.exceptions.Timeout) as e: - print e - finally: - if r: - return r.status_code - - -def convert_result_to_list(result): - ''' - Convert the result content to list. - ''' - list2 = [] - # print result - content = result.values() - for list1 in content: - list2 = [dict1.values() for dict1 in list1] - # print list2 - list3 = [] - for list4 in list2: - for element in list4: - list3.append(element) - # print list3 - return list3 - - -def do_get_request_with_response_content(url, content_type, user=DEFAULT_USER, password=DEFAULT_PWD, - convert_to_list=False): - ''' - Send a GET request and get the response. - @return response content as list. - ''' - try: - r = requests.get(url, auth=(user, password), timeout=TIMEOUTS) - r.raise_for_status() - except (requests.exceptions.HTTPError, requests.exceptions.Timeout) as e: - print e - return None - else: - if r is not None: - if content_type == 'json': - content = r.json() - return convert_result_to_list(content) if convert_to_list else content - elif content_type == 'xml': # TODO: add parser to xml - return None - - -if __name__ == '__main__': - # example - # Note: in json body, all field name and value (if it is string type) must be enclosed in double quotes. - # This constraint maybe cause by json parser. - body = {"status": "Success", "dstNodeConnector": "OF|1@OF|00:00:00:00:00:00:00:01", "name": "link3", - "srcNodeConnector": "OF|1@OF|00:00:00:00:00:00:00:03"} - url = 'http://127.0.0.1:8080/controller/nb/v2/topology/default/userLink/link3' - content_type = 'json' - print do_put_request(url, content_type, body) diff --git a/tools/Robot_Tool/libraries/testmodule.py b/tools/Robot_Tool/libraries/testmodule.py deleted file mode 100644 index 6ac3478451..0000000000 --- a/tools/Robot_Tool/libraries/testmodule.py +++ /dev/null @@ -1,123 +0,0 @@ -""" -CSIT test tools. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-10-30 -""" - -from restlib import * # noqa - - -class TestModule(object): - """ - Basic module class for test restful APIS. - Support the standard Create, Read, Update, Delete (CRUD) actions. 
- """ - - def __init__(self, restSubContext, user=DEFAULT_USER, password=DEFAULT_PWD, container=DEFAULT_CONTAINER, - contentType='json', prefix=DEFAULT_PREFIX): - self.restSubContext = restSubContext - self.container = container - self.user = user - self.password = password - self.contentType = contentType - self.prefix = prefix - - def extract_properties(self, content, key, property): - """ - Return all nodes. - """ - if not isinstance(content, dict) or key not in content: - return None - else: - return [e.get(property) for e in content[key]] - - def get_entries(self, suffix=None, key=None): - """ - Get the existed entries in the service. - """ - if isinstance(suffix, list) and key: - result = {} - result[key] = [] - for s in suffix: - result[key].extend(self.get_entries(s).get(key)) - return result - elif isinstance(suffix, str): - return self.read(suffix) - elif not suffix: - return self.read() - else: - return None - - def add_entry(self, suffix, name, body): - """ - Add entry to the service. - """ - self.update(suffix + '/' + name, body) - - def remove_entry(self, suffix, name): - """ - Remove entry from the service. - """ - self.delete(suffix + '/' + name) - - def test_add_remove_operations(self, suffix_entries, suffix_entry, name, body, key): - result = [] - # Add an entry - self.add_entry(suffix_entry, name, body) - r = self.get_entries(suffix_entries, key) - if r: - v = r.get(key) - result.append(body in v if v else False) - # Remove the added entry - if result == [True]: - self.remove_entry(suffix_entry, name) - r = self.get_entries(suffix_entries, key) - v = r.get(key) - result.append(body not in v if v else True) - return result == [True, True] - - def create(self, suffix, body=None): - """ - POST to given suffix url. - TODO: complete - """ - url = self.prefix + self.restSubContext - if self.container: - url += '/' + self.container - if suffix: - url += '/' + suffix - return do_post_request(url, self.contentType, body, self.user, self.password) - - def read(self, suffix=None): - """ - GET from given suffix url. - """ - url = self.prefix + self.restSubContext - if self.container: - url += '/' + self.container - if suffix: - url += '/' + suffix - return do_get_request_with_response_content(url, self.contentType, self.user, self.password) - - def update(self, suffix, body=None): - """ - PUT to given suffix url. - """ - url = self.prefix + self.restSubContext - if self.container: - url += '/' + self.container - if suffix: - url += '/' + suffix - return do_put_request(url, self.contentType, body, self.user, self.password) - - def delete(self, suffix): - """ - DELETE to given suffix url. - TODO: complete - """ - url = self.prefix + self.restSubContext - if self.container: - url += '/' + self.container - if suffix: - url += '/' + suffix - return do_delete_request(url, self.user, self.password) diff --git a/tools/Robot_Tool/resources/README.md b/tools/Robot_Tool/resources/README.md deleted file mode 100644 index b0f356cacb..0000000000 --- a/tools/Robot_Tool/resources/README.md +++ /dev/null @@ -1 +0,0 @@ -This directory stores all resources files. 
diff --git a/tools/Robot_Tool/suites/base/__init__.txt b/tools/Robot_Tool/suites/base/__init__.txt deleted file mode 100644 index 2a200e05e6..0000000000 --- a/tools/Robot_Tool/suites/base/__init__.txt +++ /dev/null @@ -1,15 +0,0 @@ -*** Settings *** -Documentation Test suite for the OpenDaylight base edition -Suite Setup Log Start the test on the base edition -Force Tags base -Metadata Version 0.1 -Metadata More Info For more information see https://wiki.opendaylight.org/view/CrossProject:Integration_Group:CSIT_Test_Tools:_Robot_Based -Metadata Executed At ${HOST} -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** - -*** Keywords *** diff --git a/tools/Robot_Tool/suites/base/arp_handler.txt b/tools/Robot_Tool/suites/base/arp_handler.txt deleted file mode 100644 index fd4910ef5c..0000000000 --- a/tools/Robot_Tool/suites/base/arp_handler.txt +++ /dev/null @@ -1,41 +0,0 @@ -*** Settings *** -Documentation Test suite for the arp handler module. -Suite Teardown Delete All Sessions -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${name} test -${key} subnetConfig -${REST_CONTEXT} /controller/nb/v2/subnetservice - -*** Test Cases *** -Add a subnet - [Documentation] Add a subnet, list to validate the result. - [Tags] add - ${body} Create Dictionary name=${name} subnet=10.0.0.254/8 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Put session ${REST_CONTEXT}/${CONTAINER}/subnet/${name} data=${body} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER}/subnets - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Contain Value ${content} ${body} - -Remove a subnet - [Documentation] Remove a subnet, list to validate the result. - [Tags] remove - ${body} Create Dictionary name=${name} subnet=10.0.0.254/8 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Delete session ${REST_CONTEXT}/${CONTAINER}/subnet/${name} - Should Be Equal As Strings ${resp.status_code} 204 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER}/subnets - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Not Contain Value ${content} ${body} diff --git a/tools/Robot_Tool/suites/base/container_manager.txt b/tools/Robot_Tool/suites/base/container_manager.txt deleted file mode 100644 index 74fa06e87b..0000000000 --- a/tools/Robot_Tool/suites/base/container_manager.txt +++ /dev/null @@ -1,46 +0,0 @@ -*** Settings *** -Documentation Test suite for the container manager module. 
-Suite Teardown Delete All Sessions -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${name} test_container1 -@{nodeconnectors} OF|1@OF|00:00:00:00:00:00:00:01 OF|23@OF|00:00:00:00:00:00:20:21 -${key} containerConfig -${REST_CONTEXT} /controller/nb/v2/containermanager - -*** Test Cases *** -Add a container - [Documentation] Add a container, list to validate the result. - [Tags] add - ${flowspecs} Convert To List ${EMPTY} - ${body} Create Dictionary container=${name} flowSpecs=${flowspecs} staticVlan=10 - ... nodeConnectors=${nodeconnectors} - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Put session ${REST_CONTEXT}/container/${name} data=${body} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - ${resp} Get session ${REST_CONTEXT}/containers - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Contain Value ${content} ${body} - -Remove a container - [Documentation] Remove a container, list to validate the result. - [Tags] remove - ${flowspecs} Convert To List ${EMPTY} - ${body} Create Dictionary container=${name} flowSpecs=${flowspecs} staticVlan=10 - ... nodeConnectors=${nodeconnectors} - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Delete session ${REST_CONTEXT}/container/${name} - Should Be Equal As Strings ${resp.status_code} 204 Response status code error - ${resp} Get session ${REST_CONTEXT}/containers - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Not Contain Value ${content} ${body} diff --git a/tools/Robot_Tool/suites/base/forwarding_manager.txt b/tools/Robot_Tool/suites/base/forwarding_manager.txt deleted file mode 100644 index b22e81a128..0000000000 --- a/tools/Robot_Tool/suites/base/forwarding_manager.txt +++ /dev/null @@ -1,41 +0,0 @@ -*** Settings *** -Documentation Test suite for the forwarding manager module. -Suite Teardown Delete All Sessions -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${name} test_route1 -${key} staticRoute -${REST_CONTEXT} /controller/nb/v2/staticroute - -*** Test Cases *** -Add a static route - [Documentation] Add a static route, list to validate the result. - [Tags] add - ${body} Create Dictionary name=${name} prefix=192.168.1.0/24 nextHop=10.0.0.2 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Put session ${REST_CONTEXT}/${CONTAINER}/route/${name} data=${body} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER}/routes - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Contain Value ${content} ${body} - -Remove a subnet - [Documentation] Remove a subnet, list to validate the result. 
- [Tags] remove - ${body} Create Dictionary name=${name} prefix=192.168.1.0/24 nextHop=10.0.0.2 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Delete session ${REST_CONTEXT}/${CONTAINER}/route/${name} - Should Be Equal As Strings ${resp.status_code} 204 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER}/routes - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Not Contain Value ${content} ${body} diff --git a/tools/Robot_Tool/suites/base/forwarding_rule_manager.txt b/tools/Robot_Tool/suites/base/forwarding_rule_manager.txt deleted file mode 100644 index 59a49d8534..0000000000 --- a/tools/Robot_Tool/suites/base/forwarding_rule_manager.txt +++ /dev/null @@ -1,48 +0,0 @@ -*** Settings *** -Documentation Test suite for the forwarding rule manager module. -Suite Teardown Delete All Sessions -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${name} flow1 -${key} flowConfig -${node_id} 00:00:00:00:00:00:00:02 -${REST_CONTEXT} /controller/nb/v2/flowprogrammer - -*** Test Cases *** -Add a flow - [Documentation] Add a flow, list to validate the result. - [Tags] add - ${node} Create Dictionary type=OF id=${node_id} - ${actions} Create List OUTPUT=1 - ${body} Create Dictionary name=${name} installInHw=true node=${node} - ... priority=1 etherType=0x800 nwDst=10.0.0.1/32 actions=${actions} - ${headers} Create Dictionary Content-Type application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Put session ${REST_CONTEXT}/${CONTAINER}/node/OF/${node_id}/staticFlow/${name} data=${body} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Contain Value ${content} ${body} - -Remove a flow - [Documentation] Remove a flow, list to validate the result. - [Tags] remove - ${node} Create Dictionary type=OF id=${node_id} - ${actions} Create List OUTPUT=1 - ${body} Create Dictionary name=${name} installInHw=true node=${node} - ... priority=1 etherType=0x800 nwDst=10.0.0.1/32 actions=${actions} - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Delete session ${REST_CONTEXT}/${CONTAINER}/node/OF/${node_id}/staticFlow/${name} - Should Be Equal As Strings ${resp.status_code} 204 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Not Contain Value ${content} ${body} diff --git a/tools/Robot_Tool/suites/base/host_tracker.txt b/tools/Robot_Tool/suites/base/host_tracker.txt deleted file mode 100644 index ed4259d205..0000000000 --- a/tools/Robot_Tool/suites/base/host_tracker.txt +++ /dev/null @@ -1,45 +0,0 @@ -*** Settings *** -Documentation Test suite for the host tracker module. 
-Suite Teardown Delete All Sessions -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${name} 10.0.1.4 -${key} hostConfig -${REST_CONTEXT} /controller/nb/v2/hosttracker - -*** Test Cases *** -Add a host - [Documentation] Add a host, list to validate the result. - [Tags] add - ${body} Create Dictionary nodeType=OF dataLayerAddress=5e:bf:79:84:10:a6 - ... vlan=1 nodeId=00:00:00:00:00:00:00:03 nodeConnectorId=9 - ... networkAddress=10.0.1.4 staticHost=${True} nodeConnectorType=OF - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Put session ${REST_CONTEXT}/${CONTAINER}/address/${name} data=${body} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER}/hosts/inactive - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Contain Value ${content} ${body} - -Remove a host - [Documentation] Remove a host, list to validate the result. - [Tags] remove - ${body} Create Dictionary nodeType=OF dataLayerAddress=5e:bf:79:84:10:a6 - ... vlan=1 nodeId=00:00:00:00:00:00:00:03 nodeConnectorId=9 - ... networkAddress=10.0.1.4 staticHost=${True} nodeConnectorType=OF - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Delete session ${REST_CONTEXT}/${CONTAINER}/address/${name} - Should Be Equal As Strings ${resp.status_code} 204 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER}/hosts/inactive - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Not Contain Value ${content} ${body} diff --git a/tools/Robot_Tool/suites/base/statistics_manager.txt b/tools/Robot_Tool/suites/base/statistics_manager.txt deleted file mode 100644 index 9aab569753..0000000000 --- a/tools/Robot_Tool/suites/base/statistics_manager.txt +++ /dev/null @@ -1,12 +0,0 @@ -*** Settings *** -Documentation Test suite for the statistics manager module. -Suite Teardown Delete All Sessions -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${REST_CONTEXT} /controller/nb/v2/statistics - -*** Test Cases *** diff --git a/tools/Robot_Tool/suites/base/switch_manager.txt b/tools/Robot_Tool/suites/base/switch_manager.txt deleted file mode 100644 index e17ff38d49..0000000000 --- a/tools/Robot_Tool/suites/base/switch_manager.txt +++ /dev/null @@ -1,143 +0,0 @@ -*** Settings *** -Documentation Test suite for the switch manager bundle. -Suite Setup Create Session ${ODL_CONTROLLER_SESSION} http://${CONTROLLER}:8080 auth=${AUTH} headers=${HEADERS} -Suite Teardown Delete All Sessions -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Library ../../libraries/SwitchManager.py -Variables ../../variables/Variables.py -Library ../../libraries/Topology.py - -*** Variables *** -${REST_CONTEXT} /controller/nb/v2/switchmanager - -*** Test Cases *** -List all nodes - [Documentation] List all nodes and their properties in the network. 
- [Tags] list_info - Log ${TOPO_TREE_LEVEL} - ${topo_nodes} Get Nodes From Topology ${TOPO_TREE_LEVEL} - ${resp} Get ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/nodes - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${jsondata}= To JSON ${resp.content} - ${nodes} Extract All Nodes ${jsondata} - Collection Should Contain ${nodes} ${topo_nodes} - -Get a node - [Documentation] Get a node - Get node 00:00:00:00:00:00:00:02 OF - -Add property to node - [Documentation] Add a property to node - Add property to node OF 00:00:00:00:00:00:00:02 description Switch2 - Node property should exist OF 00:00:00:00:00:00:00:02 description Switch2 - -Remove property from node - [Documentation] Remove a property from node - Remove property from node OF 00:00:00:00:00:00:00:02 description - Node property should not exist OF 00:00:00:00:00:00:00:02 description Switch2 - -Add property to nodeconnector - [Documentation] Add a property to nodeconnector - Add property to nodeconnector OF 00:00:00:00:00:00:00:02 OF 2 bandwidth 1000 - Nodeconnector property should exist OF 00:00:00:00:00:00:00:02 OF 2 bandwidth ${1000} - -Remove property from nodeconnector - [Documentation] Remove a property from nodeconnector - Remove property from nodeconnector OF 00:00:00:00:00:00:00:02 OF 2 bandwidth - Nodeconnector property should not exist OF 00:00:00:00:00:00:00:02 OF 2 bandwidth ${1000} - -*** Keywords *** -Get node - [Arguments] ${node_id} ${node_type} - [Documentation] Get a specific node - ${resp} Get ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/nodes - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} TO JSON ${resp.content} - ${node} Create Dictionary id=${node_id} type=${node_type} - ${content} Extract All Nodes ${result} - Log ${content} - List Should Contain Value ${content} ${node} - -Add property to node - [Arguments] ${node_type} ${node_id} ${property} ${value} - [Documentation] Add property to node - ${resp} Put ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/node/${node_type}/${node_id}/property/${property}/${value} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - -Remove property from node - [Arguments] ${node_type} ${node_id} ${property} - [Documentation] Remove property from node - ${resp} Delete ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/node/${node_type}/${node_id}/property/${property} - Should Be Equal As Strings ${resp.status_code} 204 Response status code error - -Add property to nodeconnector - [Arguments] ${node_type} ${node_id} ${nc_type} ${nc_id} ${property} ${value} - [Documentation] Add property to nodeconnector - ${resp} Put ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/nodeconnector/${node_type}/${node_id}/${nc_type}/${nc_id}/property/${property}/${value} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - -Remove property from nodeconnector - [Arguments] ${node_type} ${node_id} ${nc_type} ${nc_id} ${property} - [Documentation] Remove property from nodeconnector - ${resp} Delete ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/nodeconnector/${node_type}/${node_id}/${nc_type}/${nc_id}/property/${property} - Should Be Equal As Strings ${resp.status_code} 204 Response status code error - -Node property should exist - [Arguments] ${node_type} ${node_id} ${property} ${value} - [Documentation] Property of node should exist - ${resp} Get ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/nodes - Should Be Equal 
As Strings ${resp.status_code} 200 Response status code error - ${result} TO JSON ${resp.content} - Log ${result} - ${nodes} Extract All Nodes ${result} - ${property_values} Extract Node Property Values ${result} ${property} - ${node} Create Dictionary id=${node_id} type=${node_type} - ${property_value} Create Dictionary value=${value} - Log ${property_value} - List Should Contain Value ${nodes} ${node} - List Should Contain Value ${property_values} ${property_value} - -Node property should not exist - [Arguments] ${node_type} ${node_id} ${property} ${value} - [Documentation] Property of node should not exist - ${resp} Get ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/nodes - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} TO JSON ${resp.content} - Log ${result} - ${nodes} Extract All Nodes ${result} - ${properties} Extract Node Property Values ${result} ${property} - ${node} Create Dictionary id=${node_id} type=${node_type} - ${property} Create Dictionary value=${value} - Log ${property} - List Should Contain Value ${nodes} ${node} - List Should Not Contain Value ${properties} ${property} - -Nodeconnector property should exist - [Arguments] ${node_type} ${node_id} ${nc_type} ${nc_id} ${property} ${value} - [Documentation] Property of nodeconnector should exist - ${resp} Get ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/node/${node_type}/${node_id} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} TO JSON ${resp.content} - Log ${result} - ${property_values} Extract Nodeconnector Property Values ${result} ${property} - Log ${property_values} - ${property_value} Create Dictionary value=${value} - List Should Contain Value ${property_values} ${property_value} - -Nodeconnector property should not exist - [Arguments] ${node_type} ${node_id} ${nc_type} ${nc_id} ${property} ${value} - [Documentation] Property of nodeconnector should not exist - ${resp} Get ${ODL_CONTROLLER_SESSION} ${REST_CONTEXT}/${CONTAINER}/node/${node_type}/${node_id} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} TO JSON ${resp.content} - Log ${result} - ${property_values} Extract Nodeconnector Property Values ${result} ${property} - Log ${property_values} - ${property_value} Create Dictionary value=${value} - List Should not Contain Value ${property_values} ${property_value} - -List all nodeconnectors of node - [Arguments] ${node_type} ${node_id} - [Documentation] List all nodeconnectors and properties of node diff --git a/tools/Robot_Tool/suites/base/topology_manager.txt b/tools/Robot_Tool/suites/base/topology_manager.txt deleted file mode 100644 index 2ce0569b71..0000000000 --- a/tools/Robot_Tool/suites/base/topology_manager.txt +++ /dev/null @@ -1,45 +0,0 @@ -*** Settings *** -Documentation Test suite for the arp handler bundle. -Suite Teardown Delete All Sessions -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${name} test_userlink1 -${key} userLinks -${REST_CONTEXT} /controller/nb/v2/topology - -*** Test Cases *** -Add a userlink - [Documentation] Add a userlink, list to validate the result. - [Tags] add - ${body} Create Dictionary name=${name} status=Success - ... srcNodeConnector=OF|1@OF|00:00:00:00:00:00:00:02 - ... 
dstNodeConnector=OF|1@OF|00:00:00:00:00:00:00:03 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Put session ${REST_CONTEXT}/${CONTAINER}/userLink/${name} data=${body} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER}/userLinks - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Contain Value ${content} ${body} - -Remove a userlink - [Documentation] Remove a userlink, list to validate the result. - [Tags] remove - ${body} Create Dictionary name=${name} status=Success - ... srcNodeConnector=OF|1@OF|00:00:00:00:00:00:00:02 - ... dstNodeConnector=OF|1@OF|00:00:00:00:00:00:00:03 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${CONTROLLER}:8080 headers=${headers} auth=${auth} - ${resp} Delete session ${REST_CONTEXT}/${CONTAINER}/userLink/${name} - Should Be Equal As Strings ${resp.status_code} 204 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER}/userLinks - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Not Contain Value ${content} ${body} diff --git a/tools/Robot_Tool/suites/cluster/010__bridge_topology.txt b/tools/Robot_Tool/suites/cluster/010__bridge_topology.txt deleted file mode 100644 index 90fe924a88..0000000000 --- a/tools/Robot_Tool/suites/cluster/010__bridge_topology.txt +++ /dev/null @@ -1,68 +0,0 @@ -*** Settings *** -Documentation Introducing switches to both the controllers C1 and C2 -Suite Teardown Delete All Sessions -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${node1} "00:00:00:00:00:00:00:01" -${node2} "00:00:00:00:00:00:00:02" -${node3} "00:00:00:00:00:00:00:03" -${REST_CONTEXT} /controller/nb/v2/topology -${REST_CONTEXT_ST} /controller/nb/v2/statistics -${CONTROLLER1} 10.125.136.38:6633 -${CONTROLLER2} 10.125.136.43:6633 -${Controller1_IP} 10.125.136.38 -${Controller2_IP} 10.125.136.43 -${CTRL_USER} odluser -${FLOW} "10.0.0.1" - -*** Test Cases *** -Switches are introduced to both the controllers C1 & C2 - [Documentation] Switches are introduced to both the controllers C1 & C2 - [Tags] Set-Bridges - Switch Connection 1 - ${stdout1}= Execute Command sudo ovs-vsctl set-controller s1 tcp:${CONTROLLER1} tcp:${CONTROLLER2} - ${stdout2}= Execute Command sudo ovs-vsctl set-controller s2 tcp:${CONTROLLER1} tcp:${CONTROLLER2} - ${stdout2}= Execute Command sudo ovs-vsctl set-controller s3 tcp:${CONTROLLER1} tcp:${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s1" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s2" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s3" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - - 
-Verifying the topology in C1 - [Documentation] Get Topology for C1 and validate the result. - [Tags] get - Create Session session http://${Controller1_IP}:8080 headers=${HEADERS} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Sleep 10 - Should Be Equal As Strings ${resp.status_code} 200 - Log ${resp.content} - Should Contain X Times ${resp.content} ${node1} 4 - Should Contain X Times ${resp.content} ${node2} 2 - Should Contain X Times ${resp.content} ${node3} 2 - -Verifying the topology in C2 - [Documentation] Get Topology for C2 and validate the result. - [Tags] get - Create Session session http://${Controller2_IP}:8080 headers=${HEADERS} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Sleep 10 - Should Be Equal As Strings ${resp.status_code} 200 - Log ${resp.content} - Should Contain X Times ${resp.content} ${node1} 4 - Should Contain X Times ${resp.content} ${node2} 2 - Should Contain X Times ${resp.content} ${node3} 2 - diff --git a/tools/Robot_Tool/suites/cluster/015__FRM.txt b/tools/Robot_Tool/suites/cluster/015__FRM.txt deleted file mode 100644 index 3f94fa8f96..0000000000 --- a/tools/Robot_Tool/suites/cluster/015__FRM.txt +++ /dev/null @@ -1,59 +0,0 @@ -*** Settings *** -Documentation Introducing switches to both the controllers C1 and C2 -Suite Teardown Delete All Sessions -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${node1} "00:00:00:00:00:00:00:01" -${node2} "00:00:00:00:00:00:00:02" -${node3} "00:00:00:00:00:00:00:03" -${key} flowConfig -${name} flow1 -${REST_CONTEXT_P} /controller/nb/v2/flowprogrammer -${REST_CONTEXT_ST} /controller/nb/v2/statistics -${CONTROLLER1} 10.125.136.38:6633 -${CONTROLLER2} 10.125.136.43:6633 -${Controller1_IP} 10.125.136.38 -${Controller2_IP} 10.125.136.43 -${node_id} 00:00:00:00:00:00:00:02 -${FLOW} "10.0.0.1" - - -*** Test Cases *** -Add a flow in C1 - [Documentation] Adding a flow, list to validate the result in C1 - [Tags] add - ${node} Create Dictionary type=OF id=${node_id} - ${actions} Create List OUTPUT=1 - ${body} Create Dictionary name=${name} installInHw=true node=${node} - ... 
priority=1 etherType=0x800 nwDst=10.0.0.1/32 actions=${actions} - Create Session session http://${Controller1_IP}:8080 auth=${AUTH} headers=${HEADERS} - ${resp} Put session ${REST_CONTEXT_P}/${CONTAINER}/node/OF/${node_id}/staticFlow/${name} data=${body} - Should Be Equal As Strings ${resp.status_code} 201 - - -Check flow stats in C1 - [Documentation] Checking the flow stats and validating the result in C1 - [Tags] get - Sleep 10 - Create Session session http://${Controller1_IP}:8080 auth=${AUTH} headers=${HEADERS} - ${resp} Get session ${REST_CONTEXT_ST}/${CONTAINER}/flow - Should Be Equal As Strings ${resp.status_code} 200 - Log ${resp.content} - Should Contain ${resp.content} ${FLOW} - - -Check flow stats in C2 - [Documentation] Checking the flow stats and validating the result in C2 - [Tags] get - Sleep 10 - Create Session session http://${Controller2_IP}:8080 auth=${AUTH} headers=${HEADERS} - ${resp} Get session ${REST_CONTEXT_ST}/${CONTAINER}/flow - Should Be Equal As Strings ${resp.status_code} 200 - Log ${resp.content} - Should Contain ${resp.content} ${FLOW} - diff --git a/tools/Robot_Tool/suites/cluster/020__c1_fails.txt b/tools/Robot_Tool/suites/cluster/020__c1_fails.txt deleted file mode 100644 index 4e229b8aac..0000000000 --- a/tools/Robot_Tool/suites/cluster/020__c1_fails.txt +++ /dev/null @@ -1,67 +0,0 @@ -*** Settings *** -Documentation controller1 fails -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${node1} "00:00:00:00:00:00:00:01" -${node2} "00:00:00:00:00:00:00:02" -${node3} "00:00:00:00:00:00:00:03" -${REST_CONTEXT} /controller/nb/v2/topology -${REST_CONTEXT_ST} /controller/nb/v2/statistics -${CONTROLLER1} 10.125.136.38:6633 -${CONTROLLER2} 10.125.136.43:6633 -${Controller1_IP} 10.125.136.38 -${Controller2_IP} 10.125.136.43 -${CTRL_USER} odluser -${FLOW} "10.0.0.1" - -*** Test Cases *** -C1 fails - [Documentation] C1 fails suddenly - Open Connection ${Controller1_IP} prompt=$ - Login With Public Key odluser ${USER_HOME}/.ssh/id_rsa any - Write cd controller-base/opendaylight - Write ./run.sh -stop - Sleep 5 - Read - -C2 takes over and verifying the bridges - [Documentation] C2 is taking over from c1 failure - Switch Connection 1 - ${stdout}= Execute Command sudo ovs-vsctl show - Read - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - Sleep 5 - Should Contain X Times ${stdout} is_connected: true 3 - -Verifying the topology in C2 - [Documentation] Get Topology for C2 and validate the result. 
- [Tags] get - Create Session session http://${Controller2_IP}:8080 headers=${HEADERS} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 - Log ${resp.content} - Should Contain X Times ${resp.content} ${node1} 4 - Should Contain X Times ${resp.content} ${node2} 2 - Should Contain X Times ${resp.content} ${node3} 2 - -Check flow in flow stats in C2 - [Documentation] Checking the flow stats and validating the result in C2 - [Tags] get - Sleep 10 - Create Session session http://${Controller2_IP}:8080 headers=${HEADERS} auth=${auth} - ${resp} Get session ${REST_CONTEXT_ST}/${CONTAINER}/flow - Should Be Equal As Strings ${resp.status_code} 200 - Log ${resp.content} - Should Contain ${resp.content} ${FLOW} - - - - - - - diff --git a/tools/Robot_Tool/suites/cluster/025__c2_fails.txt b/tools/Robot_Tool/suites/cluster/025__c2_fails.txt deleted file mode 100644 index 310a95c7e6..0000000000 --- a/tools/Robot_Tool/suites/cluster/025__c2_fails.txt +++ /dev/null @@ -1,71 +0,0 @@ -*** Settings *** -Documentation controller1 fails -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - -*** Variables *** -${node1} "00:00:00:00:00:00:00:01" -${node2} "00:00:00:00:00:00:00:02" -${node3} "00:00:00:00:00:00:00:03" -${REST_CONTEXT} /controller/nb/v2/topology -${REST_CONTEXT_ST} /controller/nb/v2/statistics -${CONTROLLER1} 10.125.136.38:6633 -${CONTROLLER2} 10.125.136.43:6633 -${Controller1_IP} 10.125.136.38 -${Controller2_IP} 10.125.136.43 -${CTRL_USER} odluser -${FLOW} "10.0.0.1" - -*** Test Cases *** -C1 recovers from failure - [Documentation] C1 recovers from failure - Switch Connection 2 - Write cd controller-base/opendaylight - Write ./run.sh -start - Sleep 60 - Read - -C2 fails - [Documentation] C2 fails suddenly - Open Connection ${Controller2_IP} prompt=$ - Login With Public Key odluser ${USER_HOME}/.ssh/id_rsa any - Write cd controller-base/opendaylight - Write ./run.sh -stop - Sleep 5 - Read - -C1 takes over and verifying the bridges - [Documentation] C1 is taking over from C2 - Switch Connection 1 - ${stdout}= Execute Command sudo ovs-vsctl show - Read - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - -Verifying the topology in C1 - [Documentation] Get Topology for C1 and validate the result. 
- [Tags] get - Create Session session http://${Controller1_IP}:8080 headers=${HEADERS} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 - Log ${resp.content} - Should Contain X Times ${resp.content} ${node1} 4 - Should Contain X Times ${resp.content} ${node2} 2 - Should Contain X Times ${resp.content} ${node3} 2 - -Check flow in flow stats in C1 - [Documentation] Checking the flow stats and validating the result in C1 - [Tags] get - Sleep 10 - ${resp} Get session ${REST_CONTEXT_ST}/${CONTAINER}/flow - Should Be Equal As Strings ${resp.status_code} 200 - Log ${resp.content} - Should Contain ${resp.content} ${FLOW} - - - - - diff --git a/tools/Robot_Tool/suites/cluster/__init__.txt b/tools/Robot_Tool/suites/cluster/__init__.txt deleted file mode 100644 index 120a843302..0000000000 --- a/tools/Robot_Tool/suites/cluster/__init__.txt +++ /dev/null @@ -1,27 +0,0 @@ -*** Settings *** -Documentation Test suite for the OpenDaylight Cluster -Suite Setup Start Suite -Suite Teardown Stop Suite -Library SSHLibrary - -*** Variables *** -${start}= sudo mn --controller=remote,ip=${CONTROLLER},port=6633 --topo tree,2 - -*** Keywords *** -Start Suite - Log Start the test on the base edition - Open Connection ${MININET} prompt=> - Login With Public Key ${MININET_USER} ${USER_HOME}/.ssh/id_rsa any - Write sudo mn -c - Sleep 5 - Write ${start} - Sleep 20 - Read -Stop Suite - Log Stop the test on the base edition - Read - Write exit - Sleep 4 - Read - Close Connection - diff --git a/tools/Robot_Tool/suites/ha/005__two_controller_running.txt b/tools/Robot_Tool/suites/ha/005__two_controller_running.txt deleted file mode 100644 index 157c04950b..0000000000 --- a/tools/Robot_Tool/suites/ha/005__two_controller_running.txt +++ /dev/null @@ -1,154 +0,0 @@ -*** Settings *** -Documentation ODL controller clustering test case using the gherkin syntax. -... -... This test has a workflow similar to the keyword-driven -... examples. The difference is that the keywords use higher -... abstraction level and their arguments are embedded into -... the keyword names. -... -... This kind of _gherkin_ syntax has been made popular by -... [http://cukes.info|Cucumber]. It works well especially when -... tests act as examples that need to be easily understood also -... by the business people. 
-Resource resource.txt -Library Selenium2Library -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - - -*** Test Cases *** -Two controllers running - Given C1 a controller in cluster of two controllers - And C2 a controller in cluster of two controllers - And all switches are introduced to both controllers - When C1 is up and running - And C2 is up and running - Then the system is working with C1 and C2 - - - -*** Keywords *** -C1 is up and running - Open Browser To Login Page 1 - Input Username admin - Input Password admin - Submit Credentials - Welcome Page 1 Should Be Open - [Teardown] Close Browser - -C2 is up and running - Open Browser To Login Page 2 - Input Username admin - Input Password admin - Submit Credentials - Welcome Page 2 Should Be Open - [Teardown] Close Browser - - -Open Browser To Login Page 1 - Set Selenium Speed ${DELAY2} - Open Browser ${LOGIN URL1} ${BROWSER} - Maximize Browser Window - Login Page Should Be Open - -Open Browser To Login Page 2 - Set Selenium Speed ${DELAY2} - Open Browser ${LOGIN URL2} ${BROWSER} - Maximize Browser Window - Login Page Should Be Open - -Login Page Should Be Open - Title Should Be OpenDaylight - Login - -Input Username - [Arguments] ${username} - Input Text j_username ${username} - -Input Password - [Arguments] ${password} - Input Text j_password ${password} - -Submit Credentials - Click Button Log In - -Welcome Page 1 Should Be Open - Location Should Be ${LOGIN URL1} - Title Should Be OpenDaylight - -Welcome Page 2 Should Be Open - Location Should Be ${LOGIN URL2} - Title Should Be OpenDaylight - -C1 a controller in cluster of two controllers - Open Connection ${Controller1_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - - - -C2 a controller in cluster of two controllers - Open Connection ${Controller2_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - - -all switches are introduced to both controllers - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} -# ${stdout1}= Execute Command sudo ovs-vsctl set-controller s1 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s2 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s3 tcp:${CONTROLLER1} tcp:${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - Should Contain ${stdout} Bridge "s1" - Should Contain ${stdout} Bridge "s2" - Should Contain ${stdout} Bridge "s3" - Should Contain X Times ${stdout} is_connected: true 6 - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s1" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s2" - Should Contain 
${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s3" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - - -the system is working with C1 and C2 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller1_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - Log ${resp.content} - Should Contain X Times ${resp.content} ${nodeconn1} 2 - Should Contain X Times ${resp.content} ${nodeconn2} 2 - Should Contain X Times ${resp.content} ${nodeconn3} 2 - Should Contain X Times ${resp.content} ${nodeconn4} 2 - - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller2_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - Log ${resp.content} - Should Contain X Times ${resp.content} ${nodeconn1} 2 - Should Contain X Times ${resp.content} ${nodeconn2} 2 - Should Contain X Times ${resp.content} ${nodeconn3} 2 - Should Contain X Times ${resp.content} ${nodeconn4} 2 - - - diff --git a/tools/Robot_Tool/suites/ha/010__c1_fails.txt b/tools/Robot_Tool/suites/ha/010__c1_fails.txt deleted file mode 100644 index fd32d38947..0000000000 --- a/tools/Robot_Tool/suites/ha/010__c1_fails.txt +++ /dev/null @@ -1,115 +0,0 @@ -*** Settings *** -Documentation ODL controller clustering test case using the gherkin syntax. -... -... This test has a workflow similar to the keyword-driven -... examples. The difference is that the keywords use higher -... abstraction level and their arguments are embedded into -... the keyword names. -... -... This kind of _gherkin_ syntax has been made popular by -... [http://cukes.info|Cucumber]. It works well especially when -... tests act as examples that need to be easily understood also -... by the business people. 
-Resource resource.txt -Library Selenium2Library -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - - -*** Test Cases *** -Controller1 fails - Given C1 a controller in cluster of two controllers - And C2 a controller in cluster of two controllers - And all switches are introduced to both controllers - When C1 goes down - Then C2 takes over - And the system is working with C2 - - -*** Keywords *** -C1 goes down - Open Connection ${Controller1_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -stop - Set Selenium Speed ${DELAY2} - Open Browser ${LOGIN URL1} ${BROWSER} - Maximize Browser Window - Set Selenium Speed ${DELAY} - Location Should Be ${ERROR URL1} - Title Should Be Problem loading page - [Teardown] Close Browser - - -C1 a controller in cluster of two controllers -# Open Connection ${Controller1_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - - - -C2 a controller in cluster of two controllers -# Open Connection ${Controller2_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - - -all switches are introduced to both controllers - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} -# ${stdout1}= Execute Command sudo ovs-vsctl set-controller s1 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s2 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s3 tcp:${CONTROLLER1} tcp:${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - Should Contain ${stdout} Bridge "s1" - Should Contain ${stdout} Bridge "s2" - Should Contain ${stdout} Bridge "s3" - Should Contain X Times ${stdout} is_connected: true 6 - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s1" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s2" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s3" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - - -the system is working with C2 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller2_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - Log ${resp.content} - Should Contain X Times ${resp.content} ${nodeconn1} 2 - Should Contain X Times ${resp.content} ${nodeconn2} 2 - Should Contain X Times ${resp.content} ${nodeconn3} 2 - Should Contain X Times ${resp.content} ${nodeconn4} 2 - - 
-C2 takes over - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - Should Contain X Times ${stdout} is_connected: true 3 - - - - diff --git a/tools/Robot_Tool/suites/ha/015__c2_fails.txt b/tools/Robot_Tool/suites/ha/015__c2_fails.txt deleted file mode 100644 index bca9e7cb48..0000000000 --- a/tools/Robot_Tool/suites/ha/015__c2_fails.txt +++ /dev/null @@ -1,113 +0,0 @@ -*** Settings *** -Documentation ODL controller clustering test case using the gherkin syntax. -... -... This test has a workflow similar to the keyword-driven -... examples. The difference is that the keywords use higher -... abstraction level and their arguments are embedded into -... the keyword names. -... -... This kind of _gherkin_ syntax has been made popular by -... [http://cukes.info|Cucumber]. It works well especially when -... tests act as examples that need to be easily understood also -... by the business people. -Resource resource.txt -Library Selenium2Library -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - - -*** Test Cases *** -Controller2 fails - Given C1 a controller in cluster of two controllers - And C2 a controller in cluster of two controllers - And all switches are introduced to both controllers - When C2 goes down - Then C1 takes over - And the system is working with C1 - - - -*** Keywords *** -C2 goes down - Open Connection ${Controller2_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -stop - Set Selenium Speed ${DELAY2} - Open Browser ${LOGIN URL2} ${BROWSER} - Maximize Browser Window - Set Selenium Speed ${DELAY} - Location Should Be ${ERROR URL2} - Title Should Be Problem loading page - [Teardown] Close Browser - - -C1 a controller in cluster of two controllers -# Open Connection ${Controller1_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - - - -C2 a controller in cluster of two controllers -# Open Connection ${Controller2_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - - -all switches are introduced to both controllers - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} -# ${stdout1}= Execute Command sudo ovs-vsctl set-controller s1 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s2 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s3 tcp:${CONTROLLER1} tcp:${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - Should Contain ${stdout} Bridge "s1" - Should Contain ${stdout} Bridge "s2" - Should Contain ${stdout} Bridge "s3" - Should Contain X Times 
${stdout} is_connected: true 6 - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s1" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s2" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s3" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - - -the system is working with C1 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller1_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - Log ${resp.content} - Should Contain X Times ${resp.content} ${nodeconn1} 2 - Should Contain X Times ${resp.content} ${nodeconn2} 2 - Should Contain X Times ${resp.content} ${nodeconn3} 2 - Should Contain X Times ${resp.content} ${nodeconn4} 2 - - -C1 takes over - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - Should Contain X Times ${stdout} is_connected: true 3 - diff --git a/tools/Robot_Tool/suites/ha/020__c1_recovers.txt b/tools/Robot_Tool/suites/ha/020__c1_recovers.txt deleted file mode 100644 index daf15dd268..0000000000 --- a/tools/Robot_Tool/suites/ha/020__c1_recovers.txt +++ /dev/null @@ -1,130 +0,0 @@ -*** Settings *** -Documentation ODL controller clustering test case using the gherkin syntax. -... -... This test has a workflow similar to the keyword-driven -... examples. The difference is that the keywords use higher -... abstraction level and their arguments are embedded into -... the keyword names. -... -... This kind of _gherkin_ syntax has been made popular by -... [http://cukes.info|Cucumber]. It works well especially when -... tests act as examples that need to be easily understood also -... by the business people. 
-Resource resource.txt -Library Selenium2Library -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - - -*** Test Cases *** -Controller1 recovers after failure - Given C1 goes down - And C2 takes over -# And C2 a controller in cluster of two controllers -# And all switches are introduced to both controllers - When C1 recovers - Then the system is working with C1 and C2 - - -*** Keywords *** -C1 goes down - Open Connection ${Controller1_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -stop - Set Selenium Speed ${DELAY2} - Open Browser ${LOGIN URL1} ${BROWSER} - Maximize Browser Window - Set Selenium Speed ${DELAY} - Location Should Be ${ERROR URL1} - Title Should Be Problem loading page - [Teardown] Close Browser - - -C1 a controller in cluster of two controllers -# Open Connection ${Controller1_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - - - -C2 a controller in cluster of two controllers -# Open Connection ${Controller2_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - - -all switches are introduced to both controllers - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} -# ${stdout1}= Execute Command sudo ovs-vsctl set-controller s1 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s2 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s3 tcp:${CONTROLLER1} tcp:${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - Should Contain ${stdout} Bridge "s1" - Should Contain ${stdout} Bridge "s2" - Should Contain ${stdout} Bridge "s3" - Should Contain X Times ${stdout} is_connected: true 6 - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s1" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s2" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s3" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - - -the system is working with C1 and C2 - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller1_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - Log ${resp.content} - Should Contain X Times ${resp.content} ${nodeconn1} 2 - Should Contain X Times ${resp.content} ${nodeconn2} 2 - Should Contain X Times ${resp.content} ${nodeconn3} 2 - Should Contain X Times ${resp.content} ${nodeconn4} 2 - - 
${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller2_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - Log ${resp.content} - Should Contain X Times ${resp.content} ${nodeconn1} 2 - Should Contain X Times ${resp.content} ${nodeconn2} 2 - Should Contain X Times ${resp.content} ${nodeconn3} 2 - Should Contain X Times ${resp.content} ${nodeconn4} 2 - - -C2 takes over - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - Should Contain X Times ${stdout} is_connected: true 3 - - -C1 recovers - Open Connection ${Controller1_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - - diff --git a/tools/Robot_Tool/suites/ha/025__two_controllers_fail.txt b/tools/Robot_Tool/suites/ha/025__two_controllers_fail.txt deleted file mode 100644 index dcddbca15c..0000000000 --- a/tools/Robot_Tool/suites/ha/025__two_controllers_fail.txt +++ /dev/null @@ -1,113 +0,0 @@ -*** Settings *** -Documentation ODL controller clustering test case using the gherkin syntax. -... -... This test has a workflow similar to the keyword-driven -... examples. The difference is that the keywords use higher -... abstraction level and their arguments are embedded into -... the keyword names. -... -... This kind of _gherkin_ syntax has been made popular by -... [http://cukes.info|Cucumber]. It works well especially when -... tests act as examples that need to be easily understood also -... by the business people. 
-Resource resource.txt -Library Selenium2Library -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - - -*** Test Cases *** -Controller1 and Controller2 fail - Given C1 a controller in cluster of two controllers - And C2 a controllers in cluster of two controllers - And all switches are introduced to both controllers - When C1 goes down - And C2 goes down - Then the system does not work any more - - -*** Keywords *** -C1 goes down - Open Connection ${Controller1_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -stop - Set Selenium Speed ${DELAY2} - Open Browser ${LOGIN URL1} ${BROWSER} - Maximize Browser Window - Set Selenium Speed ${DELAY} - Location Should Be ${ERROR URL1} - Title Should Be Problem loading page - [Teardown] Close Browser - -C2 goes down - Open Connection ${Controller2_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -stop - Set Selenium Speed ${DELAY2} - Open Browser ${LOGIN URL2} ${BROWSER} - Maximize Browser Window - Set Selenium Speed ${DELAY} - Location Should Be ${ERROR URL2} - Title Should Be Problem loading page - [Teardown] Close Browser - - -C1 a controller in cluster of two controllers -# Open Connection ${Controller1_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - - - -C2 a controller in cluster of two controllers -# Open Connection ${Controller2_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - - -all switches are introduced to both controllers - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} -# ${stdout1}= Execute Command sudo ovs-vsctl set-controller s1 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s2 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s3 tcp:${CONTROLLER1} tcp:${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - Should Contain ${stdout} Bridge "s1" - Should Contain ${stdout} Bridge "s2" - Should Contain ${stdout} Bridge "s3" - Should Contain X Times ${stdout} is_connected: true 6 - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s1" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s2" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s3" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - - -the system does not work any more - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl 
show - Should Not Contain ${stdout} is_connected: true - - diff --git a/tools/Robot_Tool/suites/ha/030__two_controller_see_flow.txt b/tools/Robot_Tool/suites/ha/030__two_controller_see_flow.txt deleted file mode 100644 index 74471c154d..0000000000 --- a/tools/Robot_Tool/suites/ha/030__two_controller_see_flow.txt +++ /dev/null @@ -1,91 +0,0 @@ -*** Settings *** -Documentation ODL controller clustering test case using the gherkin syntax. -... -... This test has a workflow similar to the keyword-driven -... examples. The difference is that the keywords use higher -... abstraction level and their arguments are embedded into -... the keyword names. -... -... This kind of _gherkin_ syntax has been made popular by -... [http://cukes.info|Cucumber]. It works well especially when -... tests act as examples that need to be easily understood also -... by the business people. -Resource resource.txt -Library Selenium2Library -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py -Suite Teardown Delete All Sessions - -*** Test Cases *** -The installed flow can be seen in a cluster of two controllers - Given C1 a controller in cluster of two controllers - And C2 a controller in cluster of two controllers - When a flow is installed in a bridge - Then C1 see the flow - And C2 see the flow - - -*** Keywords *** -C1 a controller in cluster of two controllers -# Open Connection ${Controller1_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - - - -C2 a controller in cluster of two controllers -# Open Connection ${Controller2_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - - -a flow is installed in a bridge - ${node} Create Dictionary type=OF id=${node_id} - ${actions} Create List OUTPUT=1 - ${body} Create Dictionary name=${name} installInHw=true node=${node} - ... 
priority=995 etherType=0x800 nwDst=10.0.0.1/32 actions=${actions} - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller1_IP}:8080 headers=${headers} auth=${auth} - ${resp} Put session ${REST_CONTEXT}/${CONTAINER}/node/OF/${node_id}/staticFlow/${name} data=${body} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Contain Value ${content} ${body} - - -C1 see the flow - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller1_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT_2} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} actions - List Should Contain Value ${content} OUTPUT=1 - - -C2 see the flow - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller2_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT_2} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} actions - List Should Contain Value ${content} OUTPUT=1 - - - diff --git a/tools/Robot_Tool/suites/ha/035__installed_flow_remains.txt b/tools/Robot_Tool/suites/ha/035__installed_flow_remains.txt deleted file mode 100644 index ffc0ec4e12..0000000000 --- a/tools/Robot_Tool/suites/ha/035__installed_flow_remains.txt +++ /dev/null @@ -1,142 +0,0 @@ -*** Settings *** -Documentation ODL controller clustering test case using the gherkin syntax. -... -... This test has a workflow similar to the keyword-driven -... examples. The difference is that the keywords use higher -... abstraction level and their arguments are embedded into -... the keyword names. -... -... This kind of _gherkin_ syntax has been made popular by -... [http://cukes.info|Cucumber]. It works well especially when -... tests act as examples that need to be easily understood also -... by the business people. 
-Resource resource.txt -Library Selenium2Library -Library SSHLibrary -Library Collections -Library RequestsLibrary -Library ../../libraries/Common.py -Variables ../../variables/Variables.py - - - -The installed flow remains in the bridge after the controller failure - Given C1 a controller in cluster of two controllers - And C2 a controller in cluster of two controllers - And both controllers get provisioned on all OVS bridges - And a flow is installed in a bridge - And C1 see the flow - And C2 see the flow - And C1 goes down - When C1 recovers - Then C1 see the flow - - - -*** Keywords *** -C1 a controller in cluster of two controllers -# Open Connection ${Controller1_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - - - -C2 a controller in cluster of two controllers -# Open Connection ${Controller2_IP} prompt=> -# Login ${VM USERNAME} ${VM PASSWORD} -# Write cd controller-base/opendaylight -# Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - - -both controllers get provisioned on all OVS bridges - Open Connection ${HOST} - Login ${VM USERNAME} ${VM PASSWORD} -# ${stdout1}= Execute Command sudo ovs-vsctl set-controller s1 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s2 tcp:${CONTROLLER1} tcp:${CONTROLLER2} -# ${stdout2}= Execute Command sudo ovs-vsctl set-controller s3 tcp:${CONTROLLER1} tcp:${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl show - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER1}" 3 - Should Contain X Times ${stdout} Controller "tcp:${CONTROLLER2}" 3 - Should Contain ${stdout} Bridge "s1" - Should Contain ${stdout} Bridge "s2" - Should Contain ${stdout} Bridge "s3" - Should Contain X Times ${stdout} is_connected: true 6 - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s1" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s2" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - ${stdout}= Execute Command sudo ovs-vsctl get-controller "s3" - Should Contain ${stdout} ${CONTROLLER1} - Should Contain ${stdout} ${CONTROLLER2} - - -a flow is installed in a bridge - ${node} Create Dictionary type=OF id=${node_id} - ${actions} Create List OUTPUT=1 - ${body} Create Dictionary name=${name} installInHw=true node=${node} - ... 
priority=999 etherType=0x800 nwDst=10.0.0.1/32 actions=${actions} - ${headers} Create Dictionary Content-Type=application/json - Create Session session http://${Controller1_IP}:8080 headers=${headers} auth=${auth} - ${resp} Put session ${REST_CONTEXT}/${CONTAINER}/node/OF/${node_id}/staticFlow/${name} data=${body} - Should Be Equal As Strings ${resp.status_code} 201 Response status code error - ${resp} Get session ${REST_CONTEXT}/${CONTAINER} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} ${key} - List Should Contain Value ${content} ${body} - - -C1 goes down - Open Connection ${Controller1_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -stop - Set Selenium Speed ${DELAY2} - Open Browser ${LOGIN URL1} ${BROWSER} - Maximize Browser Window - Set Selenium Speed ${DELAY} - Location Should Be ${ERROR URL1} - Title Should Be Problem loading page - [Teardown] Close Browser - - -C1 recovers - Open Connection ${Controller1_IP} prompt=> - Login ${VM USERNAME} ${VM PASSWORD} - Write cd controller-base/opendaylight - Write sudo ./run.sh -Dsupernodes=${Controller1_IP}:${Controller2_IP} -start - - - -C1 see the flow - ${headers} Create Dictionary Content-Type application/json - Create Session session http://${Controller1_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT_2} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} actions - List Should Contain Value ${content} OUTPUT=1 - - - -C2 see the flow - ${headers} Create Dictionary Content-Type application/json - Create Session session http://${Controller2_IP}:8080 headers=${headers} auth=${auth} - ${resp} Get session ${REST_CONTEXT_2} - Should Be Equal As Strings ${resp.status_code} 200 Response status code error - ${result} To JSON ${resp.content} - ${content} Get From Dictionary ${result} actions - List Should Contain Value ${content} OUTPUT=1 - - diff --git a/tools/Robot_Tool/suites/ha/__init__.txt b/tools/Robot_Tool/suites/ha/__init__.txt deleted file mode 100644 index 703827c941..0000000000 --- a/tools/Robot_Tool/suites/ha/__init__.txt +++ /dev/null @@ -1,25 +0,0 @@ -*** Settings *** -Documentation Test suite for the OpenDaylight base edition -Suite Setup Start Suite -Suite Teardown Stop Suite -Library SSHLibrary - -*** Variables *** -${start}= sudo mn --controller=remote,ip=${CONTROLLER} --topo tree,2 - -*** Keywords *** -Start Suite - Log Start the test on the base edition - Open Connection ${MININET} prompt=> - Login With Public Key ${MININET_USER} ${USER_HOME}/.ssh/id_rsa any - Write ${start} - Sleep 65 - Read -Stop Suite - Log Stop the test on the base edition - Write exit - Sleep 2 - Write sudo mn -c - Sleep 2 - Read - Close Connection diff --git a/tools/Robot_Tool/suites/ha/resource.txt b/tools/Robot_Tool/suites/ha/resource.txt deleted file mode 100644 index 48ecf9be14..0000000000 --- a/tools/Robot_Tool/suites/ha/resource.txt +++ /dev/null @@ -1,38 +0,0 @@ -*** Settings *** -Documentation A resource file with reusable keywords and variables. -... -... The system specific keywords created here form our own -... domain specific language. They utilize keywords provided -... by the imported Selenium2Library. 
- -*** Variables *** -${CONTROLLER1} 192.168.56.101:6633 -${CONTROLLER2} 192.168.56.102:6633 -${Controller1_IP} 192.168.56.101 -${Controller2_IP} 192.168.56.102 -${HOST} 192.168.56.103 -${BROWSER} Firefox -${DELAY} 0 -${DELAY2} 40 -${USER} admin -${PASSWORD} admin -${SERVER1} 192.168.56.101:8080 -${SERVER2} 192.168.56.102:8080 -${LOGIN URL1} http://${SERVER1}/ -${LOGIN URL2} http://${SERVER2}/ -${ERROR URL1} http://${SERVER1}/ -${ERROR URL2} http://${SERVER2}/ -${VM USERNAME} odluser -${VM PASSWORD} odluser1 - -${nodeconn1} NodeConnector":{"type":"OF","node":{"type":"OF","id":"00:00:00:00:00:00:00:01"},"id":"1"} -${nodeconn2} NodeConnector":{"type":"OF","node":{"type":"OF","id":"00:00:00:00:00:00:00:01"},"id":"2"} -${nodeconn3} NodeConnector":{"type":"OF","node":{"type":"OF","id":"00:00:00:00:00:00:00:02"},"id":"3"} -${nodeconn4} NodeConnector":{"type":"OF","node":{"type":"OF","id":"00:00:00:00:00:00:00:03"},"id":"3"} - - -${name} flow995 -${key} flowConfig -${node_id} 00:00:00:00:00:00:00:02 -${REST_CONTEXT} /controller/nb/v2/flowprogrammer -${REST_CONTEXT_2} /controller/nb/v2/statistics/default/flow diff --git a/tools/Robot_Tool/variables/README.md b/tools/Robot_Tool/variables/README.md deleted file mode 100644 index 53ce041e52..0000000000 --- a/tools/Robot_Tool/variables/README.md +++ /dev/null @@ -1 +0,0 @@ -This directory stores all predefined variables. diff --git a/tools/Robot_Tool/variables/Variables.py b/tools/Robot_Tool/variables/Variables.py deleted file mode 100644 index 47f108fce5..0000000000 --- a/tools/Robot_Tool/variables/Variables.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Library for the robot based system test tool of the OpenDaylight project. -Authors: Baohua Yang@IBM, Denghui Huang@IBM -Updated: 2013-11-14 -""" - -# Global variables -CONTROLLER = '127.0.0.1' -PORT = '8080' -PREFIX = 'http://' + CONTROLLER + ':' + PORT -CONTAINER = 'default' -USER = 'admin' -PWD = 'admin' -AUTH = [u'admin', u'admin'] -HEADERS = {'Content-Type': 'application/json'} -ODL_CONTROLLER_SESSION = None -TOPO_TREE_LEVEL = 2 diff --git a/tools/clustering/cluster-deployer/deploy.py b/tools/clustering/cluster-deployer/deploy.py index 102742f035..a948a1bc0b 100755 --- a/tools/clustering/cluster-deployer/deploy.py +++ b/tools/clustering/cluster-deployer/deploy.py @@ -212,27 +212,27 @@ class Deployer: distribution_name + " " + self.dir_name + "/odl") # Copy all the generated files to the server - self.remote.mkdir(self.dir_name - + "/odl/configuration/initial") - self.remote.copy_file(akka_conf, self.dir_name - + "/odl/configuration/initial/") - self.remote.copy_file(module_shards_conf, self.dir_name - + "/odl/configuration/initial/") - self.remote.copy_file(modules_conf, self.dir_name - + "/odl/configuration/initial/") - self.remote.copy_file(features_cfg, self.dir_name - + "/odl/etc/") - self.remote.copy_file(jolokia_xml, self.dir_name - + "/odl/deploy/") - self.remote.copy_file(management_cfg, self.dir_name - + "/odl/etc/") + self.remote.mkdir(self.dir_name + + "/odl/configuration/initial") + self.remote.copy_file(akka_conf, self.dir_name + + "/odl/configuration/initial/") + self.remote.copy_file(module_shards_conf, self.dir_name + + "/odl/configuration/initial/") + self.remote.copy_file(modules_conf, self.dir_name + + "/odl/configuration/initial/") + self.remote.copy_file(features_cfg, self.dir_name + + "/odl/etc/") + self.remote.copy_file(jolokia_xml, self.dir_name + + "/odl/deploy/") + self.remote.copy_file(management_cfg, self.dir_name + + "/odl/etc/") if datastore_cfg is not None: 
self.remote.copy_file(datastore_cfg, self.dir_name + "/odl/etc/") # Add symlink - self.remote.exec_cmd("ln -sfn " + self.dir_name + " " - + args.rootdir + "/deploy/current") + self.remote.exec_cmd("ln -sfn " + self.dir_name + " " + + args.rootdir + "/deploy/current") # Run karaf self.remote.start_controller(self.dir_name) @@ -258,10 +258,10 @@ def main(): replicas = {} for x in range(0, len(hosts)): - ds_seed_nodes.append("akka.tcp://opendaylight-cluster-data@" - + hosts[x] + ":2550") - rpc_seed_nodes.append("akka.tcp://odl-cluster-rpc@" - + hosts[x] + ":2551") + ds_seed_nodes.append("akka.tcp://opendaylight-cluster-data@" + + hosts[x] + ":2550") + rpc_seed_nodes.append("akka.tcp://odl-cluster-rpc@" + + hosts[x] + ":2551") all_replicas.append("member-" + str(x + 1)) for x in range(0, 10): diff --git a/tools/clustering/cluster-monitor/isolate.py b/tools/clustering/cluster-monitor/isolate.py index d5960a9129..a1eebc963d 100644 --- a/tools/clustering/cluster-monitor/isolate.py +++ b/tools/clustering/cluster-monitor/isolate.py @@ -31,9 +31,17 @@ Usage:python isolate.py [controller to be isolated] """ import sys -sys.path.append('../../../csit/libraries') -import UtilLibrary -import json + + +def import_utility_modules(): + global UtilLibrary, json + import sys + sys.path.append('../../../csit/libraries') + import UtilLibrary + import json + + +import_utility_modules() try: with open('cluster.json') as cluster_file: diff --git a/tools/clustering/cluster-monitor/rejoin.py b/tools/clustering/cluster-monitor/rejoin.py index 92507a0ffd..172788c0ae 100644 --- a/tools/clustering/cluster-monitor/rejoin.py +++ b/tools/clustering/cluster-monitor/rejoin.py @@ -28,9 +28,17 @@ Usage:python rejoin.py """ import sys -sys.path.append('../../../csit/libraries') -import UtilLibrary -import json + + +def import_utility_modules(): + global UtilLibrary, json + import sys + sys.path.append('../../../csit/libraries') + import UtilLibrary + import json + + +import_utility_modules() try: with open('cluster.json') as cluster_file: diff --git a/tools/clustering/cluster-monitor/timed_isolation.py b/tools/clustering/cluster-monitor/timed_isolation.py index 8492575afc..8406e592ef 100644 --- a/tools/clustering/cluster-monitor/timed_isolation.py +++ b/tools/clustering/cluster-monitor/timed_isolation.py @@ -32,11 +32,19 @@ Usage:python timed_isolation.py [controller to be isolated] [duration of isolat """ import sys -sys.path.append('../../../csit/libraries') -import UtilLibrary -import json import time + +def import_utility_modules(): + global UtilLibrary, json + import sys + sys.path.append('../../../csit/libraries') + import UtilLibrary + import json + + +import_utility_modules() + try: with open('cluster.json') as cluster_file: data = json.load(cluster_file) diff --git a/tools/fastbgp/bgp_app_peer.py b/tools/fastbgp/bgp_app_peer.py index 2af032abd6..a57c64b7ee 100755 --- a/tools/fastbgp/bgp_app_peer.py +++ b/tools/fastbgp/bgp_app_peer.py @@ -6,11 +6,6 @@ # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html -__author__ = "Radovan Sajben" -__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." -__license__ = "Eclipse Public License v1.0" -__email__ = "rsajben@cisco.com" - import requests import ipaddr import argparse @@ -19,6 +14,12 @@ import time import xml.dom.minidom as md +__author__ = "Radovan Sajben" +__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." 
+__license__ = "Eclipse Public License v1.0" +__email__ = "rsajben@cisco.com" + + def _build_url(odl_ip, port, uri): """Compose URL from generic IP, port and URI fragment. diff --git a/tools/fastbgp/play.py b/tools/fastbgp/play.py index e79dfaf3d5..cd0ce016ac 100755 --- a/tools/fastbgp/play.py +++ b/tools/fastbgp/play.py @@ -11,11 +11,6 @@ EXABGP in this type of scenario.""" # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html -__author__ = "Vratko Polak" -__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." -__license__ = "Eclipse Public License v1.0" -__email__ = "vrpolak@cisco.com" - import argparse import binascii import ipaddr @@ -29,6 +24,12 @@ import thread from copy import deepcopy +__author__ = "Vratko Polak" +__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." +__license__ = "Eclipse Public License v1.0" +__email__ = "vrpolak@cisco.com" + + def parse_arguments(): """Use argparse to get arguments, @@ -689,8 +690,9 @@ class MessageGenerator(object): "\x41" # "32 bit AS Numbers Support" # (see RFC 6793, section 3) "\x04" # Capability value length - # My AS in 32 bit format - + struct.pack(">I", my_autonomous_system) + ) + optional_parameter_hex += ( + struct.pack(">I", my_autonomous_system) # My AS in 32 bit format ) optional_parameters_hex += optional_parameter_hex @@ -812,13 +814,17 @@ class MessageGenerator(object): "\x06" # Length (6) "\x02" # AS segment type (AS_SEQUENCE) "\x01" # AS segment length (1) - # AS segment (4 bytes) - + struct.pack(">I", my_autonomous_system) + + ) + my_AS = struct.pack(">I", my_autonomous_system) + path_attributes_hex += my_AS # AS segment (4 bytes) + path_attributes_hex += ( "\x40" # Flags ("Well-Known") "\x03" # Type (NEXT_HOP) "\x04" # Length (4) - # IP address of the next hop (4 bytes) - + struct.pack(">I", int(next_hop)) + ) + next_hop = struct.pack(">I", int(next_hop)) + path_attributes_hex += ( + next_hop # IP address of the next hop (4 bytes) ) else: path_attributes_hex = "" diff --git a/tools/mdsal_benchmark/dsbenchmark.py b/tools/mdsal_benchmark/dsbenchmark.py index 43e3242e98..6cc20f00eb 100755 --- a/tools/mdsal_benchmark/dsbenchmark.py +++ b/tools/mdsal_benchmark/dsbenchmark.py @@ -1,8 +1,4 @@ #!/usr/bin/python -__author__ = "Jan Medved" -__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "jmedved@cisco.com" import argparse import requests @@ -11,6 +7,13 @@ import csv import time import re + +__author__ = "Jan Medved" +__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." +__license__ = "New-style BSD" +__email__ = "jmedved@cisco.com" + + parser = argparse.ArgumentParser(description='Datastore Benchmarking' '' 'See documentation @:' diff --git a/tools/mdsal_benchmark/ntfbenchmark.py b/tools/mdsal_benchmark/ntfbenchmark.py index 1d887e3c90..1fd649b692 100755 --- a/tools/mdsal_benchmark/ntfbenchmark.py +++ b/tools/mdsal_benchmark/ntfbenchmark.py @@ -7,15 +7,17 @@ # and is available at http://www.eclipse.org/legal/epl-v10.html ############################################################################## +import argparse +import requests +import json +import csv + + __author__ = "Jan Medved" __copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." 
__license__ = "Eclipse Public License v1.0" __email__ = "jmedved@cisco.com" -import argparse -import requests -import json -import csv global BASE_URL diff --git a/tools/mdsal_benchmark/rpcbenchmark.py b/tools/mdsal_benchmark/rpcbenchmark.py index b664040672..5e091fd02e 100755 --- a/tools/mdsal_benchmark/rpcbenchmark.py +++ b/tools/mdsal_benchmark/rpcbenchmark.py @@ -7,15 +7,17 @@ # and is available at http://www.eclipse.org/legal/epl-v10.html ############################################################################## +import argparse +import requests +import json +import csv + + __author__ = "Jan Medved" __copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." __license__ = "Eclipse Public License v1.0" __email__ = "jmedved@cisco.com" -import argparse -import requests -import json -import csv global BASE_URL diff --git a/tools/netconf_tools/getter.py b/tools/netconf_tools/getter.py index 8289209911..cac22dd203 100644 --- a/tools/netconf_tools/getter.py +++ b/tools/netconf_tools/getter.py @@ -23,12 +23,6 @@ overhead of context switching remains). # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html -__author__ = "Vratko Polak" -__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." -__license__ = "Eclipse Public License v1.0" -__email__ = "vrpolak@cisco.com" - - import argparse import collections # For deque and Counter. import threading @@ -36,6 +30,12 @@ import time import AuthStandalone +__author__ = "Vratko Polak" +__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." +__license__ = "Eclipse Public License v1.0" +__email__ = "vrpolak@cisco.com" + + def str2bool(text): """Utility converter, based on http://stackoverflow.com/a/19227287""" return text.lower() in ("yes", "true", "y", "t", "1") @@ -129,11 +129,9 @@ def run_thread(thread_target, *thread_args): args = parse_arguments() # Construct the work for the workers. -url_start = ( - 'config/' - "network-topology:network-topology/topology/topology-netconf/node/" - + args.name + "-" -) +url_start = 'config/network-topology:network-topology/' +url_start += "topology/topology-netconf/node/" +url_start += args.name + "-" url_end = "/yang-ext:mount" headers = {'Content-Type': 'application/xml', "Accept": "application/xml"} kwargs = {"headers": headers} diff --git a/tools/odl-mdsal-clustering-tests/clustering-functional-test/crud.py b/tools/odl-mdsal-clustering-tests/clustering-functional-test/crud.py index 84baac7fad..4f8124995e 100644 --- a/tools/odl-mdsal-clustering-tests/clustering-functional-test/crud.py +++ b/tools/odl-mdsal-clustering-tests/clustering-functional-test/crud.py @@ -1,10 +1,12 @@ +import sys +import util +import settings + + __author__ = "Basheeruddin Ahmed" __copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." __license__ = "New-style BSD" __email__ = "syedbahm@cisco.com" -import sys -import util -import settings def addCar(numberOfCars): diff --git a/tools/odl-mdsal-clustering-tests/clustering-functional-test/settings.py b/tools/odl-mdsal-clustering-tests/clustering-functional-test/settings.py index c7d9bf8d99..053cde70e4 100644 --- a/tools/odl-mdsal-clustering-tests/clustering-functional-test/settings.py +++ b/tools/odl-mdsal-clustering-tests/clustering-functional-test/settings.py @@ -1,8 +1,3 @@ -__author__ = "Basheeruddin Ahmed" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." 
-__license__ = "New-style BSD" -__email__ = "syedbahm@cisco.com" - from string import Template # helps in taking the hostname entered by the user @@ -10,6 +5,12 @@ global hostname global port +__author__ = "Basheeruddin Ahmed" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." +__license__ = "New-style BSD" +__email__ = "syedbahm@cisco.com" + + def getServer(): return hostname + ":" + port # noqa diff --git a/tools/odl-mdsal-clustering-tests/clustering-functional-test/util.py b/tools/odl-mdsal-clustering-tests/clustering-functional-test/util.py index 1c81235c69..aa34bc0d2b 100644 --- a/tools/odl-mdsal-clustering-tests/clustering-functional-test/util.py +++ b/tools/odl-mdsal-clustering-tests/clustering-functional-test/util.py @@ -1,12 +1,12 @@ +import requests + + __author__ = "Basheeruddin Ahmed" __copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." __license__ = "New-style BSD" __email__ = "syedbahm@cisco.com" -import requests - - def get(url, userId, password): """Helps in making GET REST calls""" headers = {} diff --git a/tools/odl-mdsal-clustering-tests/clustering-performance-test/config_cleanup.py b/tools/odl-mdsal-clustering-tests/clustering-performance-test/config_cleanup.py index 6fe7d714a0..1dd17f273e 100755 --- a/tools/odl-mdsal-clustering-tests/clustering-performance-test/config_cleanup.py +++ b/tools/odl-mdsal-clustering-tests/clustering-performance-test/config_cleanup.py @@ -1,12 +1,14 @@ #!/usr/bin/python +import argparse +import requests +import sys + + __author__ = "Jan Medved" __copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." __license__ = "New-style BSD" __email__ = "jmedved@cisco.com" -import argparse -import requests -import sys getheaders = {'Accept': 'application/json'} diff --git a/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_add_delete_test.py b/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_add_delete_test.py index 8f7215cbad..ecbcbb971d 100755 --- a/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_add_delete_test.py +++ b/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_add_delete_test.py @@ -1,10 +1,5 @@ #!/usr/bin/python -__author__ = "Jan Medved" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "jmedved@cisco.com" - import argparse import time from flow_config_blaster import FlowConfigBlaster, get_json_from_file @@ -12,6 +7,12 @@ from inventory_crawler import InventoryCrawler from config_cleanup import cleanup_config_odl +__author__ = "Jan Medved" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." +__license__ = "New-style BSD" +__email__ = "jmedved@cisco.com" + + def wait_for_stats(crawler, exp_found, timeout, delay): """ Waits for the ODL stats manager to catch up. Polls ODL inventory every diff --git a/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_config_blaster.py b/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_config_blaster.py index d7e5bb9932..ddf3bb1239 100755 --- a/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_config_blaster.py +++ b/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_config_blaster.py @@ -1,8 +1,4 @@ #!/usr/bin/python -__author__ = "Jan Medved" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." 
-__license__ = "New-style BSD" -__email__ = "jmedved@cisco.com" from random import randrange import json @@ -16,6 +12,12 @@ import requests import netaddr +__author__ = "Jan Medved" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." +__license__ = "New-style BSD" +__email__ = "jmedved@cisco.com" + + class Counter(object): def __init__(self, start=0): self.lock = threading.Lock() diff --git a/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_config_blaster_fle.py b/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_config_blaster_fle.py index ac531254f0..3c3535d5f3 100755 --- a/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_config_blaster_fle.py +++ b/tools/odl-mdsal-clustering-tests/clustering-performance-test/flow_config_blaster_fle.py @@ -1,9 +1,4 @@ #!/usr/bin/python -__author__ = "Jan Medved" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "jmedved@cisco.com" - from flow_config_blaster import FlowConfigBlaster import argparse import netaddr @@ -11,6 +6,12 @@ import time import json +__author__ = "Jan Medved" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." +__license__ = "New-style BSD" +__email__ = "jmedved@cisco.com" + + class FlowConfigBlasterFLE(FlowConfigBlaster): """ FlowConfigBlaster, Floodlight Edition; Uses the Floodlight Static Flow Entry Pusher REST API to inject flows. diff --git a/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_crawler.py b/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_crawler.py index 2712314b39..f24c0f724d 100755 --- a/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_crawler.py +++ b/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_crawler.py @@ -1,15 +1,16 @@ #!/usr/bin/python -__author__ = "Jan Medved" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "jmedved@cisco.com" - import argparse import requests import re import json +__author__ = "Jan Medved" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." +__license__ = "New-style BSD" +__email__ = "jmedved@cisco.com" + + class InventoryCrawler(object): reported_flows = 0 found_flows = 0 diff --git a/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_perf.py b/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_perf.py index 2a4cfa925d..1309fefab7 100644 --- a/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_perf.py +++ b/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_perf.py @@ -1,14 +1,15 @@ -__author__ = "Jan Medved" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "jmedved@cisco.com" - import argparse import requests import time import threading +__author__ = "Jan Medved" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." 
+__license__ = "New-style BSD" +__email__ = "jmedved@cisco.com" + + class Counter(object): def __init__(self, start=0): self.lock = threading.Lock() diff --git a/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_read_blaster.py b/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_read_blaster.py index 94a3c4d5e2..eefe01fa42 100755 --- a/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_read_blaster.py +++ b/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_read_blaster.py @@ -1,9 +1,5 @@ #!/usr/bin/python -__author__ = "Gary Wu" -__email__ = "gary.wu1@huawei.com" - - import requests import argparse import time @@ -14,6 +10,11 @@ import collections from Queue import Queue + +__author__ = "Gary Wu" +__email__ = "gary.wu1@huawei.com" + + GET_HEADERS = {'Accept': 'application/json'} INVENTORY_URL = 'http://%s:%d/restconf/%s/opendaylight-inventory:nodes' diff --git a/tools/odl-mdsal-clustering-tests/clustering-performance-test/pretty_print.py b/tools/odl-mdsal-clustering-tests/clustering-performance-test/pretty_print.py index 5cc4fe300c..8d503a30e4 100755 --- a/tools/odl-mdsal-clustering-tests/clustering-performance-test/pretty_print.py +++ b/tools/odl-mdsal-clustering-tests/clustering-performance-test/pretty_print.py @@ -1,11 +1,13 @@ #!/usr/bin/python +import json +import sys + + __author__ = "Jan Medved" __copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." __license__ = "New-style BSD" __email__ = "jmedved@cisco.com" -import json -import sys if __name__ == "__main__": diff --git a/tools/odl-mdsal-clustering-tests/clustering-performance-test/shard_perf_test.py b/tools/odl-mdsal-clustering-tests/clustering-performance-test/shard_perf_test.py index 328975a1e8..6b631eaaf4 100755 --- a/tools/odl-mdsal-clustering-tests/clustering-performance-test/shard_perf_test.py +++ b/tools/odl-mdsal-clustering-tests/clustering-performance-test/shard_perf_test.py @@ -1,9 +1,4 @@ #!/usr/bin/python -__author__ = "Jan Medved" -__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." -__license__ = "New-style BSD" -__email__ = "jmedved@cisco.com" - from random import randrange import json import argparse @@ -13,6 +8,12 @@ import sys import requests +__author__ = "Jan Medved" +__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc." +__license__ = "New-style BSD" +__email__ = "jmedved@cisco.com" + + class Counter(object): def __init__(self, start=0): self.lock = threading.Lock() diff --git a/tools/odl-mdsal-clustering-tests/scripts/cluster_rest_script.py b/tools/odl-mdsal-clustering-tests/scripts/cluster_rest_script.py index 81c1ecf669..ce6342d37b 100644 --- a/tools/odl-mdsal-clustering-tests/scripts/cluster_rest_script.py +++ b/tools/odl-mdsal-clustering-tests/scripts/cluster_rest_script.py @@ -666,11 +666,10 @@ if __name__ == "__main__": if (args.action not in _handler_matrix or args.itemtype not in _handler_matrix[args.action]): - logger.error("Unsupported combination of action: " + - str(args.action) + " and item: " + str(args.itemtype)) - raise NotImplementedError("Unsupported combination of action: " - + str(args.action) + - " and item: " + str(args.itemtype)) + msg = "Unsupported combination of action: " + str(args.action) + msg += " and item: " + str(args.itemtype) + logger.error(msg) + raise NotImplementedError(msg) # TODO: need to filter out situations when we cannot use more items # in one rest request (rpc or delete?) 
diff --git a/tools/odl-ovsdb-performance-tests/ovsdbconfigblaster.py b/tools/odl-ovsdb-performance-tests/ovsdbconfigblaster.py index 7065909ffa..2be7fc1d7a 100644 --- a/tools/odl-ovsdb-performance-tests/ovsdbconfigblaster.py +++ b/tools/odl-ovsdb-performance-tests/ovsdbconfigblaster.py @@ -1,6 +1,11 @@ """ Script to add bridges/ports/termination points to ovsdb config """ +import argparse +import logging +import requests + + __author__ = 'Marcus Williams' __copyright__ = "Copyright (c) 2015, Intel Corp Inc., Cisco Systems Inc. and others" __credits__ = ["Jan Medved, Lori Jakab"] @@ -8,10 +13,6 @@ __license__ = "New-style BSD" __email__ = "marcus.williams@intel.com" __version__ = "0.0.1" -import argparse -import logging -import requests - class OvsdbConfigBlaster (object): PUT_HEADERS = {'Content-Type': 'application/json', @@ -102,19 +103,18 @@ class OvsdbConfigBlaster (object): 'ip': vswitch_ip, 'remote-ip': vswitch_remote_ip, 'ovsdb-port': vswitch_ovsdb_port, - 'node-id': 'ovsdb://%s:%s' - % (vswitch_ip, - vswitch_ovsdb_port), - 'post-url': urlprefix - + OvsdbConfigBlaster.return_ovsdb_url( + 'node-id': 'ovsdb://%s:%s' % + (vswitch_ip, vswitch_ovsdb_port), + 'post-url': urlprefix + + OvsdbConfigBlaster.return_ovsdb_url( vswitch_ip, vswitch_ovsdb_port), - 'get-config-url': urlprefix - + OvsdbConfigBlaster.return_ovsdb_url( + 'get-config-url': urlprefix + + OvsdbConfigBlaster.return_ovsdb_url( vswitch_ip, vswitch_ovsdb_port), - 'get-oper-url': urlprefix - + OvsdbConfigBlaster.return_ovsdb_url( + 'get-oper-url': urlprefix + + OvsdbConfigBlaster.return_ovsdb_url( vswitch_ip, vswitch_ovsdb_port)}}) @@ -181,9 +181,9 @@ class OvsdbConfigBlaster (object): } self.send_rest(self.session, self.vswitch_dict[vswitch_name] - .get('post-url') - + '%2Fbridge%2F' - + bridge_name, + .get('post-url') + + '%2Fbridge%2F' + + bridge_name, add_bridge_body) self.session.close() diff --git a/tools/pcep_updater/updater.py b/tools/pcep_updater/updater.py index a126ffe7c4..aa711eac2b 100644 --- a/tools/pcep_updater/updater.py +++ b/tools/pcep_updater/updater.py @@ -31,12 +31,6 @@ But that may not be true for more mature implementation. # terms of the Eclipse Public License v1.0 which accompanies this distribution, # and is available at http://www.eclipse.org/legal/epl-v10.html -__author__ = "Vratko Polak" -__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." -__license__ = "Eclipse Public License v1.0" -__email__ = "vrpolak@cisco.com" - - import argparse import collections # For deque and Counter. import ipaddr @@ -49,6 +43,12 @@ except ImportError: # Python 2.6 does not have Counter in collections. import AuthStandalone +__author__ = "Vratko Polak" +__copyright__ = "Copyright(c) 2015, Cisco Systems, Inc." +__license__ = "Eclipse Public License v1.0" +__email__ = "vrpolak@cisco.com" + + def str2bool(text): """Utility converter, based on http://stackoverflow.com/a/19227287""" return text.lower() in ("yes", "true", "y", "t", "1") -- 2.36.6
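
The cluster-monitor scripts (isolate.py, rejoin.py, timed_isolation.py) keep their sys.path tweak by moving it, together with the imports that depend on it, into a small wrapper function that is called immediately, so the path is still extended before UtilLibrary is loaded while the top of the file keeps a conventional import section. Below is a generic sketch of that wrapper under assumed names; the appended path and the stand-in module are placeholders, not the ones used by those scripts.

    import sys


    def import_helper_modules():
        # Extend the search path, then bind the late imports at module scope.
        global json
        sys.path.append('./extra_libs')   # placeholder path, assumption only
        import json                       # stands in for UtilLibrary


    import_helper_modules()

    # After the call the module is usable exactly as if imported at the top.
    print(json.dumps({"helpers_loaded": True}))
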