Details
-
Bug
-
Status: Resolved
-
Resolution: Done
-
unspecified
-
None
-
None
-
Operating System: Mac OS
Platform: Macintosh
-
1373
Description
I get a lot of null pointer exceptions when more than one restconf client tries to program flows. See attached logs.
To reproduce:
1. connect mininet to the controller
2. start the Python test script below with the following command line:
python flow_config_perf.py --nflows 20 --nthreads 2
The test Python script below this line
----------------------------------
from random import randrange
import json
import argparse
import requests
import time
import threading
class Counter(object):
    """Thread-safe accumulator used to sum per-thread request rates."""

    def __init__(self, start=0):
        # Fixed: the original had _init_ (single underscores), so the
        # constructor never ran and instances had no lock/value attributes.
        self.lock = threading.Lock()
        self.value = start

    def increment(self, value=1):
        """Atomically add *value* (default 1) to the counter."""
        with self.lock:
            self.value = self.value + value
class Timer(object):
    """Context manager that measures the wall-clock time of a with-block.

    After the block exits, ``secs`` holds elapsed seconds and ``msecs``
    elapsed milliseconds.
    """

    def __init__(self, verbose=False):
        # Fixed: _init_/_enter_/_exit_ (single underscores) in the original
        # meant this class could not be used as a context manager at all.
        self.verbose = verbose

    def __enter__(self):
        self.start = time.time()
        return self

    def __exit__(self, *args):
        self.end = time.time()
        self.secs = self.end - self.start
        self.msecs = self.secs * 1000  # milliseconds
        if self.verbose:
            print("elapsed time: %f ms" % self.msecs)
# HTTP headers for RESTCONF requests.
putheaders = {'content-type': 'application/json'}
getheaders = {'Accept': 'application/json'}

# URL templates, relative to the ODL controller's <IP>:<port>.
# We first delete all existing flows, then PUT each flow incrementally;
# DELURL/GETURL/PUTURL each address a single flow list element via the
# (node, flow) %d placeholders.
DELURL = "restconf/config/opendaylight-inventory:nodes/node/openflow:%d/table/0/flow/%d"
GETURL = "restconf/config/opendaylight-inventory:nodes/node/openflow:%d/table/0/flow/%d"
PUTURL = "restconf/config/opendaylight-inventory:nodes/node/openflow:%d/table/0/flow/%d"
INVURL = 'restconf/operational/opendaylight-inventory:nodes'
N1T0_URL = 'restconf/operational/opendaylight-inventory:nodes/node/openflow:1/table/0'

# Serializes multi-line status output from the worker threads.
print_lock = threading.Lock()
# Flow-mod body template. Filled by add_flow() with six values:
# (cookie, flow-name, hard-timeout, id, idle-timeout, metadata).
# Fixed: the metadata field lost its %d placeholder in the original,
# so every "JSON_FLOW_MOD1 % (...)" call raised TypeError and produced
# invalid JSON.
JSON_FLOW_MOD1 = '''{
    "flow-node-inventory:flow": [
        {
            "flow-node-inventory:cookie": %d,
            "flow-node-inventory:cookie_mask": 65535,
            "flow-node-inventory:flow-name": "%s",
            "flow-node-inventory:hard-timeout": %d,
            "flow-node-inventory:id": "%s",
            "flow-node-inventory:idle-timeout": %d,
            "flow-node-inventory:installHw": false,
            "flow-node-inventory:instructions": {
                "flow-node-inventory:instruction": [
                    {
                        "flow-node-inventory:apply-actions": {
                            "flow-node-inventory:action": [
                                {
                                    "flow-node-inventory:dec-nw-ttl": {},
                                    "flow-node-inventory:order": 0
                                }
                            ]
                        },
                        "flow-node-inventory:order": 0
                    }
                ]
            },
            "flow-node-inventory:match": {
                "flow-node-inventory:metadata": %d
            },
            "flow-node-inventory:priority": 2,
            "flow-node-inventory:strict": false,
            "flow-node-inventory:table_id": 0
        }
    ]
}'''
# Aggregate rates (requests/second) summed across all worker threads;
# each worker adds its own per-thread rate via Counter.increment().
# "ok" counts HTTP 200 responses only; "total" counts every response.
add_ok_rate = Counter(0.0)
add_total_rate = Counter(0.0)
del_ok_rate = Counter(0.0)
del_total_rate = Counter(0.0)
def add_flow(url_template, s, res, tid, node, flow_id, metadata):
    """PUT one flow to the controller and tally the HTTP status code.

    Args:
        url_template: PUT URL with %d placeholders for (node, flow_id).
        s: requests.Session to send on.
        res: dict mapping HTTP status code -> occurrence count (mutated).
        tid: worker thread id; combined with flow_id to form the cookie.
        node: target switch number.
        flow_id: numeric flow identifier.
        metadata: integer match-metadata value for the flow body.
    """
    flow_data = JSON_FLOW_MOD1 % (tid + flow_id, 'TestFlow-%d' % flow_id,
                                  3400, str(flow_id), 1200, metadata)
    flow_url = url_template % (node, flow_id)
    r = s.put(flow_url, data=flow_data, headers=putheaders, stream=False)
    # dict.get handles the first occurrence of a status code without
    # the try/except KeyError dance.
    res[r.status_code] = res.get(r.status_code, 0) + 1
def delete_flow(url_template, s, res, tid, node, flow_id):
    """DELETE one flow from the controller and tally the HTTP status code.

    Args:
        url_template: DELETE URL with %d placeholders for (node, flow_id).
        s: requests.Session to send on.
        res: dict mapping HTTP status code -> occurrence count (mutated).
        tid: worker thread id (currently unused by this function).
        node: target switch number.
        flow_id: numeric flow identifier.
    """
    flow_url = url_template % (node, flow_id)
    r = s.delete(flow_url, headers=getheaders)
    res[r.status_code] = res.get(r.status_code, 0) + 1
def get_num_nodes(inventory_url, s, default_nodes):
    """Return the node count from the operational inventory.

    Falls back to *default_nodes* when the GET fails or the response
    does not contain the expected 'nodes'/'node' structure (e.g. when
    mininet is not connected).
    """
    nodes = default_nodes
    r = s.get(inventory_url, headers=getheaders, stream=False)
    if r.status_code == 200:
        try:
            inv = json.loads(r.content)['nodes']['node']
            nodes = len(inv)
        except KeyError:
            # Inventory present but empty/unexpected shape: keep default.
            pass
    return nodes
def add_delete_flows(put_url, del_url, nnodes, nflows, tid, cond):
    """Worker thread body: add *nflows* flows to random nodes, then delete
    them, recording per-status-code counts and request rates.

    Args:
        put_url, del_url: absolute URL templates with %d placeholders
            for (node, flow_id).
        nnodes: default node count used if the inventory query fails.
        nflows: number of flows to add and then delete.
        tid: worker thread id; namespaces the flow ids.
        cond: threading.Condition notified when this worker finishes.
    """
    add_res = {}
    del_res = {}
    flows = {}  # flow_id -> node it was installed on
    s = requests.Session()
    # NOTE(review): reads the module-level inv_url created in __main__,
    # not a parameter -- this function only works when run via this script.
    nnodes = get_num_nodes(inv_url, s, nnodes)

    with print_lock:
        print('Thread %d: Adding %d flows on %d nodes' % (tid, nflows, nnodes))
    with Timer() as t:
        for flow in range(nflows):
            node_id = randrange(nnodes)
            flow_id = tid * 100000 + flow  # per-thread id namespace
            flows[flow_id] = node_id
            add_flow(put_url, s, add_res, tid, node_id, flow_id, flow * 2 + 1)
    add_time = t.secs

    with print_lock:
        print('Thread %d: Deleting %d flows on %d nodes' % (tid, nflows, nnodes))
    with Timer() as t:
        for flow in range(nflows):
            flow_id = tid * 100000 + flow
            delete_flow(del_url, s, del_res, 100, flows[flow_id], flow_id)
    del_time = t.secs

    # Fixed: the original indexed add_res[200]/del_res[200] directly, which
    # raises KeyError when no request succeeds (exactly the failure mode of
    # this bug report); .get(200, 0) keeps the stats alive in that case.
    add_ok_rate_t = add_res.get(200, 0) / add_time
    add_total_rate_t = sum(add_res.values()) / add_time
    add_ok_rate.increment(add_ok_rate_t)
    add_total_rate.increment(add_total_rate_t)
    del_ok_rate_t = del_res.get(200, 0) / del_time
    del_total_rate_t = sum(del_res.values()) / del_time
    del_ok_rate.increment(del_ok_rate_t)
    del_total_rate.increment(del_total_rate_t)

    with print_lock:
        print('\nThread %d: ' % tid)
        print('    Add time: %.2f,' % add_time)
        print('    Add success rate: %.2f, Add total rate: %.2f' %
              (add_ok_rate_t, add_total_rate_t))
        print('    Add Results: ', add_res)
        print('    Delete time: %.2f,' % del_time)
        print('    Delete success rate: %.2f, Delete total rate: %.2f' %
              (del_ok_rate_t, del_total_rate_t))
        print('    Delete Results: ', del_res)

    with cond:
        cond.notifyAll()
if __name__ == "__main__":
    # Fixed: the original compared _name_ == "_main_" (single underscores),
    # so this guard never matched and the script did nothing when run.
    parser = argparse.ArgumentParser(description='Restconf test program')
    parser.add_argument('--odlhost', default='127.0.0.1', help='host where '
                        'odl controller is running (default is 127.0.0.1)')
    parser.add_argument('--odlport', default='8080', help='port on '
                        'which odl\'s RESTCONF is listening (default is 8080)')
    parser.add_argument('--nflows', type=int, default=10, help='number of '
                        'flow add/delete cycles to send in each thread; default 10')
    parser.add_argument('--nthreads', type=int, default=1,
                        help='Number of request worker threads, default=1')
    parser.add_argument('--nnodes', type=int, default=16,
                        help='Number of nodes if mininet not connected, default=16')
    in_args = parser.parse_args()

    # Absolute URLs; the *URL templates keep their (node, flow) placeholders.
    base_url = 'http://' + in_args.odlhost + ":" + in_args.odlport + '/'
    put_url = base_url + PUTURL
    del_url = base_url + DELURL
    get_url = base_url + GETURL
    inv_url = base_url + INVURL

    nnodes = in_args.nnodes
    nflows = in_args.nflows
    cond = threading.Condition()
    threads = []
    nthreads = in_args.nthreads
    for i in range(nthreads):
        t = threading.Thread(target=add_delete_flows,
                             args=(put_url, del_url, nnodes, nflows, i, cond))
        threads.append(t)
        t.start()

    # Wait for all workers to signal completion.
    # NOTE(review): a worker that notifies before main re-acquires the
    # condition can have its notification missed, hanging this loop;
    # joining the threads would be more robust -- left as-is to preserve
    # the original synchronization behavior.
    finished = 0
    while finished < nthreads:
        with cond:
            cond.wait()
            finished = finished + 1

    print('\n Overall add success rate: %.2f, Overall add rate: %.2f' %
          (add_ok_rate.value, add_total_rate.value))
    print(' Overall delete success rate: %.2f, Overall delete rate: %.2f' %
          (del_ok_rate.value, del_total_rate.value))