merge conflict fix

Signed-off-by: shivamcandela <shivam.thakur@candelatech.com>
shivamcandela
2021-07-29 01:01:11 +05:30
25 changed files with 1847 additions and 309 deletions


@@ -137,6 +137,7 @@ class GhostRequest:
self.ghost_json_login = self.ghost_json_url + '/admin/session/'
self.api_token = _api_token
self.images = list()
self.webpages = list()
self.pdfs = list()
self.influx_host = influx_host
self.influx_port = influx_port
@@ -193,7 +194,8 @@ class GhostRequest:
def upload_image(self,
image):
if self.debug:
print(image)
ghost_json_url = self.ghost_json_url + '/admin/images/upload/'
token = self.encode_token()
@@ -201,7 +203,8 @@ class GhostRequest:
proc = subprocess.Popen(bashCommand, shell=True, stdout=subprocess.PIPE)
output = proc.stdout.read().decode('utf-8')
if self.debug:
print(output)
self.images.append(json.loads(output)['images'][0]['url'])
def upload_images(self,
@@ -210,7 +213,8 @@ class GhostRequest:
if 'kpi' in image:
if 'png' in image:
self.upload_image(folder + '/' + image)
if self.debug:
print('images %s' % self.images)
def custom_post(self,
folder,
@@ -273,16 +277,18 @@ class GhostRequest:
scp_push = SCPClient(ssh_push.get_transport())
if parent_folder is not None:
print("parent_folder %s" % parent_folder)
files = os.listdir(parent_folder)
if self.debug:
print("parent_folder %s" % parent_folder)
print(files)
for file in files:
if os.path.isdir(parent_folder + '/' + file) is True:
if os.path.exists(file):
shutil.rmtree(file)
shutil.copytree(parent_folder + '/' + file, file)
target_folders.append(file)
if self.debug:
print('Target folders: %s' % target_folders)
else:
for folder in folders:
if self.debug:
@@ -290,25 +296,41 @@ class GhostRequest:
target_folders.append(folder)
testbeds = list()
webpagesandpdfs = list()
high_priority_list = list()
low_priority_list = list()
images = list()
times = list()
test_pass_fail = list()
subtest_pass_fail = list()
subtest_pass_total = 0
subtest_fail_total = 0
test_tag = dict()
for target_folder in target_folders:
try:
target_file = '%s/kpi.csv' % target_folder
df = csvreader.read_csv(file=target_file, sep='\t')
test_rig = csvreader.get_column(df, 'test-rig')[0]
test_id = csvreader.get_column(df, 'test-id')[0]
test_tag[test_id] = (csvreader.get_column(df, 'test-tag')[0])
pass_fail = Counter(csvreader.get_column(df, 'pass/fail'))
test_pass_fail.append(pass_fail)
dut_hw = csvreader.get_column(df, 'dut-hw-version')[0]
dut_sw = csvreader.get_column(df, 'dut-sw-version')[0]
dut_model = csvreader.get_column(df, 'dut-model-num')[0]
dut_serial = csvreader.get_column(df, 'dut-serial-num')[0]
subtest_pass = csvreader.get_column(df, 'Subtest-Pass')
subtest_fail = csvreader.get_column(df, 'Subtest-Fail')
for result in subtest_pass:
subtest_pass_total += int(result)
for result in subtest_fail:
subtest_fail_total += int(result)
subtest_pass_fail_list = dict()
subtest_pass_fail_list['PASS'] = subtest_pass_total
subtest_pass_fail_list['FAIL'] = subtest_fail_total
subtest_pass_fail.append(subtest_pass_fail_list)
duts = [dut_serial, dut_hw, dut_sw, dut_model, test_rig, test_tag]
times_append = csvreader.get_column(df, 'Date')
for target_time in times_append:
times.append(float(target_time) / 1000)
@@ -321,69 +343,88 @@ class GhostRequest:
text = text + 'Tests passed: 0<br />' \
'Tests failed : 0<br />' \
'Percentage of tests passed: Not Applicable<br />'
testbeds.append(test_rig)
if testbed is None:
testbed = test_rig
if test_run is None:
test_run = now.strftime('%B-%d-%Y-%I-%M-%p-report')
local_path = '/home/%s/%s/%s/%s' % (user_push, customer, testbed, test_run)
transport = paramiko.Transport(ghost_host, port)
transport.connect(None, user_push, password_push)
sftp = paramiko.sftp_client.SFTPClient.from_transport(transport)
if self.debug:
print(local_path)
print(target_folder)
try:
sftp.mkdir('/home/%s/%s/%s' % (user_push, customer, testbed))
except:
pass
try:
sftp.mkdir(local_path)
except:
pass
scp_push.put(target_folder, local_path, recursive=True)
files = sftp.listdir(local_path + '/' + target_folder)
pdfs = list()
webpages = list()
for file in files:
if 'pdf' in file:
url = 'http://%s/%s/%s/%s/%s/%s' % (
ghost_host, customer.strip('/'), testbed, test_run, target_folder, file)
pdfs.append('<a href="%s">PDF</a>' % url)
if 'index.html' in files:
url = 'http://%s/%s/%s/%s/%s/%s' % (
ghost_host, customer.strip('/'), testbed, test_run, target_folder, 'index.html')
webpages.append('<a href="%s">HTML</a>' % url)
webpagesandpdfsappend = dict()
webpagesandpdfsappend[test_id] = pdfs + webpages
webpagesandpdfs.append(webpagesandpdfsappend)
scp_push.close()
self.upload_images(target_folder)
for image in self.images:
if 'kpi-' in image:
if '-print' not in image:
images.append('<img src="%s"></img>' % image)
self.images = []
results = csvreader.get_columns(df, ['short-description', 'numeric-score', 'test details', 'pass/fail',
'test-priority'])
results[0] = ['Short Description', 'Score', 'Test Details', 'Pass or Fail', 'test-priority']
for row in results:
try:
row[1] = round(float(row[1]), 2)
except:
pass
low_priority = csvreader.filter_df(results, 'test-priority', 'less than', 94)
high_priority = csvreader.filter_df(results, 'test-priority', 'greater than or equal to', 95)
high_priority_list.append(high_priority)
low_priority_list.append(low_priority)
except:
print("Failure")
target_folders.remove(target_folder)
failuredict = dict()
failuredict[target_folder] = ['Failure']
webpagesandpdfs.append(failuredict)
testbed = test_rig
if test_run is None:
test_run = now.strftime('%B-%d-%Y-%I-%M-%p-report')
local_path = '/home/%s/%s/%s/%s' % (user_push, customer, testbed, test_run)
transport = paramiko.Transport(ghost_host, port)
transport.connect(None, user_push, password_push)
sftp = paramiko.sftp_client.SFTPClient.from_transport(transport)
if self.debug:
print(local_path)
print(target_folder)
try:
sftp.mkdir('/home/%s/%s/%s' % (user_push, customer, testbed))
except:
pass
try:
sftp.mkdir(local_path)
except:
pass
scp_push.put(target_folder, local_path, recursive=True)
files = sftp.listdir(local_path + '/' + target_folder)
for file in files:
if 'pdf' in file:
url = 'http://%s/%s/%s/%s/%s/%s' % (
ghost_host, customer.strip('/'), testbed, test_run, target_folder, file)
pdfs.append('PDF of results: <a href="%s">%s</a><br />' % (url, file))
scp_push.close()
self.upload_images(target_folder)
for image in self.images:
if 'kpi-' in image:
if '-print' not in image:
images.append('<img src="%s"></img>' % image)
self.images = []
results = csvreader.get_columns(df, ['short-description', 'numeric-score', 'test details', 'pass/fail',
'test-priority'])
results[0] = ['Short Description', 'Score', 'Test Details', 'Pass or Fail', 'test-priority']
low_priority = csvreader.filter_df(results, 'test-priority', 'less than', 94)
high_priority = csvreader.filter_df(results, 'test-priority', 'greater than or equal to', 95)
high_priority_list.append(high_priority)
low_priority_list.append(low_priority)
test_pass_fail_results = sum((Counter(test) for test in test_pass_fail), Counter())
subtest_pass_fail_results = sum((Counter(test) for test in subtest_pass_fail), Counter())
if self.debug:
print(times)
end_time = max(times)
start_time = '2021-07-01'
end_time = datetime.utcfromtimestamp(end_time)
now = time.time()
offset = datetime.fromtimestamp(now) - datetime.utcfromtimestamp(now)
end_time = end_time + offset
@@ -397,6 +438,8 @@ class GhostRequest:
['Short Description', 'Score', 'Test Details'])
high_priority.append(['Total Passed', test_pass_fail_results['PASS'], 'Total subtests passed during this run'])
high_priority.append(['Total Failed', test_pass_fail_results['FAIL'], 'Total subtests failed during this run'])
high_priority.append(['Subtests Passed', subtest_pass_fail_results['PASS'], 'Total subtests passed during this run'])
high_priority.append(['Subtests Failed', subtest_pass_fail_results['FAIL'], 'Total subtests failed during this run'])
if title is None:
title = end_time.strftime('%B %d, %Y %I:%M %p report')
@@ -414,7 +457,8 @@ class GhostRequest:
from_date=start_time,
to_date=end_time.strftime('%Y-%m-%d %H:%M:%S'),
pass_fail='GhostRequest',
testbed=testbeds[0],
test_tag=test_tag)
if self.influx_token is not None:
influxdb = RecordInflux(_influx_host=self.influx_host,
@@ -422,7 +466,7 @@ class GhostRequest:
_influx_org=self.influx_org,
_influx_token=self.influx_token,
_influx_bucket=self.influx_bucket)
short_description = 'Tests passed' # variable name
numeric_score = test_pass_fail_results['PASS'] # value
tags = dict()
print(datetime.utcfromtimestamp(max(times)))
@@ -432,7 +476,7 @@ class GhostRequest:
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Tests failed' # variable name
numeric_score = test_pass_fail_results['FAIL'] # value
tags = dict()
tags['testbed'] = testbeds[0]
@@ -441,12 +485,38 @@ class GhostRequest:
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Subtests passed' # variable name
numeric_score = subtest_pass_fail_results['PASS'] # value
tags = dict()
print(datetime.utcfromtimestamp(max(times)))
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'Subtest PASS'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Subtests failed' # variable name
numeric_score = subtest_pass_fail_results['FAIL'] # value
tags = dict()
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'Subtest FAIL'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
text = 'Testbed: %s<br />' % testbeds[0]
test_tag_table = ''
for tag in list(set(test_tag.values())):
print(tag)
test_tag_table += (
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Test Tag</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' % tag)
dut_table = '<table width="700px" border="1" cellpadding="2" cellspacing="0" ' \
'style="border-color: gray; border-style: solid; border-width: 1px; "><tbody>' \
'<tr><th colspan="2">Test Information</th></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Testbed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'%s' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT_HW</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT_SW</td>' \
@@ -458,16 +528,25 @@ class GhostRequest:
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Tests passed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Tests failed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Subtests passed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Subtests failed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' % (
duts[4], test_tag_table, duts[1], duts[2], duts[3], duts[0], test_pass_fail_results['PASS'],
test_pass_fail_results['FAIL'], subtest_pass_total, subtest_fail_total)
dut_table = dut_table + '</tbody></table>'
text = text + dut_table
for dictionary in webpagesandpdfs:
text += list(dictionary.keys())[0] + ' report: '
for value in dictionary.values():
for webpage in value:
text += webpage
if value.index(webpage) + 1 != len(value):
text += ' | '
text += '<br />'
for image in images:
text = text + image


@@ -137,6 +137,17 @@ class GrafanaRequest:
self.units[script] = dict()
for index in range(0, len(graph_groups)):
self.units[script][graph_groups[index]] = units[index]
subtests = 0
for score in list(self.csvreader.get_column(csv, 'Subtest-Pass')):
subtests += int(score)
for score in list(self.csvreader.get_column(csv, 'Subtest-Fail')):
subtests += int(score)
if subtests > 0:
dictionary[script].append('Subtests passed')
dictionary[script].append('Subtests failed')
print(subtests)
for item in dictionary[script]:
print('%s, %s' % (item, type(item)))
print(dictionary)
return dictionary
@@ -146,7 +157,8 @@ class GrafanaRequest:
groupBy,
index,
graph_group,
testbed,
test_tag=None):
query = (
'from(bucket: "%s")\n '
'|> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n '
@@ -158,6 +170,9 @@ class GrafanaRequest:
if graph_group is not None:
graphgroup = ('|> filter(fn: (r) => r["Graph-Group"] == "%s")\n' % graph_group)
query += graphgroup
if test_tag is not None:
graphgroup = ('|> filter(fn: (r) => r["Test-Tag"] == "%s")\n' % test_tag)
query += graphgroup
if testbed is not None:
query += ('|> filter(fn: (r) => r["testbed"] == "%s")\n' % testbed)
targets = dict()
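(For reference: a minimal sketch of how the optional Graph-Group / Test-Tag / testbed filters in this hunk stack onto one Flux query. The bucket, tag, and testbed values below are illustrative placeholders, not GrafanaRequest defaults.)

# Minimal sketch only: assembles a Flux query string by appending optional filters,
# mirroring the pattern shown in the hunk above. All argument values are placeholders.
def build_flux_query(bucket, graph_group=None, test_tag=None, testbed=None):
    query = ('from(bucket: "%s")\n'
             '|> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n' % bucket)
    if graph_group is not None:
        query += '|> filter(fn: (r) => r["Graph-Group"] == "%s")\n' % graph_group
    if test_tag is not None:
        query += '|> filter(fn: (r) => r["Test-Tag"] == "%s")\n' % test_tag
    if testbed is not None:
        query += '|> filter(fn: (r) => r["testbed"] == "%s")\n' % testbed
    return query

print(build_flux_query('stidmatt', graph_group='Subtest PASS', test_tag='influxgrafanaghost.sh', testbed='Matthew-ct523c'))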
@@ -195,7 +210,8 @@ class GrafanaRequest:
to_date='now',
graph_height=8,
graph__width=12,
pass_fail=None,
test_tag=None):
options = string.ascii_lowercase + string.ascii_uppercase + string.digits
uid = ''.join(random.choice(options) for i in range(9))
input1 = dict()
@@ -261,7 +277,16 @@ class GrafanaRequest:
targets = list()
counter = 0
try:
new_target = self.maketargets(bucket,
scriptname,
groupBy,
counter,
graph_group,
testbed,
test_tag=test_tag[scriptname])
except:
new_target = self.maketargets(bucket, scriptname, groupBy, counter, graph_group, testbed)
targets.append(new_target)
fieldConfig = dict()
@@ -331,10 +356,7 @@ class GrafanaRequest:
panel['title'] = scriptname + ' ' + graph_group
else:
panel['title'] = scriptname
print(panel['title'])
panel['title'] = 'Total Passed'
if 'FAIL' in panel['title']:
panel['title'] = 'Total Failed'
panel['transformations'] = list()
panel['transformations'].append(transformation)
panel['type'] = "graph"


@@ -20,6 +20,9 @@ def cv_base_adjust_parser(args):
# TODO: In future, can use TestRig once that GUI update has propagated
args.set.append(["Test Rig ID:", args.test_rig])
if args.test_tag != "":
args.set.append(["TestTag", args.test_tag])
if args.influx_host is not None:
if not args.pull_report:
print("Specified influx host without pull_report, will enabled pull_request.")
@@ -60,6 +63,8 @@ def cv_add_base_parser(parser):
# Reporting info
parser.add_argument("--test_rig", default="",
help="Specify the test rig info for reporting purposes, for instance: testbed-01")
parser.add_argument("--test_tag", default="",
help="Specify the test tag info for reporting purposes, for instance: testbed-01")
influx_add_parser_args(parser) # csv_to_influx

py-scripts/artifacts/candela_swirl_small-72h.png Normal file → Executable file

@@ -84,6 +84,12 @@ th {
li {
line-height: 1.5;
}
.contentDiv2 {
min-width: 800px;
max-width: 8in;
margin: 1em;
padding: 0;
}
.contentDiv {
min-width: 800px;
max-width: 8in;
@@ -181,6 +187,20 @@ li {
max-width: 1000px;
max-height: 205px;
}
#BannerLeft {
background-image:url("banner.png");
background-repeat:no-repeat;
padding: 0;
margin: 1em;
min-width: 1000px;
min-height: 205px;
width: 1000px;
height: 205px;
max-width: 1000px;
max-height: 205px;
}
#BannerLogo {
text-align: right;
padding: 25px;


@@ -61,11 +61,12 @@ class CSVtoInflux():
columns = dict(zip(df[0], length))
print('columns: %s' % columns)
influx_variables = ['script', 'short-description', 'test_details', 'Graph-Group',
'DUT-HW-version', 'DUT-SW-version', 'DUT-Serial-Num', 'testbed', 'Test Tag', 'Units']
csv_variables = ['test-id', 'short-description', 'test details', 'Graph-Group',
'dut-hw-version', 'dut-sw-version', 'dut-serial-num', 'test-rig', 'test-tag', 'Units']
csv_vs_influx = dict(zip(csv_variables, influx_variables))
for row in df[1:]:
row = [sub.replace('NaN', '0') for sub in row]
tags = dict()
print("row: %s" % row)
short_description = row[columns['short-description']]
@@ -76,9 +77,10 @@ class CSVtoInflux():
date = row[columns['Date']]
date = datetime.datetime.utcfromtimestamp(int(date) / 1000).isoformat() #convert to datetime so influx can read it, this is required
for variable in csv_variables:
if variable in columns.keys():
index = columns[variable]
influx_variable = csv_vs_influx[variable]
tags[influx_variable] = row[index]
self.influxdb.post_to_influx(short_description, numeric_score, tags, date)
def script_name(self):

py-scripts/cv_manager.py Executable file

@@ -0,0 +1,47 @@
#!/usr/bin/env python3
import sys
import os
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath('..'), 'py-dashboard'))
from cv_test_manager import cv_test
from cv_test_manager import *
class CVManager(cv_test):
def __init__(self,
scenario=None,
debug=False,
lfclient_host='localhost'):
self.scenario = scenario
self.debug = debug
self.exit_on_error = False
self.lfclient_host = lfclient_host
def apply_and_build_scenario(self):
self.apply_cv_scenario(self.scenario)
self.build_cv_scenario()
def main():
parser = argparse.ArgumentParser(description='''This is a simple driver script to load a CV Scenario''')
parser.add_argument('--scenario', help='Scenario you wish to build')
parser.add_argument('--debug', help='Enable debugging', default=False, action="store_true")
parser.add_argument('--mgr', default='localhost')
args = parser.parse_args()
manager = CVManager(scenario=args.scenario,
debug=args.debug,
lfclient_host=args.mgr)
manager.apply_and_build_scenario()
if __name__ =="__main__":
main()
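(A possible invocation of the new cv_manager.py helper above; the manager address and scenario name are placeholders.)
./cv_manager.py --mgr 192.168.100.213 --scenario my_scenario --debug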


@@ -0,0 +1,61 @@
#! /bin/bash
MGR=192.168.100.213
LFUSER=lanforge
LOCALDIR=/home/matthew/Documents/lanforge-scripts/py-scripts/lftest
TESTRIG="Matthew-ct523c"
GHOSTTOKEN=60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742
INFLUXTOKEN=31N9QDhjJHBu4eMUlMBwbK3sOjXLRAhZuCzZGeO8WVCj-xvR8gZWWvRHOcuw-5RHeB7xBFnLs7ZV023k4koR1A==
INFLUXHOST=c7-grafana.candelatech.com
INFLUXBUCKET=stidmatt
GRAFANATOKEN=eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ==
rm -r ${LOCALDIR}
mkdir ${LOCALDIR}
./scenario.py --mgr ${MGR} --load BLANK
sleep 10s
./create_l3.py --mgr ${MGR} --num_stations 4 --ssid stidmatt2 --password stidmatt2 --security wpa2 --radio wiphy0
./lf_dataplane_test.py --mgr ${MGR} --lf_user ${LFUSER} --lf_password lanforge --instance_name wct_instance \
--config_name 64_stations --upstream 1.1.eth1 --influx_host c7-grafana.candelatech.com --influx_org Candela \
--influx_token ${INFLUXTOKEN} --influx_bucket ${INFLUXBUCKET} --test_rig ${TESTRIG} --influx_tag testbed ${TESTRIG} \
--station 1.1.sta0000 --set DUT_NAME linksys-8450 --local_lf_report_dir ${LOCALDIR} \
--pull_report \
--download_speed 85% --upload_speed 0 \
--raw_line 'cust_pkt_sz: 88 1200' \
--raw_line 'directions: DUT Transmit;DUT Receive' \
--raw_line 'traffic_types: UDP' --pull_report --test_tag influxgrafanaghost.sh
#--raw_line 'pkts: Custom;60;142;256;512;1024;MTU'
./lf_wifi_capacity_test.py --mgr ${MGR} --lf_user ${LFUSER} --lf_password lanforge --instance_name linksys-8450 \
--config_name wifi_config --upstream 1.1.eth1 --radio wiphy0 --ssid lanforge --paswd lanforge --security wpa2 \
--influx_host ${INFLUXHOST} --influx_org Candela --influx_bucket ${INFLUXBUCKET} --test_rig ${TESTRIG} \
--influx_token ${INFLUXTOKEN} --influx_tag testbed ${TESTRIG} --set DUT_NAME linksys-8450 --local_lf_report_dir \
${LOCALDIR} --enable FALSE --pull_report --test_tag influxgrafanaghost.sh
./lf_wifi_capacity_test.py --mgr ${MGR} --lf_user ${LFUSER} --lf_password lanforge --instance_name linksys-8450 \
--config_name wifi_config --upstream 1.1.eth1 --radio wiphy0 --ssid lanforge --paswd lanforge --security wpa2 \
--influx_host ${INFLUXHOST} --influx_org Candela --influx_bucket ${INFLUXBUCKET} --test_rig ${TESTRIG} \
--influx_token ${INFLUXTOKEN} --influx_tag testbed ${TESTRIG} --set DUT_NAME linksys-8450 --local_lf_report_dir \
${LOCALDIR} --enable FALSE --pull_report --test_tag Can_we_use_two_test_tags
./lf_ap_auto_test.py --mgr ${MGR} --instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth1 \
--dut5_0 'matthew-router lanforge 04:f0:21:c0:65:7b (1)' --dut2_0 'matthew-router lanforge 04:f0:21:c0:65:7b (1)' \
--max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy0 --radio5 1.1.wiphy0 \
--set 'Basic Client Connectivity' 1 --set 'Multi Band Performance' 1 --set 'Stability' 0 --set 'Capacity' 0 \
--set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Throughput vs Pkt Size' 0 --set 'Band-Steering' 1 \
--influx_host ${INFLUXHOST} --influx_org Candela --influx_bucket ${INFLUXBUCKET} --test_rig ${TESTRIG} \
--influx_token ${INFLUXTOKEN} --influx_tag testbed ${TESTRIG} --pull_report --test_tag influxgrafanaghost.sh \
--local_lf_report_dir ${LOCALDIR}
./ghost_profile.py --ghost_token ${GHOSTTOKEN} --ghost_host 192.168.100.153 --authors Matthew --customer candela \
--user_push lanforge --password_push lanforge --kpi_to_ghost --grafana_token ${GRAFANATOKEN} \
--grafana_host 192.168.100.201 --grafana_bucket ${INFLUXBUCKET} --influx_host ${INFLUXHOST} --influx_org Candela \
--influx_token ${INFLUXTOKEN} --influx_bucket ${INFLUXBUCKET} --parent_folder ${LOCALDIR}


@@ -4,6 +4,7 @@ NAME: lf_csv.py
PURPOSE:
Common Library for generating csv for LANforge output
KPI - Key Performance Indicators
SETUP:
/lanforge/html-reports directory needs to be present or output generated in local file
@@ -18,10 +19,8 @@ COPYWRITE
INCLUDE_IN_README
'''
import numpy as np
import pandas as pd
class lf_csv:
def __init__(self,
_columns=['Stations', 'bk', 'be', 'vi', 'vo'],
@@ -43,6 +42,38 @@ class lf_csv:
print(csv_df)
csv_df.to_csv(self.filename, index=False, encoding='utf-8', na_rep='NA', float_format='%.2f')
# this layout may need to change
'''
kpi.csv : specific file that is used for the database, dashboard and blog post
A blank entry is a valid entry in some cases.
Date: date of run
test-rig : testbed that the tests are run on for example ct_us_001
test-tag : test specific information to differenciate the test, LANforge radios used, security modes (wpa2 , open)
dut-hw-version : hardware version of the device under test
dut-sw-version : software version of the device under test
dut-model-num : model number / name of the device under test
test-priority : test-priority is arbitrary number, choosing under 95 means it goes down at bottom of blog report, and higher priority goes at top.
test-id : script or test name , AP Auto, wifi capacity, data plane, dfs
short-description : short description of the test
pass/fail : set blank for performance tests
numeric-score : this is the value for the y-axis (x-axis is a timestamp), numeric value of what was measured
test-details : what was measured in the numeric-score, e.g. bits per second, bytes per second, upload speed, minimum cx time (ms)
Units : units used for the numeric-scort
Graph-Group - For the dashboard the graph / panel to put the resutls in . Currently the dashboard is Grafana
'''
class lf_kpi_csv:
def __init__(self,
_kpi_headers = ['Date','test-rig','test-tag','dut-hw-version','dut-sw-version','dut-model-num',
'test-priority','test-id','short-description','pass/fail','numberic-score'
'test details','Units','Graph-Group','Subtest-Pass','Subtest-Fail'],
_kpi_file='kpi.csv' #Currently this is the only file name accepted
):
self.kpi_headers = _kpi_headers
self.kpi_rows = ""
self.kpi_filename = _kpi_file
if __name__ == "__main__":
test = lf_csv()
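(A minimal sketch of a kpi.csv row matching the header layout described in the docstring above; every value is made up, and the file is written tab-separated to match the sep='\t' readers used elsewhere in this commit.)

# Illustrative only: write one fabricated kpi.csv row using the documented headers.
import pandas as pd

headers = ['Date', 'test-rig', 'test-tag', 'dut-hw-version', 'dut-sw-version', 'dut-model-num',
           'test-priority', 'test-id', 'short-description', 'pass/fail', 'numeric-score',
           'test details', 'Units', 'Graph-Group', 'Subtest-Pass', 'Subtest-Fail']
row = [1627500000000, 'ct_us_001', 'wpa2', 'v1.0', '5.4.4', 'linksys-8450',
       95, 'dataplane', 'UDP download, 1024B packets', 'PASS', 512.3,
       'bits per second', 'bps', 'Throughput', 4, 0]
pd.DataFrame([row], columns=headers).to_csv('kpi.csv', sep='\t', index=False)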


@@ -53,6 +53,11 @@ class lf_bar_graph():
_text_font=None,
_text_rotation=None,
_grp_title = "",
_legend_handles=None,
_legend_loc="best",
_legend_box=None,
_legend_ncol=1,
_legend_fontsize=None,
_dpi=96,
_enable_csv=False):
@@ -79,6 +84,11 @@ class lf_bar_graph():
self.grp_title = _grp_title
self.enable_csv = _enable_csv
self.lf_csv = lf_csv()
self.legend_handles = _legend_handles
self.legend_loc = _legend_loc
self.legend_box = _legend_box
self.legend_ncol = _legend_ncol
self.legend_fontsize = _legend_fontsize
def build_bar_graph(self):
if self.color is None:
@@ -121,7 +131,7 @@ class lf_bar_graph():
else:
plt.xticks(np.arange(0, len(self.data_set[0]), step=self.xaxis_step), self.xaxis_categories,
fontsize = self.xticks_font)
plt.legend(handles=self.legend_handles, loc=self.legend_loc, bbox_to_anchor=self.legend_box, ncol=self.legend_ncol, fontsize=self.legend_fontsize)
plt.suptitle(self.title, fontsize=self.title_size)
plt.title(self.grp_title)
fig = plt.gcf()


@@ -331,6 +331,39 @@ class lf_report():
)
self.html += self.banner_html
def build_banner_left(self):
# NOTE: {{ }} are the ESCAPED curly braces
self.banner_html = """<!DOCTYPE html>
<html lang='en'>
<head>
<meta charset='UTF-8'>
<meta name='viewport' content='width=device-width, initial-scale=1' />
<style>
body {{ margin: 0; padding: 0; }}
</style>
<link rel='stylesheet' href='report.css' />
<link rel='stylesheet' href='custom.css' />
<title>{title}</title>
</head>
<body>
<div id='BannerBack'>
<div id='BannerLeft'>
<br/>
<img id='BannerLogo' align='right' src="CandelaLogo2-90dpi-200x90-trans.png" border='0'/>
<div class='HeaderStyle'>
<br>
<h1 class='TitleFontPrint' style='color:darkgreen;'> {title} </h1>
<h3 class='TitleFontPrint' style='color:darkgreen;'>{date}</h3>
</div>
</div>
</div>
""".format(
title=self.title,
date=self.date,
)
self.html += self.banner_html
def build_table_title(self):
self.table_title_html = """
<!-- Table Title-->
@@ -338,6 +371,9 @@ class lf_report():
""".format(title=self.table_title)
self.html += self.table_title_html
def start_content_div2(self):
self.html += "\n<div class='contentDiv2'>\n"
def start_content_div(self):
self.html += "\n<div class='contentDiv'>\n"


@@ -97,7 +97,17 @@ if __name__ == "__main__":
_graph_image_name="Bi-single_radio_2.4GHz",
_label=["bi-downlink", "bi-uplink", 'uplink'],
_color=['darkorange', 'forestgreen','blueviolet'],
_color_edge='red',
_grp_title="Throughput for each clients",
_xaxis_step=5,
_show_bar_value=True,
_text_font=7,
_text_rotation=45,
_xticks_font=7,
_legend_loc="best",
_legend_box=(1,1),
_legend_ncol=1,
_legend_fontsize=None)
graph_png = graph.build_bar_graph()
@@ -114,7 +124,8 @@ if __name__ == "__main__":
_yaxis_name="y-axis",
_graph_image_name="image_name1",
_color=None,
_label=["s1", "s2", "s3"],
_enable_csv = False)
graph_png = graph2.build_scatter_graph()
print("graph name {}".format(graph_png))
@@ -129,7 +140,8 @@ if __name__ == "__main__":
_yaxis_name="y-axis",
_graph_image_name="image_name_map",
_color=None,
_label=["s1", "s2"],
_enable_csv = False)
graph_png = graph2.build_scatter_graph()
print("graph name {}".format(graph_png))
@@ -144,7 +156,8 @@ if __name__ == "__main__":
_yaxis_name="Login PASS/FAIL",
_label=['Success', 'Fail', 'both'],
_graph_image_name="login_pass_fail1",
_color=None,
_enable_csv = False)
graph_png = graph.build_stacked_graph()
@@ -163,7 +176,8 @@ if __name__ == "__main__":
_label=['Success', 'Fail'],
_graph_image_name="image_name_pass_fail",
_color=["r", "g"],
_figsize=(9, 4),
_enable_csv = False)
graph_png = graph.build_horizontal_stacked_graph()


@@ -216,7 +216,7 @@ class RxSensitivityTest(cv_test):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets,
self.pull_report, self.lf_host, self.lf_user, self.lf_password,
cv_cmds, ssh_port=self.ssh_port, local_lf_report_dir=self.local_path,
graph_groups_file=self.graph_groups)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
@@ -251,9 +251,9 @@ def main():
Example 2:
./lf_rx_sensitivity_test.py --json <name>.json
see sample json file: lf_rx_sensitivity_config.json
Sample <name>.json between using eth1 and eth2
{
@@ -261,7 +261,7 @@ def main():
"port":"8080",
"lf_user":"lanforge",
"lf_password":"lanforge",
"instance_name":"rx-sensitivity-instance",
"config_name":"test_con",
"upstream":"1.1.eth1",
"dut":"asus_5g",
@@ -278,7 +278,7 @@ def main():
"port":"8080",
"lf_user":"lanforge",
"lf_password":"lanforge",
"instance_name":"rx-sensitivity-instance",
"config_name":"test_con",
"upstream":"1.1.eth1",
"dut":"asus_5g",
@@ -296,7 +296,7 @@ def main():
parser.add_argument('--json', help="--json <config.json> json input file", default="")
parser.add_argument("-u", "--upstream", type=str, default="",
help="Upstream port for rx sensitivity test ex. 1.1.eth2")
parser.add_argument("--station", type=str, default="",
help="Station to be used in this test, example: 1.1.sta01500")


@@ -318,7 +318,6 @@ if sys.version_info[0] != 3:
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
from cv_test_manager import cv_test
from cv_test_manager import *
from LANforge import LFUtils
@@ -357,6 +356,7 @@ class WiFiCapacityTest(cv_test):
report_dir="",
graph_groups=None,
test_rig="",
test_tag="",
local_lf_report_dir=""
):
super().__init__(lfclient_host=lfclient_host, lfclient_port=lf_port)
@@ -395,6 +395,7 @@ class WiFiCapacityTest(cv_test):
self.report_dir = report_dir
self.graph_groups = graph_groups
self.test_rig = test_rig
self.test_tag = test_tag
self.local_lf_report_dir = local_lf_report_dir
def setup(self):
@@ -453,6 +454,8 @@ class WiFiCapacityTest(cv_test):
cfg_options.append("dl_rate: " + self.download_rate)
if self.test_rig != "":
cfg_options.append("test_rig: " + self.test_rig)
if self.test_tag != "":
cfg_options.append("test_tag: " + self.test_tag)
cfg_options.append("save_csv: 1")
@@ -487,7 +490,7 @@ def main():
--instance_name wct_instance --config_name wifi_config --upstream 1.1.eth1 --batch_size 1 --loop_iter 1 \
--protocol UDP-IPv4 --duration 6000 --pull_report --stations 1.1.sta0000,1.1.sta0001 \
--create_stations --radio wiphy0 --ssid test-ssid --security open --paswd [BLANK] \
--test_rig Testbed-01 -test_tag TAG\
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
--influx_bucket ben \
@@ -562,6 +565,7 @@ def main():
sets=args.set,
graph_groups=args.graph_groups,
test_rig=args.test_rig,
test_tag=args.test_tag,
local_lf_report_dir=args.local_lf_report_dir
)
WFC_Test.setup()


@@ -131,7 +131,7 @@ if [[ $MGRLEN -gt 0 ]]; then
testgroup_list_groups
testgroup_list_connections
testgroup_delete_group
"./testgroup2.py --num_stations 4 --ssid $SSID_USED --passwd $PASSWD_USED --security $SECURITY --radio $RADIO_USED --group_name group0 --add_group --mgr $MGR"
"./test_ipv4_connection.py --radio $RADIO_USED --num_stations $NUM_STA --ssid $SSID_USED --passwd $PASSWD_USED --security $SECURITY --debug --mgr $MGR"
"./test_ipv4_l4_urls_per_ten.py --radio $RADIO_USED --num_stations $NUM_STA --security $SECURITY --ssid $SSID_USED --passwd $PASSWD_USED --num_tests 1 --requests_per_ten 600 --target_per_ten 600 --debug --mgr $MGR"
"./test_ipv4_l4_wifi.py --radio $RADIO_USED --num_stations $NUM_STA --security $SECURITY --ssid $SSID_USED --passwd $PASSWD_USED --test_duration 15s --debug --mgr $MGR"


@@ -0,0 +1,54 @@
# Run this app with `python app.py` and
# visit http://127.0.0.1:8050/ in your web browser.
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
import pandas as pd
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
df = pd.read_csv('http://192.168.95.6/html-reports/2021-07-20-16-25-05_lf_check/dataplane-2021-07-20-04-28-42/kpi.csv', sep='\t')
append_df = pd.read_csv('http://192.168.95.6/html-reports/2021-07-24-03-00-01_lf_check/dataplane-2021-07-24-03-06-02/kpi.csv', sep='\t')
df = df.append(append_df, ignore_index=True)
#print(df)
fig = (px.scatter(df, x="Date", y="numeric-score",
color="short-description", hover_name="short-description",
size_max=60)).update_traces(mode='lines+markers')
'''
fig = px.scatter(df, x="Date", y="numeric-score",
color="short-description", hover_name="short-description",
size_max=60)
'''
'''
fig = px.scatter(df, x="short-description", y="numeric-score",
color="short-description", hover_name="short-description",
size_max=60)
'''
fig .update_layout(
title="Throughput vs Packet size",
xaxis_title="Packet Size",
yaxis_title="Mbps",
xaxis = {'type' : 'date'}
)
app.layout = html.Div([
dcc.Graph(
id='packet-size vs rate',
figure=fig
)
])
if __name__ == '__main__':
app.run_server(debug=True)


@@ -0,0 +1,94 @@
#!/usr/bin/python3
'''
NAME:
lf_json_test.py
PURPOSE:
EXAMPLE:
./lf_json_test.py -
NOTES:
TO DO NOTES:
'''
import os
import sys
if sys.version_info[0] != 3:
print("This script requires Python3")
exit()
from time import sleep
import argparse
import json
#if 'py-json' not in sys.path:
# sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
# print("path: {}".format(os.path.join(os.path.abspath('..'))))
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
from LANforge import lf_json_autogen
class lf_read_json():
def __init__(self):
self.timeout = 10
def preprocess_data(self):
pass
def main():
# arguments
parser = argparse.ArgumentParser(
prog='lf_json_test.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
lf_json_test.py : lf json test
''',
description='''\
lf_json_test.py
-----------
Summary :
---------
./lf_dataplane_json.py --mgr 192.168.0.101 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth1 --dut asus_5g --duration 15s --station 1.1.13.sta0002 --download_speed 85% --upload_speed 0 --raw_line 'pkts: Custom;60;MTU' --raw_line 'cust_pkt_sz: 88 1200' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1
''')
#parser.add_argument('--json', help="--json <config.json> json input file", default="config.json")
parser.add_argument('--cmd', help="--cmd <json_cmd> json command", default="")
args = parser.parse_args()
json_cmd = args.cmd
print("json cmd {}".format(json_cmd))
#with open(config_json, 'r') as config_file:
# config_data = json.load(config_file)
#print(config_data)
lf_get = lf_json_autogen.LFJsonGet(lfclient_host='192.168.0.101',
lfclient_port=8080,
debug_=True,
)
duts = lf_get.get_chamber(fields = [lf_get.duts])
print("duts {}".format(duts))
print("END lf_read_json.py")
if __name__ == "__main__":
main()


@@ -319,7 +319,7 @@ class StaConnect2(LFCliBase):
def collect_endp_stats(self, endp_map):
print("Collecting Data")
fields="/all"
for (cx_name, endps) in endp_map.items():
try:
endp_url = "/endp/%s%s" % (endps[0], fields)


@@ -119,8 +119,10 @@ class L3VariableTime(Realm):
ap_read=False,
ap_port='/dev/ttyUSB0',
ap_baud='115200',
ap_cmd_5g='wl -i wl1 bs_data',
ap_cmd_2g='wl -i wl0 bs_data',
ap_chanim_cmd_5g='wl -i wl1 chanim_stats',
ap_chanim_cmd_2g='wl -i wl0 chanim_stats',
ap_test_mode=False,
_exit_on_error=False,
_exit_on_fail=False,
@@ -200,8 +202,10 @@ class L3VariableTime(Realm):
self.ap_read = ap_read
self.ap_port = ap_port
self.ap_baud = ap_baud
self.ap_cmd_5g = ap_cmd_5g
self.ap_cmd_2g = ap_cmd_2g
self.ap_chanim_cmd_5g = ap_chanim_cmd_5g
self.ap_chanim_cmd_2g = ap_chanim_cmd_2g
self.ap_test_mode = ap_test_mode
self.ap_5g_umsched = ""
self.ap_5g_msched = ""
@@ -483,39 +487,73 @@ class L3VariableTime(Realm):
return ap_results
def read_ap_stats_5g(self):
# 5ghz: wl -i wl1 bs_data
ap_stats_5g = ""
try:
# configure the serial interface
ser = serial.Serial(self.ap_port, int(self.ap_baud), timeout=5)
ss = SerialSpawn(ser)
ss.sendline(str(self.ap_cmd_5g))
ss.expect([pexpect.TIMEOUT], timeout=1) # do not detete line, waits for output
ap_stats_5g = ss.before.decode('utf-8', 'ignore')
print("ap_stats_5g {}".format(ap_stats_5g))
except:
print("WARNING unable to read AP")
return ap_stats_5g
def read_ap_stats_2g(self):
# 2.4ghz# wl -i wl0 bs_data
ap_stats_2g = ""
try:
# configure the serial interface
ser = serial.Serial(self.ap_port, int(self.ap_baud), timeout=5)
ss = SerialSpawn(ser)
ss.sendline(str(self.ap_cmd_2g))
ss.expect([pexpect.TIMEOUT], timeout=1) # do not detete line, waits for output
ap_stats = ss.before.decode('utf-8', 'ignore')
print("ap_stats_2g {}".format(ap_stats_2g))
except:
print("WARNING unable to read AP")
return ap_stats_2g
def read_ap_chanim_stats_5g(self):
# 5ghz: wl -i wl1 chanim_stats
ap_chanim_stats_5g = ""
try:
# configure the serial interface
ser = serial.Serial(self.ap_port, int(self.ap_baud), timeout=5)
ss = SerialSpawn(ser)
ss.sendline(str(self.ap_chanim_cmd_5g))
ss.expect([pexpect.TIMEOUT], timeout=1) # do not detete line, waits for output
ap_chanim_stats_5g = ss.before.decode('utf-8', 'ignore')
print("ap_stats {}".format(ap_chanim_stats_5g))
except:
print("WARNING unable to read AP")
return ap_chanim_stats_5g
def read_ap_chanim_stats_2g(self):
# 2.4ghz# wl -i wl0 chanim_stats
ap_chanim_stats_2g = ""
try:
# configure the serial interface
ser = serial.Serial(self.ap_port, int(self.ap_baud), timeout=5)
ss = SerialSpawn(ser)
ss.sendline(str(self.ap_chanim_cmd_2g))
ss.expect([pexpect.TIMEOUT], timeout=1) # do not detete line, waits for output
ap_chanim_stats_2g = ss.before.decode('utf-8', 'ignore')
print("ap_stats {}".format(ap_chanim_stats_2g))
except:
print("WARNING unable to read AP")
return ap_chanim_stats_2g
# Run the main body of the test logic.
def start(self, print_pass=False, print_fail=False):
@@ -621,6 +659,8 @@ class L3VariableTime(Realm):
endps = []
ap_row = []
ap_stats_col_titles = []
mac_found_5g = False
mac_found_2g = False
while cur_time < end_time:
# interval_time = cur_time + datetime.timedelta(seconds=5)
@@ -640,30 +680,31 @@ class L3VariableTime(Realm):
if self.ap_read:
if self.ap_test_mode:
# Create the test data as a continuous string
ap_stats_5g = "{}{}{}{}{}{}".format("root@Docsis-Gateway:~# wl -i wl1 bs_data\n",
"Station Address PHY Mbps Data Mbps Air Use Data Use Retries bw mcs Nss ofdma mu-mimo\n",
"04:f0:21:82:2f:d6 1016.6 48.9 6.5% 24.4% 16.6% 80 9.7 2 0.0% 0.0%\n",
"50:E0:85:84:7A:E7 880.9 52.2 7.7% 26.1% 20.0% 80 8.5 2 0.0% 0.0%\n",
"50:E0:85:89:5D:00 840.0 47.6 6.4% 23.8% 2.3% 80 8.0 2 0.0% 0.0%\n",
"50:E0:85:87:5B:F4 960.7 51.5 5.9% 25.7% 0.0% 80 9 2 0.0% 0.0%\n",
"- note the MAC will match ap_stats.append((overall) - 200.2 26.5% - - \n")
print("ap_stats {}".format(ap_stats_5g))
# Create the test data as a continuous string
ap_chanim_stats_5g = "{}{}{}{}".format(
"root@Docsis-Gateway:~# wl -i wl1 chanim_stats\n",
"version: 3\n",
"chanspec tx inbss obss nocat nopkt doze txop goodtx badtx glitch badplcp knoise idle timestamp\n",
"0xe06a 61 15 0 17 0 0 6 53 2 0 0 -91 65 343370578\n")
else:
# read from the AP
ap_stats_5g = self.read_ap_stats_5g()
ap_chanim_stats_5g = self.read_ap_chanim_stats_5g()
ap_stats_5g_rows = ap_stats_5g.splitlines()
print("From AP stats: ap_stats_5g_rows {}".format(ap_stats_5g_rows))
ap_chanim_stats_rows_5g = ap_chanim_stats_5g.splitlines()
print("From AP chanim: ap_chanim_stats_rows {}".format(ap_chanim_stats_rows_5g))
channel_utilization = 0
# Query all of our ports
@@ -679,68 +720,181 @@ class L3VariableTime(Realm):
pprint(response)
else:
# print("response".format(response))
pprint(response)
p = response['interface']
# print("#### From LANforge: p, response['insterface']:{}".format(p))
mac = p['mac']
# print("#### From LANforge: p['mac']: {mac}".format(mac=mac))
# Parse the ap stats to find the matching mac then use that row for reporting
for row in ap_stats_5g_rows:
split_row = row.split()
# print("split_row {}".format(split_row))
# print("split_row[0] {} mac {}".format(split_row[0].lower(),mac.lower()))
if self.ap_test_mode:
if split_row[0].lower() != mac.lower():
ap_row = split_row
mac_found_5g = True
else: else:
try: try:
# split_row[0].lower() , mac from AP # split_row[0].lower() , mac from AP
# mac.lower() , mac from LANforge # mac.lower() , mac from LANforge
if split_row[0].lower() == mac.lower(): if split_row[0].lower() == mac.lower():
ap_row = split_row ap_row = split_row
mac_found_5g = True
except: except:
print(" 'No stations are currently associated.'? from AP") print(" 'No stations are currently associated.'? from AP")
print( print(
" since possibly no stations: excption on compare split_row[0].lower() ") " since possibly no stations: excption on compare split_row[0].lower() ")
print("selected ap_row (from split_row): {}".format(ap_row)) if mac_found_5g == True:
mac_found_5g = False
print("selected ap_row (from split_row): {}".format(ap_row))
# Find latency, jitter for connections using this port. # Find latency, jitter for connections using this port.
latency, jitter, tput = self.get_endp_stats_for_port(p["port"], endps) latency, jitter, tput = self.get_endp_stats_for_port(p["port"], endps)
# now report the ap_chanim_stats along side of the ap_stats # now report the ap_chanim_stats along side of the ap_stats_5g
xtop_reported = False xtop_reported = False
for row in ap_chanim_stats_rows: for row in ap_chanim_stats_rows_5g:
split_row = row.split()
if xtop_reported:
try:
xtop = split_row[7]
channel_utilization = 100 - int(xtop)
except:
print(
"detected chanspec with reading chanim_stats, failed reading xtop")
# should be only one channel utilization
break
else:
try:
if split_row[0].lower() == 'chanspec':
xtop_reported = True
except:
print("Error reading xtop")
# ap information is passed with ap_row so all information needs to be contained in ap_row
ap_row.append(str(channel_utilization))
print("channel_utilization {channel_utilization}".format(
channel_utilization=channel_utilization))
print("ap_row {ap_row}".format(ap_row=ap_row))
ap_stats_5g_col_titles = ['Station Address', 'PHY Mbps', 'Data Mbps', 'Air Use',
'Data Use', 'Retries', 'bw', 'mcs', 'Nss', 'ofdma',
'mu-mimo', 'channel_utilization']
self.write_port_csv(len(temp_stations_list), ul, dl, ul_pdu_str, dl_pdu_str,
atten_val, eid_name, p,
latency, jitter, tput, ap_row,
ap_stats_5g_col_titles) # ap_stats_5g_col_titles used as a length
if self.ap_test_mode:
# Create the test data as a continuous string
ap_stats_2g = "{}{}{}{}{}{}".format("root@Docsis-Gateway:~# wl -i wl1 bs_data\n",
"Station Address PHY Mbps Data Mbps Air Use Data Use Retries bw mcs Nss ofdma mu-mimo\n",
"04:f0:21:82:2f:d6 1016.6 48.9 6.5% 24.4% 16.6% 80 9.7 2 0.0% 0.0%\n",
"50:E0:85:84:7A:E7 880.9 52.2 7.7% 26.1% 20.0% 80 8.5 2 0.0% 0.0%\n",
"50:E0:85:89:5D:00 840.0 47.6 6.4% 23.8% 2.3% 80 8.0 2 0.0% 0.0%\n",
"50:E0:85:87:5B:F4 960.7 51.5 5.9% 25.7% 0.0% 80 9 2 0.0% 0.0%\n",
"- note the MAC will match ap_stats_2g.append((overall) - 200.2 26.5% - - \n")
print("ap_stats_2g {}".format(ap_stats_2g))
# Create the test data as a continuous string
ap_chanim_stats_2g = "{}{}{}{}".format(
"root@Docsis-Gateway:~# wl -i wl1 chanim_stats\n",
"version: 3\n",
"chanspec tx inbss obss nocat nopkt doze txop goodtx badtx glitch badplcp knoise idle timestamp\n",
"0xe06a 61 15 0 17 0 0 6 53 2 0 0 -91 65 343370578\n")
else:
# read from the AP
ap_stats_2g = self.read_ap_stats_2g()
ap_chanim_stats_2g = self.read_ap_chanim_stats_2g()
ap_stats_2g_rows = ap_stats_2g.splitlines()
print("From AP stats: ap_stats_2g_rows {}".format(ap_stats_2g_rows))
ap_chanim_stats_rows_2g = ap_chanim_stats_2g.splitlines()
print("From AP chanim: ap_chanim_stats_rows_2g {}".format(ap_chanim_stats_rows_2g))
channel_utilization = 0
# Query all of our ports
# Note: the endp eid is the shelf.resource.port.endp-id
port_eids = self.gather_port_eids()
for eid_name in port_eids:
eid = self.name_to_eid(eid_name)
url = "/port/%s/%s/%s" % (eid[0], eid[1], eid[2])
# read LANforge to get the mac
response = self.json_get(url)
if (response is None) or ("interface" not in response):
print("query-port: %s: incomplete response:" % (url))
pprint(response)
else:
# print("response".format(response))
pprint(response)
p = response['interface']
# print("#### From LANforge: p, response['insterface']:{}".format(p))
mac = p['mac']
# print("#### From LANforge: p['mac']: {mac}".format(mac=mac))
# Parse the ap stats to find the matching mac then use that row for reporting
for row in ap_stats_2g_rows:
split_row = row.split()
# print("split_row {}".format(split_row))
# print("split_row[0] {} mac {}".format(split_row[0].lower(),mac.lower()))
if self.ap_test_mode:
if split_row[0].lower() != mac.lower():
ap_row = split_row
mac_found_2g = True
else:
try:
# split_row[0].lower() , mac from AP
# mac.lower() , mac from LANforge
if split_row[0].lower() == mac.lower():
ap_row = split_row
mac_found_2g = True
except:
print(" 'No stations are currently associated.'? from AP")
print(
" since possibly no stations: excption on compare split_row[0].lower() ")
if mac_found_2g == True:
mac_found_2g = False
print("selected ap_row (from split_row): {}".format(ap_row))
# Find latency, jitter for connections using this port.
latency, jitter, tput = self.get_endp_stats_for_port(p["port"], endps)
# now report the ap_chanim_stats along side of the ap_stats_2g
xtop_reported = False
for row in ap_chanim_stats_rows_2g:
split_row = row.split()
if xtop_reported:
try:
xtop = split_row[7]
channel_utilization = 100 - int(xtop)
except:
print(
"detected chanspec with reading chanim_stats, failed reading xtop")
# should be only one channel utilization
break
else:
try:
if split_row[0].lower() == 'chanspec':
xtop_reported = True
except:
print("Error reading xtop")
# ap information is passed with ap_row so all information needs to be contained in ap_row
ap_row.append(str(channel_utilization))
print("channel_utilization {channel_utilization}".format(
channel_utilization=channel_utilization))
print("ap_row {ap_row}".format(ap_row=ap_row))
ap_stats_2g_col_titles = ['Station Address', 'PHY Mbps', 'Data Mbps', 'Air Use',
'Data Use', 'Retries', 'bw', 'mcs', 'Nss', 'ofdma',
'mu-mimo', 'channel_utilization']
self.write_port_csv(len(temp_stations_list), ul, dl, ul_pdu_str, dl_pdu_str,
atten_val, eid_name, p,
latency, jitter, tput, ap_row,
ap_stats_2g_col_titles) # ap_stats_2g_col_titles used as a length
else:
# Query all of our ports
@@ -1069,9 +1223,12 @@ python3 .\\test_l3_longevity.py --test_duration 4m --endp_type \"lf_tcp lf_udp m
parser.add_argument('--ap_read', help='--ap_read flag present enable reading ap', action='store_true')
parser.add_argument('--ap_port', help='--ap_port \'/dev/ttyUSB0\'', default='/dev/ttyUSB0')
parser.add_argument('--ap_baud', help='--ap_baud \'115200\'', default='115200')
parser.add_argument('--ap_cmd_5g', help='ap_cmd_5g \'wl -i wl1 bs_data\'', default="wl -i wl1 bs_data")
parser.add_argument('--ap_cmd_2g', help='ap_cmd_2g \'wl -i wl0 bs_data\'', default="wl -i wl0 bs_data")
parser.add_argument('--ap_chanim_cmd_5g', help='ap_chanim_cmd_5g \'wl -i wl1 chanim_stats\'',
default="wl -i wl1 chanim_stats")
parser.add_argument('--ap_chanim_cmd_2g', help='ap_chanim_cmd_2g \'w1 -i wl0 chanim_stats\'',
default="wl -i wl0 chanim_stats")
parser.add_argument('--ap_scheduler_stats',
help='--ap_scheduler_stats flag to clear stats run test then dump ul and dl stats to file',
action='store_true')
@@ -1148,11 +1305,17 @@ python3 .\\test_l3_longevity.py --test_duration 4m --endp_type \"lf_tcp lf_udp m
if args.ap_baud:
ap_baud = args.ap_baud
if args.ap_cmd_5g:
ap_cmd_5g = args.ap_cmd_5g
if args.ap_cmd_2g:
ap_cmd_2g = args.ap_cmd_2g
if args.ap_chanim_cmd_5g:
ap_chanim_cmd_5g = args.ap_chanim_cmd_5g
if args.ap_chanim_cmd_2g:
ap_chanim_cmd_2g = args.ap_chanim_cmd_2g
if args.test_duration:
test_duration = args.test_duration
@@ -1326,8 +1489,10 @@ python3 .\\test_l3_longevity.py --test_duration 4m --endp_type \"lf_tcp lf_udp m
ap_read=ap_read,
ap_port=ap_port,
ap_baud=ap_baud,
ap_cmd_5g=ap_cmd_5g,
ap_cmd_2g=ap_cmd_2g,
ap_chanim_cmd_5g=ap_chanim_cmd_5g,
ap_chanim_cmd_2g=ap_chanim_cmd_2g,
ap_test_mode=ap_test_mode)
ip_var_test.pre_cleanup()
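For reference, the per-station logic above reduces to two small operations: pick the bs_data row whose first column matches the LANforge station MAC, and turn the chanim_stats 'txop' column into a utilization figure as 100 - txop. The sketch below is stand-alone and only assumes the same text layout as the ap_test_mode sample data in this diff; the helper names are hypothetical and are not part of test_l3_longevity.py.

# Sketch only: mirrors the parsing done in the 5g/2g loops above.
def match_bs_data_row(bs_data_text, mac):
    """Return the 'wl ... bs_data' row whose first column is the station MAC."""
    for row in bs_data_text.splitlines():
        split_row = row.split()
        if split_row and split_row[0].lower() == mac.lower():
            return split_row
    return None


def channel_utilization_from_chanim(chanim_text):
    """Utilization is taken as 100 minus the 'txop' column (index 7) of the data row."""
    header_seen = False
    for row in chanim_text.splitlines():
        split_row = row.split()
        if header_seen and len(split_row) > 7:
            try:
                return 100 - int(split_row[7])
            except ValueError:
                return 0  # unexpected chanim_stats line
        if split_row and split_row[0].lower() == 'chanspec':
            header_seen = True
    return 0


if __name__ == '__main__':
    sample_chanim = ("chanspec tx inbss obss nocat nopkt doze txop goodtx badtx "
                     "glitch badplcp knoise idle timestamp\n"
                     "0xe06a 61 15 0 17 0 0 6 53 2 0 0 -91 65 343370578\n")
    print(channel_utilization_from_chanim(sample_chanim))  # 100 - 6 -> 94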
View File
@@ -14,8 +14,10 @@
"lf_mgr_ip": "192.168.100.116", "lf_mgr_ip": "192.168.100.116",
"lf_mgr_port": "8080", "lf_mgr_port": "8080",
"dut_name": "ASUSRT-AX88U", "dut_name": "ASUSRT-AX88U",
"dut_bssid": "3c:7c:3f:55:4d:64", "dut_bssid_2g": "3c:7c:3f:55:4d:60",
"test_timeout": 200, "dut_bssid_5g": "3c:7c:3f:55:4d:64",
"dut_sw": "3.0.0.4.386_42820",
"test_timeout": 300,
"load_blank_db": false, "load_blank_db": false,
"load_factory_default_db": true, "load_factory_default_db": true,
"load_custom_db": false, "load_custom_db": false,
@@ -75,34 +77,155 @@
"suite_l3":{ "suite_l3":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"skip","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"} "test_l3_longevity":{"enabled":"TRUE","load_db":"skip","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
}, },
"suite_wc_dp":{ "auto_suite":{
"CT-US-001_create_chamberview_dut_1":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview_dut.py","args":"", "CT-US-001_create_chamberview_dut_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[ "args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME", " --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64'", " --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64'", " --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version '3.5.4' --hw_version 5.12.14+ --serial_num ct523c-3b7b --model_num DUT_NAME" " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
] ]},
}, "CT-US-001_create_chamberview_ap":{
"CT-US-001_create_chamberview_1":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview.py","args":"", "enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[ "args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario", " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ", " --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ", " --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ", " --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\"" " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
] ]
}, },
"CT-US-001_wifi_capacity_ATH10K(9984)":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"", "CT-US-001_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[ "args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct", " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000", " --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan1", " --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 4 --max_stations_5 32 --max_stations_dual 4 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG", " --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME" " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
] ]
}, },
"CT-US-001_dataplane_ATH10K(9984)":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"", "CT-US-001_lf_ap_auto_test1": {
"enabled": "FALSE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_2G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_lf_ap_auto_test_2": {
"enabled": "FALSE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{
"enabled":"TRUE",
"load_db":"skip",
"command":"ghost_profile.py",
"args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
},
"suite_wc_dp":{
"CT-US-001_create_chamberview_dut_asus11ax_5":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_wiphy0_wiphy1_sta128":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_wiphy0_wiphy1_ATH10K(9984)_sta128":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_wiphy1_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_dataplane_wiphy1_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[ "args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt", " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan1", " --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan1",
@@ -113,24 +236,35 @@
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME" " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
] ]
}, },
"CT-US-001_create_chamberview_dut_2":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview_dut.py","args":"", "CT-US-001_create_chamberview_dut_asus11ax_5_2":{
"enabled":"FALSE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[ "args_list":[
"--lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME", "--lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64'", " --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64'", " --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version '3.5.4' --hw_version 5.12.14+ --serial_num ct523c-3b7b --model_num DUT_NAME" " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
] ]
}, },
"CT-US-001_create_chamberview_2":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview.py","args":"", "CT-US-001_create_chamberview_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[ "args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario", " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ", " --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ", " --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\" " " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\" "
] ]
}, },
"CT-US-001_wifi_capacity_AX210":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"", "CT-US-001_wifi_capacity_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[ "args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct", " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000", " --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
@@ -139,7 +273,11 @@
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME" " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
] ]
}, },
"CT-US-001_dataplane_AX210":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"", "CT-US-001_dataplane_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[ "args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt", " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan3", " --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan3",
@@ -150,6 +288,48 @@
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME" " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
] ]
}, },
"CT-US-001_create_chamberview_dut_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-001_create_chamberview_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-001_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"", "GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[ "args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER", " --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
View File
@@ -0,0 +1,97 @@
{
"test_parameters":{
"test_bed": "CT-US-001",
"lf_mgr_ip": "192.168.100.116",
"lf_mgr_port": "8080",
"dut_name": "ASUSRT-AX88U",
"dut_bssid_5g": "3c:7c:3f:55:4d:64",
"dut_sw": "3.0.0.4.386_42820",
"test_timeout": 300,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.95.6",
"email_list_test": "chuck.rekiere@candelatech.com",
"host_ip_test": "192.168.95.6",
"email_title_txt": "Lanforge QA Testing CT-US-001 Scripting",
"email_txt": "Lanforge QA Testing CT-US-001 Scripting"
},
"test_network":{
"http_test_ip": "10.40.0.10",
"ftp_test_ip": "10.40.0.10",
"test_ip": "192.168.0.104"
},
"test_generic":{
"radio_used": "wiphy1",
"ssid_used": "asus11ax-5",
"ssid_pw_used": "hello123",
"security_used": "wpa2",
"num_sta": 4,
"col_names": "name,tx_byptes,rx_bytes,dropped",
"upstream_port": "eth2"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"4","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"4","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"}
},
"test_suites":{
"suite_short":{
"create_l3":{"enabled":"TRUE","command":"create_l4.py","args":"--mgr 192.168.100.116 --radio RADIO_USED --ssid SSID_USED --passwd SSID_PW_USED --security SECURITY_USED --debug"},
"test_l3_longevity":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--mgr 192.168.100.116 --test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"suite_l3":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"NONE","command":"test_l3_longevity.py","args":"--mgr 192.168.100.116 --test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"suite_l3r":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"NONE","command":"test_l3_longevity.py","args":"--mgr 192.168.100.116 --test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000 --local_lf_report_dir REPORT_PATH"}
},
"suite_daily":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"NONE","command":"test_l3_longevity.py","args":"--mgr 192.168.100.116 --test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"},
"example_security_connection0":{"enabled":"TRUE","command":"example_security_connection.py","args":"--mgr 192.168.100.116 --num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"},
"example_security_connection1":{"enabled":"TRUE","command":"example_security_connection.py","args":"--mgr 192.168.100.116 --num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"},
"example_security_connection2":{"enabled":"TRUE","command":"example_security_connection.py","args":"--mgr 192.168.100.116 --num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"},
"example_security_connection3":{"enabled":"TRUE","command":"example_security_connection.py","args":"--mgr 192.168.100.116 --num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"},
"sta_connect2":{"enabled":"FALSE","command":"sta_connect2.py","args":"--mgr 192.168.100.116 --dut_ssid asus11ax-5 --dut_passwd hello123 --dut_security wpa2"},
"sta_connect_example":{"enabled":"FALSE","command":"sta_connect_example.py","args":"--mgr 192.168.100.116 "},
"test_fileio":{"enabled":"TRUE","command":"test_fileio.py","args":"--mgr 192.168.100.116 --macvlan_parent eth2 --num_ports 3 --use_macvlans --first_mvlan_ip 192.168.92.13 --netmask 255.255.255.0 --test_duration 30s --gateway 192.168.92.1"},
"test_generic0":{"enabled":"FALSE","command":"test_generic.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type lfping --dest 10.40.0.1 --debug"},
"test_generic1":{"enabled":"FALSE","command":"test_generic.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type speedtest --speedtest_min_up 20 --speedtest_min_dl 20 --speedtest_max_ping 150 --security wpa2 --debug"},
"test_generic2":{"enabled":"FALSE","command":"test_generic.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type iperf3 --debug"},
"test_generic3":{"enabled":"FALSE","command":"test_generic.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type lfcurl --dest 10.40.0.1 --file_output /home/lanforge/Documents/lfcurl_output.txt --debug"},
"testgroup":{"enabled":"FALSE","command":"testgroup.py","args":"--mgr 192.168.100.116 --group_name group1 --add_group --list_groups --debug"},
"testgroup5":{"enabled":"FALSE","command":"testgroup.py","args":"--mgr 192.168.100.116 --num_stations 4 --ssid lanforge --passwd password --security wpa2 --radio wiphy0 --group_name group0 --add_group"},
"test_ip_connection-ipv4":{"enabled":"TRUE","command":"test_ip_connection.py","args":"--mgr 192.168.100.116 --radio wiphy1 --num_stations 4 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"test_ip_variable_time0-ipv4":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format excel --layer3_cols name,tx_bytes,rx_bytes,dropped --traffic_type lf_udp --debug"},
"test_ip_variable_time1-ipv4":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format csv --layer3_cols name,tx_bytes,rx_bytes,dropped --traffic_type lf_udp --debug"},
"test_ip_connection-ipv6":{"enabled":"FALSE","command":"test_ip_connection.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --ipv6 --debug"},
"test_ip_variable_time0-ipv6":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format excel --layer3_cols name,tx_bytes,rx_bytes,dropped --ipv6 --traffic_type lf_udp --debug"},
"test_ip_variable_time1-ipv6":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format csv --layer3_cols name,tx_bytes,rx_bytes,dropped --ipv6 --traffic_type lf_udp --debug"},
"test_l4_bytes-rd":{"enabled":"TRUE","command":"test_l4.py","args":"--mgr 192.168.100.116 --radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --test_type bytes-rd --test_duration 15s --url 'dl http://10.40.0.1 /dev/null' --debug"},
"test_l4_bytes-wr":{"enabled":"FALSE","command":"test_l4.py","args":"--mgr 192.168.100.116 --radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --test_type bytes-wr --test_duration 15s --url 'ul http://10.40.0.1' --debug"},
"test_l4_urls_s":{"enabled":"TRUE","command":"test_l4.py","args":"--mgr 192.168.100.116 --radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --test_type urls --test_duration 15s --requests_per_ten 600 --target_per_ten 600 --url 'dl http://10.40.0.1 /dev/null' --debug"},
"test_l4_ftp_bytes-rd":{"enabled":"TRUE","command":"test_l4.py","args":"--mgr 192.168.100.116 --radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type bytes-rd --test_duration 15s --url 'dl ftp://10.40.0.1 /dev/null' --debug"},
"test_l4_ftp_bytes-wr":{"enabled":"FALSE","command":"test_l4.py","args":"--mgr 192.168.100.116 --radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type bytes-wr --test_duration 15s --url 'ul ftp://10.40.0.1' --debug"},
"test_l4_ftp_urls_s":{"enabled":"TRUE","command":"test_l4.py","args":"--mgr 192.168.100.116 --radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type urls --requests_per_ten 600 --target_per_ten 600 --test_duration 15s --url 'dl ftp://10.40.0.1 /dev/null' --debug"},
"test_l3_longevity_1":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--mgr 192.168.100.116 --test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy0,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"},
"test_l3_powersave_traffic":{"enabled":"FALSE","command":"test_l3_powersave_traffic.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"test_status_msg":{"enabled":"TRUE","command":"test_status_msg.py","args":"--mgr 192.168.100.116 --action run_test"},
"test_wanlink":{"enabled":"TRUE","command":"test_wanlink.py","args":"--mgr 192.168.100.116 --debug"},
"create_bridge":{"enabled":"TRUE","command":"create_bridge.py","args":"--mgr 192.168.100.116 --radio wiphy1 --upstream_port eth2 --target_device sta0000 --debug"},
"create_l3":{"enabled":"TRUE","command":"create_l3.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_l4":{"enabled":"TRUE","command":"create_l4.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_macvlan":{"enabled":"TRUE","command":"create_macvlan.py","args":"--mgr 192.168.100.116 --radio wiphy1 --macvlan_parent eth2 --debug"},
"create_station":{"enabled":"TRUE","command":"create_station.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_vap":{"enabled":"TRUE","command":"create_vap.py","args":"--mgr 192.168.100.116 --radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_qvlan":{"enabled":"TRUE","command":"create_qvlan.py","args":"--mgr 192.168.100.116 --radio wiphy1 --qvlan_parent eth2"},
"wlan_capacity_calculator1":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11abg -t Voice -p 48 -m 106 -e WEP -q Yes -b 1 2 5.5 11 -pre Long -s N/A -co G.711 -r Yes -c Yes"},
"wlan_capacity_calculator2":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11n -t Voice -d 17 -ch 40 -gu 800 -high 9 -e WEP -q Yes -ip 5 -mc 42 -b 6 9 12 24 -m 1538 -co G.729 -pl Greenfield -cw 15 -r Yes -c Yes"},
"wlan_capacity_calculator3":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11ac -t Voice -d 9 -spa 3 -ch 20 -gu 800 -high 1 -e TKIP -q Yes -ip 3 -mc 0 -b 6 12 24 54 -m 1518 -co Greenfield -cw 15 -rc Yes"}
}
}
}
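Every suite file in this commit follows the same shape: test_suites maps a suite name to test entries, each carrying an "enabled" flag stored as the string "TRUE" or "FALSE" plus a command and its args. As a quick sanity check outside the harness, the enabled tests of a suite can be listed with a few lines of Python; this is a sketch only, not lf_check.py, and the file name used here is made up.

import json

def enabled_tests(config_path, suite_name):
    # "enabled" is a string flag ("TRUE"/"FALSE") in these suite files
    with open(config_path) as f:
        cfg = json.load(f)
    suite = cfg["test_suites"][suite_name]
    return {name: entry["command"]
            for name, entry in suite.items()
            if entry.get("enabled", "FALSE").upper() == "TRUE"}

if __name__ == '__main__':
    for name, command in enabled_tests("ct_us_001_scripts.json", "suite_daily").items():
        print(name, "->", command)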
View File
@@ -14,7 +14,7 @@
"lf_mgr_ip": "192.168.100.200", "lf_mgr_ip": "192.168.100.200",
"lf_mgr_port": "8080", "lf_mgr_port": "8080",
"dut_name": "Ruckus-R750", "dut_name": "Ruckus-R750",
"dut_bssid": "4c:b1:cd:18:e8:ec", "dut_bssid_5g": "4c:b1:cd:18:e8:ec",
"test_timeout": 1200, "test_timeout": 1200,
"load_blank_db": false, "load_blank_db": false,
"load_factory_default_db": true, "load_factory_default_db": true,
@@ -22,7 +22,7 @@
"custom_db": "DFLT_ETH1_GEN", "custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com", "email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.100.201", "host_ip_production": "192.168.100.201",
"email_list_test": "chuck.rekiere@candelatech.com,logan.lipke@candelatech.com,matthew.stidham@candelatech.com", "email_list_test": "chuck.rekiere@candelatech.com",
"host_ip_test": "192.168.100.201", "host_ip_test": "192.168.100.201",
"email_title_txt": "Lanforge QA Testing CT-US-002", "email_title_txt": "Lanforge QA Testing CT-US-002",
"email_txt": "Lanforge QA Testing CT-US-002" "email_txt": "Lanforge QA Testing CT-US-002"
@@ -39,7 +39,7 @@
"security_used": "wpa2", "security_used": "wpa2",
"num_sta": 4, "num_sta": 4,
"col_names": "name,tx_byptes,rx_bytes,dropped", "col_names": "name,tx_byptes,rx_bytes,dropped",
"upstream_port": "eth2" "upstream_port": "1.1.eth2"
}, },
"test_database":{ "test_database":{
"database_config": "True", "database_config": "True",
@@ -73,17 +73,25 @@
},
"test_suites":{
"suite_two":{
"test_l3_longevity":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port UPSTREAM_PORT --radio 'radio==wiphy1,stations==4,ssid==ct523c-vap,ssid_pw==ct523c-vap,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"auto_suite":{
"CT-US-002_create_chamberview_dut_1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
@@ -95,21 +103,26 @@
"CT-US-002_lf_ap_auto_test": { "CT-US-002_lf_ap_auto_test": {
"enabled": "TRUE", "enabled": "TRUE",
"command": "lf_ap_auto_test.py", "command": "lf_ap_auto_test.py",
"timeout":"4800",
"args": "", "args": "",
"args_list":[ "args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge", " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth1", " --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID (1)'", " --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1", " --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 0", " --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 1 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,", " --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1", " --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH", " --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG", " --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME" " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
] ]
}, },
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"", "GHOST":{
"enabled":"TRUE",
"load_db":"skip",
"command":"ghost_profile.py",
"args":"",
"args_list":[ "args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER", " --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN", " --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
@@ -120,35 +133,73 @@
}
},
"suite_wc_dp":{
"CT-US-002_create_chamberview_dut_1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_ATH10k_sta64":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_wifi_capacity_ATH10k(9984)":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream UPSTREAM_PORT --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-002_create_chamberview_dut_ATH10K_wan1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_ATH10k_wan1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_dataplane_ATH10k(9984) CT-US-002":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[ "args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt", " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut DUT_NAME --duration 30s --station 1.1.wlan1", " --config_name test_con --upstream UPSTREAM_PORT --dut DUT_NAME --duration 30s --station 1.1.wlan1",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ", " --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' ", " --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' ",
" --raw_line 'spatial_streams: 4' --pull_report --local_lf_report_dir REPORT_PATH ", " --raw_line 'spatial_streams: 4' --pull_report --local_lf_report_dir REPORT_PATH ",
@@ -156,35 +207,73 @@
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME" " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
] ]
}, },
"CT-US-002_create_chamberview_dut_2":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview_dut.py","args":"", "CT-US-002_create_chamberview_dut_2":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[ "args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME", " --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=4c:b1:cd:18:e8:ec'", " --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=4c:b1:cd:18:e8:ec'", " --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version '3.5.4' --hw_version 5.12.14+ --serial_num ct523c-3b7b --model_num DUT_NAME" " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]}, ]},
"CT-US-002_create_chamberview_2":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview.py","args":"", "CT-US-002_create_chamberview_AX200_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[ "args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario", " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ", " --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ", " --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" " " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
] ]
}, },
"CT-US-002_wifi_capacity_AX200 CT-US-002":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"", "CT-US-002_wifi_capacity_AX200 CT-US-002":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[ "args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct", " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000", " --upstream UPSTREAM_PORT --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan4 ", " --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan4 ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG", " --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME" " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
] ]
}, },
"CT-US-002_dataplane_AX200":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"", "CT-US-002_create_chamberview_dut_AX200_wan1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_AX200_wan1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_dataplane_AX200":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[ "args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt", " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut DUT_NAME --duration 30s --station 1.1.wlan4", " --config_name test_con --upstream UPSTREAM_PORT --dut DUT_NAME --duration 30s --station 1.1.wlan4",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ", " --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'", " --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 4' --pull_report --local_lf_report_dir REPORT_PATH", " --raw_line 'spatial_streams: 4' --pull_report --local_lf_report_dir REPORT_PATH",
@@ -192,20 +281,46 @@
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME" " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
] ]
}, },
"CT-US-002_create_chamberview_dut_auto":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_auto":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_lf_ap_auto_test": { "CT-US-002_lf_ap_auto_test": {
"enabled": "TRUE", "enabled": "FALSE",
"command": "lf_ap_auto_test.py", "command": "lf_ap_auto_test.py",
"timeout": "1200",
"args": "", "args": "",
"args_list":[ "args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge", " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
"--instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth1", " --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
"--dut5_0 'DUT_NAME lanforge DUT_BSSID (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID (1)'", " --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
"--max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1", " --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
"--radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1", " --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
"--set 'Multi Band Performance' 1 --set 'Skip 2.4 Ghz Tests' 1 --pull_report --local_lf_report_dir REPORT_PATH", " --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
"--test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG", " --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
"--influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME" " --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
] ]
}, },
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"", "GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",

View File

@@ -0,0 +1,327 @@
{
"ct_us_004":{
"Notes":[
"The json is used to orchastrate the tests to be run on testbed ct_us_004",
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file. so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
"The replacement is loosely coupled so the upper and lower case convention is used",
"to identify replaced strings in the lf_check.py code."
]
},
"test_parameters":{
"test_bed": "CT-US-004",
"lf_mgr_ip": "192.168.100.194",
"lf_mgr_port": "8080",
"dut_name": "NetgearAX12",
"dut_bssid_5g": "94:a6:7e:54:d4:33",
"test_timeout": 300,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.100.201",
"email_list_test": "chuck.rekiere@candelatech.com",
"host_ip_test": "192.168.100.201",
"email_title_txt": "Lanforge QA Testing CT-US-004",
"email_txt": "Lanforge QA Testing CT-US-004 "
},
"test_network":{
"http_test_ip": "10.40.0.10",
"ftp_test_ip": "10.40.0.10",
"test_ip": "192.168.0.104"
},
"test_generic":{
"radio_used": "wiphy1",
"ssid_used": "NETGEAR59-5G",
"ssid_pw_used": "crispynest798",
"security_used": "wpa2",
"num_sta": 1,
"col_names": "name,tx_byptes,rx_bytes,dropped",
"upstream_port": "eth2"
},
"test_database":{
"database_config": "True",
"database_host": "192.168.100.201",
"database_port": "8086",
"database_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
"database_org": "Candela",
"database_bucket": "lanforge_qa_testing",
"dut_set_name": "DUT_NAME NetgearAX12",
"database_tag": "testbed CT-US-004",
"test_rig": "CT-US-004"
},
"test_dashboard":{
"dashboard_config": "True",
"dashboard_host": "192.168.100.201",
"dashboard_token": "eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ=="
},
"test_blog":{
"blog_config": "True",
"blog_host": "192.168.100.153",
"blog_token": "60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742",
"blog_authors": "Matthew",
"blog_customer": "candela",
"blog_user_push": "lanforge",
"blog_password_push": "lanforge",
"blog_flag": "--kpi_to_ghost"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"1","SSID":"NETGEAR59-5G","PASSWD":"crispynest798","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"1","SSID":"NETGEAR59-5G","PASSWD":"crispynest798","SECURITY":"wpa2"}
},
"test_suites":{
"suite_l3":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"skip","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==NETGEAR59-5G,ssid_pw==crispynest798,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"auto_suite":{
"CT-US-004_create_chamberview_dut_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-004_create_chamberview_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-004_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_lf_ap_auto_test_2": {
"enabled": "FALSE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{
"enabled":"TRUE",
"load_db":"skip",
"command":"ghost_profile.py",
"args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
},
"suite_wc_dp":{
"CT-US-004_create_chamberview_dut_NetgearAX12":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-004_create_chamberview_wiphy1_sta64":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 32 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-004_wifi_capacity_wiphy1_ATH10K(9984)_sta64":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_create_chamberview_wiphy1_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-004_dataplane_wiphy1_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan1",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_create_chamberview_dut_NetgearAX12_5_2":{
"enabled":"FALSE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
"--lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-004_create_chamberview_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\" "
]
},
"CT-US-004_wifi_capacity_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan3",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_dataplane_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan3",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_create_chamberview_dut_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-004_create_chamberview_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 32 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-004_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy1 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
}
}
}
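
The Notes at the top of this file describe how lf_check.py replaces the capitalized tokens (LF_MGR_IP, DUT_NAME, DATABASE_TOKEN, ...) with the matching lower-case keys read from this json. A minimal sketch of that substitution, assuming a flat dict of the json values (the name substitute_placeholders and the config argument are illustrative, not part of the repo):

def substitute_placeholders(args_list, config):
    # join the args_list into one string, then replace each UPPER_CASE token
    # with the value of its lower_case key, e.g. LF_MGR_IP <- lf_mgr_ip
    args = " ".join(args_list)
    for key, value in config.items():
        args = args.replace(key.upper(), str(value))
    return args

# substitute_placeholders([" --lfmgr LF_MGR_IP --port LF_MGR_PORT"],
#                         {"lf_mgr_ip": "192.168.100.194", "lf_mgr_port": "8080"})
# -> ' --lfmgr 192.168.100.194 --port 8080'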

View File

@@ -31,10 +31,33 @@ Using .json:
1. copy lf_check.json to <file name>.json, this will avoid .json being overwritten on git pull 1. copy lf_check.json to <file name>.json, this will avoid .json being overwritten on git pull
2. update lf_check.json to enable (TRUE) tests to be run in the test suite, the default TEST_DICTIONARY 2. update lf_check.json to enable (TRUE) tests to be run in the test suite, the default TEST_DICTIONARY
NOTES: getting radio information:
1. (Using Curl) curl -H 'Accept: application/json' http://localhost:8080/radiostatus/all | json_pp | less
2. (using Python) response = self.json_get("/radiostatus/all")
GENERIC NOTES: GENERIC NOTES:
Starting LANforge:
On local or remote system: /home/lanforge/LANforgeGUI/lfclient.bash -cli-socket 3990 -s LF_MGR
On a local system, -s LF_MGR defaults to localhost if not provided
On LANforge ~lanforge/.config/autostart/LANforge-auto.desktop is used to restart lanforge on boot.
http://www.candelatech.com/cookbook.php?vol=misc&book=Automatically+starting+LANforge+GUI+on+login
1. add server (telnet localhost 4001) build info, GUI build sha, and Kernel version to the output. 1. add server (telnet localhost 4001) build info, GUI build sha, and Kernel version to the output.
A. for build information on LANforgeGUI : /home/lanforge ./btserver --version A. for build information on LANforgeGUI : /home/lanforge ./btserver --version
B. for the kernel version uname -r (just version), uname -a build date B. for the kernel version uname -r (just version), uname -a build date
C. for getting the radio firmware: ethtool -i wlan0
# may need to build in a testbed reboot at the beginning of a day's testing...
# seeing some dhcp exhaustion and high latency values for testbeds that have been running
# for a while that appear to clear up once the entire testbed is power cycled
# issue a shutdown command on the lanforge(s)
# ssh root@lanforge reboot (need to verify) or do a shutdown
# send curl command to remote power switch to reboot testbed
# curl -s http://admin:lanforge@192.168.100.237/outlet?1=CCL -o /dev/null 2>&1
#
''' '''
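# A small self-contained sketch (assumed helper, not part of lf_check.py) of the
# radio-status query mentioned in the notes above; it is equivalent to the curl
# example and assumes the requests package is installed.
import requests

def get_radio_status(lf_mgr_ip, lf_mgr_port=8080):
    url = "http://{ip}:{port}/radiostatus/all".format(ip=lf_mgr_ip, port=lf_mgr_port)
    response = requests.get(url, headers={"Accept": "application/json"}, timeout=10)
    response.raise_for_status()
    return response.json()

# example: get_radio_status("192.168.100.194") returns the same json the GUI serves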
import datetime import datetime
@@ -59,6 +82,7 @@ import shutil
from os import path from os import path
import shlex import shlex
import paramiko import paramiko
import pandas as pd
# lf_report is from the parent of the current file # lf_report is from the parent of the current file
dir_path = os.path.dirname(os.path.realpath(__file__)) dir_path = os.path.dirname(os.path.realpath(__file__))
@@ -124,6 +148,7 @@ class lf_check():
self.csv_results_column_headers = "" self.csv_results_column_headers = ""
self.logger = logging.getLogger(__name__) self.logger = logging.getLogger(__name__)
self.test_timeout = 120 self.test_timeout = 120
self.test_timeout_default = 120
self.use_blank_db = "FALSE" self.use_blank_db = "FALSE"
self.use_factory_default_db = "FALSE" self.use_factory_default_db = "FALSE"
self.use_custom_db = "FALSE" self.use_custom_db = "FALSE"
@@ -133,13 +158,22 @@ class lf_check():
self.host_ip_test = None self.host_ip_test = None
self.email_title_txt = "" self.email_title_txt = ""
self.email_txt = "" self.email_txt = ""
# lanforge configuration
self.lf_mgr_ip = "192.168.0.102" self.lf_mgr_ip = "192.168.0.102"
self.lf_mgr_port = "" self.lf_mgr_port = ""
self.lf_mgr_user = "lanforge" self.lf_mgr_user = "lanforge"
self.lf_mgr_pass = "lanforge" self.lf_mgr_pass = "lanforge"
self.dut_name = "" # "ASUSRT-AX88U" note this is not dut_set_name
self.dut_bssid = "" # "3c:7c:3f:55:4d:64" - this is the mac for the radio this may be seen with a scan
# dut configuration
self.dut_name = "DUT_NAME_NOT_SET" # "ASUSRT-AX88U" note this is not dut_set_name
self.dut_hw = "DUT_HW_NOT_SET"
self.dut_sw = "DUT_SW_NOT_SET"
self.dut_model = "DUT_MODEL_NOT_SET"
self.dut_serial = "DUT_SERIAL_NOT_SET"
self.dut_bssid_2g = "BSSID_2G_NOT_SET" # "3c:7c:3f:55:4d:64" - this is the mac for the 2.4G radio this may be seen with a scan
self.dut_bssid_5g = "BSSID_5G_NOT_SET" # "3c:7c:3f:55:4d:64" - this is the mac for the 5G radio this may be seen with a scan
self.dut_bssid_6g = "BSSID_6G_NOT_SET" # "3c:7c:3f:55:4d:64" - this is the mac for the 6G radio this may be seen with a scan
# NOTE: My influx token is unlucky and starts with a '-', but using the syntax below # with '=' right after the argument keyword works as hoped. # NOTE: My influx token is unlucky and starts with a '-', but using the syntax below # with '=' right after the argument keyword works as hoped.
# --influx_token= # --influx_token=
@@ -184,6 +218,20 @@ class lf_check():
scripts_git_sha = commit_hash.decode('utf-8', 'ignore') scripts_git_sha = commit_hash.decode('utf-8', 'ignore')
return scripts_git_sha return scripts_git_sha
def get_lanforge_node_version(self):
ssh = paramiko.SSHClient() # creating ssh client object; we use this object to connect to the router
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # automatically adds the missing host key
# ssh.connect(self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, banner_timeout=600)
ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass,
banner_timeout=600)
stdin, stdout, stderr = ssh.exec_command('uname -n')
lanforge_node_version = stdout.readlines()
# print('\n'.join(output))
lanforge_node_version = [line.replace('\n', '') for line in lanforge_node_version]
ssh.close()
time.sleep(1)
return lanforge_node_version
def get_lanforge_kernel_version(self): def get_lanforge_kernel_version(self):
ssh = paramiko.SSHClient() # creating ssh client object; we use this object to connect to the router ssh = paramiko.SSHClient() # creating ssh client object; we use this object to connect to the router
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # automatically adds the missing host key ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # automatically adds the missing host key
@@ -191,11 +239,12 @@ class lf_check():
ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass,
banner_timeout=600) banner_timeout=600)
stdin, stdout, stderr = ssh.exec_command('uname -r') stdin, stdout, stderr = ssh.exec_command('uname -r')
output = stdout.readlines() lanforge_kernel_version = stdout.readlines()
# print('\n'.join(output)) # print('\n'.join(output))
lanforge_kernel_version = [line.replace('\n', '') for line in lanforge_kernel_version]
ssh.close() ssh.close()
time.sleep(1) time.sleep(1)
return output return lanforge_kernel_version
def get_lanforge_gui_version(self): def get_lanforge_gui_version(self):
output = "" output = ""
@@ -204,11 +253,12 @@ class lf_check():
ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass,
banner_timeout=600) banner_timeout=600)
stdin, stdout, stderr = ssh.exec_command('./btserver --version | grep Version') stdin, stdout, stderr = ssh.exec_command('./btserver --version | grep Version')
output = stdout.readlines() lanforge_gui_version = stdout.readlines()
# print('\n'.join(output)) # print('\n'.join(output))
lanforge_gui_version = [line.replace('\n', '') for line in lanforge_gui_version]
ssh.close() ssh.close()
time.sleep(1) time.sleep(1)
return output return lanforge_gui_version
# NOT complete : will send the email results # NOT complete : will send the email results
def send_results_email(self, report_file=None): def send_results_email(self, report_file=None):
@@ -393,6 +443,7 @@ blog: http://{blog}:2368
def read_test_parameters(self): def read_test_parameters(self):
if "test_timeout" in self.json_data["test_parameters"]: if "test_timeout" in self.json_data["test_parameters"]:
self.test_timeout = self.json_data["test_parameters"]["test_timeout"] self.test_timeout = self.json_data["test_parameters"]["test_timeout"]
self.test_timeout_default = self.test_timeout
else: else:
self.logger.info("test_timeout not in test_parameters json") self.logger.info("test_timeout not in test_parameters json")
exit(1) exit(1)
@@ -457,10 +508,34 @@ blog: http://{blog}:2368
self.dut_name = self.json_data["test_parameters"]["dut_name"] self.dut_name = self.json_data["test_parameters"]["dut_name"]
else: else:
self.logger.info("dut_name not in test_parameters json") self.logger.info("dut_name not in test_parameters json")
if "dut_bssid" in self.json_data["test_parameters"]: if "dut_hw" in self.json_data["test_parameters"]:
self.dut_bssid = self.json_data["test_parameters"]["dut_bssid"] self.dut_hw = self.json_data["test_parameters"]["dut_hw"]
else: else:
self.logger.info("dut_bssid not in test_parameters json") self.logger.info("dut_hw not in test_parameters json")
if "dut_sw" in self.json_data["test_parameters"]:
self.dut_sw = self.json_data["test_parameters"]["dut_sw"]
else:
self.logger.info("dut_sw not in test_parameters json")
if "dut_model" in self.json_data["test_parameters"]:
self.dut_model = self.json_data["test_parameters"]["dut_model"]
else:
self.logger.info("dut_model not in test_parameters json")
if "dut_serial" in self.json_data["test_parameters"]:
self.dut_serial = self.json_data["test_parameters"]["dut_serial"]
else:
self.logger.info("dut_serial not in test_parameters json")
if "dut_bssid_2g" in self.json_data["test_parameters"]:
self.dut_bssid_2g = self.json_data["test_parameters"]["dut_bssid_2g"]
else:
self.logger.info("dut_bssid_2G not in test_parameters json")
if "dut_bssid_5g" in self.json_data["test_parameters"]:
self.dut_bssid_5g = self.json_data["test_parameters"]["dut_bssid_5g"]
else:
self.logger.info("dut_bssid_5g not in test_parameters json")
if "dut_bssid_6g" in self.json_data["test_parameters"]:
self.dut_bssid_6g = self.json_data["test_parameters"]["dut_bssid_6g"]
else:
self.logger.info("dut_bssid_6g not in test_parameters json")
def read_test_network(self): def read_test_network(self):
if "http_test_ip" in self.json_data["test_network"]: if "http_test_ip" in self.json_data["test_network"]:
@@ -737,7 +812,7 @@ blog: http://{blog}:2368
self.logger.info("test: {} skipped".format(test)) self.logger.info("test: {} skipped".format(test))
# load the default database # load the default database
elif self.test_dict[test]['enabled'] == "TRUE": elif self.test_dict[test]['enabled'] == "TRUE":
# if args key has a value of an empty scring then need to manipulate the args_list to args # if args key has a value of an empty string then need to manipulate the args_list to args
# a list does not have replace, only strings do, so args_list will be joined and converted to a string and placed # a list does not have replace, only strings do, so args_list will be joined and converted to a string and placed
# in args. Then the replace below will work. # in args. Then the replace below will work.
if self.test_dict[test]['args'] == "": if self.test_dict[test]['args'] == "":
@@ -770,8 +845,26 @@ blog: http://{blog}:2368
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('LF_MGR_IP', self.lf_mgr_ip) self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('LF_MGR_IP', self.lf_mgr_ip)
if 'LF_MGR_PORT' in self.test_dict[test]['args']: if 'LF_MGR_PORT' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('LF_MGR_PORT', self.lf_mgr_port) self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('LF_MGR_PORT', self.lf_mgr_port)
if 'DUT_NAME' in self.test_dict[test]['args']: if 'DUT_NAME' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('DUT_NAME', self.dut_name) self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('DUT_NAME', self.dut_name)
if 'DUT_HW' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('DUT_HW', self.dut_hw)
if 'DUT_SW' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('DUT_SW', self.dut_sw)
if 'DUT_MODEL' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('DUT_MODEL', self.dut_model)
if 'DUT_SERIAL' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('DUT_SERIAL', self.dut_serial)
if 'DUT_BSSID_2G' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('DUT_BSSID_2G',
self.dut_bssid_2g)
if 'DUT_BSSID_5G' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('DUT_BSSID_5G',
self.dut_bssid_5g)
if 'DUT_BSSID_6G' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('DUT_BSSID_6G',
self.dut_bssid_6g)
if 'RADIO_USED' in self.test_dict[test]['args']: if 'RADIO_USED' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('RADIO_USED', self.radio_lf) self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('RADIO_USED', self.radio_lf)
@@ -852,6 +945,12 @@ blog: http://{blog}:2368
if 'BLOG_FLAG' in self.test_dict[test]['args']: if 'BLOG_FLAG' in self.test_dict[test]['args']:
self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('BLOG_FLAG', self.blog_flag) self.test_dict[test]['args'] = self.test_dict[test]['args'].replace('BLOG_FLAG', self.blog_flag)
if 'timeout' in self.test_dict[test]:
self.logger.info("timeout : {}".format(self.test_dict[test]['timeout']))
self.test_timeout = int(self.test_dict[test]['timeout'])
else:
self.test_timeout = self.test_timeout_default
if 'load_db' in self.test_dict[test]: if 'load_db' in self.test_dict[test]:
self.logger.info("load_db : {}".format(self.test_dict[test]['load_db'])) self.logger.info("load_db : {}".format(self.test_dict[test]['load_db']))
if str(self.test_dict[test]['load_db']).lower() != "none" and str( if str(self.test_dict[test]['load_db']).lower() != "none" and str(
@@ -1029,6 +1128,13 @@ Example :
else: else:
print("EXITING: NOTFOUND TEST CONFIG : {} ".format(config_ini)) print("EXITING: NOTFOUND TEST CONFIG : {} ".format(config_ini))
exit(1) exit(1)
# Test-rig information
lanforge_node_version = 'NO_LF_NODE_VER'
scripts_git_sha = 'NO_GIT_SHA'
lanforge_kernel_version = 'NO_KERNEL_VER'
lanforge_gui_version = 'NO_LF_GUI_VER'
# select test suite # select test suite
test_suite = args.suite test_suite = args.suite
@@ -1067,25 +1173,7 @@ Example :
exit_code = process.wait() exit_code = process.wait()
git_sha = commit_hash.decode('utf-8', 'ignore') git_sha = commit_hash.decode('utf-8', 'ignore')
try: # set up logging
scripts_git_sha = check.get_scripts_git_sha()
print("git_sha {sha}".format(sha=scripts_git_sha))
except:
print("git_sha read exception ")
try:
lanforge_kernel_version = check.get_lanforge_kernel_version()
print("lanforge_kernel_version {kernel_ver}".format(kernel_ver=lanforge_kernel_version))
except:
print("lanforge_kernel_version exception")
try:
lanforge_gui_version = check.get_lanforge_gui_version()
print("lanforge_gui_version {gui_ver}".format(gui_ver=lanforge_gui_version))
except:
print("lanforge_gui_version exception")
# set up logging
logfile = args.logfile[:-4] logfile = args.logfile[:-4]
print("logfile: {}".format(logfile)) print("logfile: {}".format(logfile))
logfile = "{}-{}.log".format(logfile, current_time) logfile = "{}-{}.log".format(logfile, current_time)
@@ -1107,20 +1195,58 @@ Example :
check.read_config() check.read_config()
check.run_script_test() check.run_script_test()
# read lanforge # get sha and lanforge information for results
# Need to do this after reading the configuration
try:
scripts_git_sha = check.get_scripts_git_sha()
print("git_sha {sha}".format(sha=scripts_git_sha))
except:
print("git_sha read exception ")
try:
lanforge_node_version = check.get_lanforge_node_version()
print("lanforge_node_version {node_ver}".format(node_node=lanforge_node_version))
except:
print("lanforge_node_version exception")
try:
lanforge_kernel_version = check.get_lanforge_kernel_version()
print("lanforge_kernel_version {kernel_ver}".format(kernel_ver=lanforge_kernel_version))
except:
print("lanforge_kernel_version exception")
try:
lanforge_gui_version = check.get_lanforge_gui_version()
print("lanforge_gui_version {gui_ver}".format(gui_ver=lanforge_gui_version))
except:
print("lanforge_gui_version exception")
# LANforge and scripts config
lf_test_setup = pd.DataFrame({
'LANforge': lanforge_node_version,
'kernel version': lanforge_kernel_version,
'GUI version': lanforge_gui_version,
'scripts git sha': scripts_git_sha
})
# generate output reports # generate output reports
report.set_title("LF Check: lf_check.py") report.set_title("LF Check: lf_check.py")
report.build_banner() report.build_banner_left()
report.start_content_div() report.start_content_div2()
report.set_obj_html("Objective", "Run QA Tests")
report.build_objective()
report.set_text("LANforge")
report.build_text()
report.set_table_dataframe(lf_test_setup)
report.build_table()
report.set_table_title("LF Check Test Results") report.set_table_title("LF Check Test Results")
report.build_table_title() report.build_table_title()
report.set_text("lanforge-scripts git sha: {}".format(git_sha)) # report.set_text("lanforge-scripts git sha: {}".format(git_sha))
report.build_text() # report.build_text()
html_results = check.get_html_results() html_results = check.get_html_results()
report.set_custom_html(html_results) report.set_custom_html(html_results)
report.build_custom() report.build_custom()
report.build_footer_no_png() report.build_footer()
html_report = report.write_html_with_timestamp() html_report = report.write_html_with_timestamp()
print("html report: {}".format(html_report)) print("html report: {}".format(html_report))
try: try:
@@ -1140,6 +1266,8 @@ Example :
banner_dest_png = parent_report_dir + "/banner.png" banner_dest_png = parent_report_dir + "/banner.png"
CandelaLogo_src_png = report_path + "/CandelaLogo2-90dpi-200x90-trans.png" CandelaLogo_src_png = report_path + "/CandelaLogo2-90dpi-200x90-trans.png"
CandelaLogo_dest_png = parent_report_dir + "/CandelaLogo2-90dpi-200x90-trans.png" CandelaLogo_dest_png = parent_report_dir + "/CandelaLogo2-90dpi-200x90-trans.png"
CandelaLogo_small_src_png = report_path + "/candela_swirl_small-72h.png"
CandelaLogo_small_dest_png = parent_report_dir + "/candela_swirl_small-72h.png"
report_src_css = report_path + "/report.css" report_src_css = report_path + "/report.css"
report_dest_css = parent_report_dir + "/report.css" report_dest_css = parent_report_dir + "/report.css"
custom_src_css = report_path + "/custom.css" custom_src_css = report_path + "/custom.css"
@@ -1163,12 +1291,13 @@ Example :
print("check permissions on {lf_check_latest_html}".format(lf_check_latest_html=lf_check_latest_html)) print("check permissions on {lf_check_latest_html}".format(lf_check_latest_html=lf_check_latest_html))
shutil.copyfile(html_report, lf_check_html_report) shutil.copyfile(html_report, lf_check_html_report)
# copy banner and logo # copy banner and logo up one directory,
shutil.copyfile(banner_src_png, banner_dest_png) shutil.copyfile(banner_src_png, banner_dest_png)
shutil.copyfile(CandelaLogo_src_png, CandelaLogo_dest_png) shutil.copyfile(CandelaLogo_src_png, CandelaLogo_dest_png)
shutil.copyfile(report_src_css, report_dest_css) shutil.copyfile(report_src_css, report_dest_css)
shutil.copyfile(custom_src_css, custom_dest_css) shutil.copyfile(custom_src_css, custom_dest_css)
shutil.copyfile(font_src_woff, font_dest_woff) shutil.copyfile(font_src_woff, font_dest_woff)
shutil.copyfile(CandelaLogo_small_src_png, CandelaLogo_small_dest_png)
# print out locations of results # print out locations of results
print("lf_check_latest.html: " + lf_check_latest_html) print("lf_check_latest.html: " + lf_check_latest_html)

View File

@@ -1,17 +1,32 @@
{ {
"script_qa":{
"Notes":[
"The json is used to orchastrate the tests to be run on testbed ct_us_001",
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file. so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
"The replacement is loosely coupled so the upper and lower case convention is used",
"to identify replaced strings in the lf_check.py code."
]
},
"test_parameters":{ "test_parameters":{
"test_timeout": 200, "test_bed": "CT-US-001_QA",
"lf_mgr_ip": "192.168.100.116",
"lf_mgr_port": "8080",
"dut_name": "ASUSRT-AX88U",
"dut_bssid_5G": "3c:7c:3f:55:4d:64",
"dut_sw": "3.0.0.4.386_42820",
"test_timeout": 300,
"load_blank_db": false, "load_blank_db": false,
"load_factory_default_db": true, "load_factory_default_db": true,
"load_custom_db": false, "load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN", "custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com", "email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.95.6", "host_ip_production": "192.168.100.201",
"email_list_test": "chuck.rekiere@candelatech.com,logan.lipke@candelatech.com", "email_list_test": "chuck.rekiere@candelatech.com,logan.lipke@candelatech.com",
"host_ip_test": "192.168.95.6", "host_ip_test": "192.168.100.201",
"lf_mgr": "192.168.100.116", "email_title_txt": "Lanforge Script QA Testing CT-US-001",
"email_title_txt": "Lanforge QA Testing CT-US-001 - Scripts", "email_txt": "Lanforge Script QA Testing CT-US-001 "
"email_txt": "Lanforge QA Testing CT-US-001 - Scripts"
}, },
"test_network":{ "test_network":{
"http_test_ip": "10.40.0.10", "http_test_ip": "10.40.0.10",
@@ -25,7 +40,7 @@
"security_used": "wpa2", "security_used": "wpa2",
"num_sta": 4, "num_sta": 4,
"col_names": "name,tx_byptes,rx_bytes,dropped", "col_names": "name,tx_byptes,rx_bytes,dropped",
"upstream_port": "eth1" "upstream_port": "eth2"
}, },
"radio_dict":{ "radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"4","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"}, "RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"4","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"},
@@ -40,22 +55,53 @@
"test_l3_longevity":{"enabled":"TRUE","load_db":"NONE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"} "test_l3_longevity":{"enabled":"TRUE","load_db":"NONE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
}, },
"suite_daily":{ "suite_daily":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"NONE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}, "test_l3_longevity":{
"example_security_connection0":{"enabled":"TRUE","command":"example_security_connection.py","args":"--num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"}, "enabled":"TRUE",
"example_security_connection1":{"enabled":"TRUE","command":"example_security_connection.py","args":"--num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"}, "load_db":"NONE",
"example_security_connection2":{"enabled":"TRUE","command":"example_security_connection.py","args":"--num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"}, "command":"test_l3_longevity.py",
"example_security_connection3":{"enabled":"TRUE","command":"example_security_connection.py","args":"--num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"}, "args":"",
"sta_connect2":{"enabled":"TRUE","command":"sta_connect2.py","args":"--dut_ssid asus11ax-5 --dut_passwd hello123 --dut_security wpa2"}, "args_list":[
"sta_connect_example":{"enabled":"FALSE","command":"sta_connect_example.py","args":""}, " --test_duration 15s --polling_interval 5s --upstream_port UPSTREAM_PORT",
"test_fileio":{"enabled":"TRUE","command":"test_fileio.py","args":"--macvlan_parent eth2 --num_ports 3 --use_macvlans --first_mvlan_ip 192.168.92.13 --netmask 255.255.255.0 --test_duration 30s --gateway 192.168.92.1"}, " --radio 'radio==RADIO_USED,stations==4,ssid==SSID_USED,ssid_pw==SSID_PS_USED,security==SECURITY_USED'",
"test_generic0":{"enabled":"FALSE","command":"test_generic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type lfping --dest 10.40.0.1 --debug"}, " --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"
"test_generic1":{"enabled":"FALSE","command":"test_generic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type speedtest --speedtest_min_up 20 --speedtest_min_dl 20 --speedtest_max_ping 150 --security wpa2 --debug"}, ]
"test_generic2":{"enabled":"FALSE","command":"test_generic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type iperf3 --debug"}, },
"test_generic3":{"enabled":"FALSE","command":"test_generic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type lfcurl --dest 10.40.0.1 --file_output /home/lanforge/Documents/lfcurl_output.txt --debug"}, "example_security_connection0":{
"testgroup":{"enabled":"FALSE","command":"testgroup.py","args":"--group_name group1 --add_group --list_groups --debug"}, "enabled":"TRUE",
"testgroup5":{"enabled":"FALSE","command":"testgroup.py","args":"--num_stations 4 --ssid lanforge --passwd password --security wpa2 --radio wiphy0 --group_name group0 --add_group"}, "command":"example_security_connection.py",
"test_ip_connection-ipv4":{"enabled":"TRUE","command":"test_ip_connection.py","args":"--radio wiphy1 --num_stations 4 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"}, "args":"",
"test_ip_variable_time0-ipv4":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format excel --layer3_cols name,tx_bytes,rx_bytes,dropped --traffic_type lf_udp --debug"}, "args_list":[
" --num_stations 4 --ssid SSID_USED --passwd SSID_PW_USED --radio RADIO_USED --security SECURITY_USED --debug"
]
},
"sta_connect2":{
"enabled":"TRUE",
"command":"sta_connect2.py",
"args":" --dut_ssid SSID_USED --dut_passwd SSID_PW_USED --dut_security SECURITY_USED"
},
"test_fileio":{
"enabled":"TRUE",
"command":"test_fileio.py",
"args":"",
"args_list":[
" --macvlan_parent eth2 --num_ports 3 --use_macvlans --first_mvlan_ip 192.168.92.13",
" --netmask 255.255.255.0 --test_duration 30s --gateway 192.168.92.1"
]
},
"test_ip_connection-ipv4":{
"enabled":"TRUE",
"command":"test_ip_connection.py",
"args":"--radio RADIO_USED --num_stations 4 --ssid SSID_USED --passwd SSID_PS_USED --security SECURITY_USED --debug"},
"test_ip_variable_time0-ipv4":{
"enabled":"TRUE",
"command":"test_ip_variable_time.py",
"args":"",
"args_list":[
" --radio RADIO_USED --ssid SSID_USED --passwd SSID_PW_USED --security SECURITY_USED",
" --test_duration 15s --output_format excel --layer3_cols name,tx_bytes,rx_bytes,dropped",
" --traffic_type lf_udp --debug"
]
},
"test_ip_variable_time1-ipv4":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format csv --layer3_cols name,tx_bytes,rx_bytes,dropped --traffic_type lf_udp --debug"}, "test_ip_variable_time1-ipv4":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format csv --layer3_cols name,tx_bytes,rx_bytes,dropped --traffic_type lf_udp --debug"},
"test_ip_connection-ipv6":{"enabled":"FALSE","command":"test_ip_connection.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --ipv6 --debug"}, "test_ip_connection-ipv6":{"enabled":"FALSE","command":"test_ip_connection.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --ipv6 --debug"},
"test_ip_variable_time0-ipv6":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format excel --layer3_cols name,tx_bytes,rx_bytes,dropped --ipv6 --traffic_type lf_udp --debug"}, "test_ip_variable_time0-ipv6":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format excel --layer3_cols name,tx_bytes,rx_bytes,dropped --ipv6 --traffic_type lf_udp --debug"},
@@ -66,17 +112,17 @@
"test_l4_ftp_bytes-rd":{"enabled":"TRUE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type bytes-rd --test_duration 15s --url 'dl ftp://10.40.0.1 /dev/null' --debug"}, "test_l4_ftp_bytes-rd":{"enabled":"TRUE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type bytes-rd --test_duration 15s --url 'dl ftp://10.40.0.1 /dev/null' --debug"},
"test_l4_ftp_bytes-wr":{"enabled":"FALSE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type bytes-wr --test_duration 15s --url 'ul ftp://10.40.0.1' --debug"}, "test_l4_ftp_bytes-wr":{"enabled":"FALSE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type bytes-wr --test_duration 15s --url 'ul ftp://10.40.0.1' --debug"},
"test_l4_ftp_urls_s":{"enabled":"TRUE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type urls --requests_per_ten 600 --target_per_ten 600 --test_duration 15s --url 'dl ftp://10.40.0.1 /dev/null' --debug"}, "test_l4_ftp_urls_s":{"enabled":"TRUE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type urls --requests_per_ten 600 --target_per_ten 600 --test_duration 15s --url 'dl ftp://10.40.0.1 /dev/null' --debug"},
"test_l3_longevity_1":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy0,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}, "test_l3_longevity_1":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy0,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"},
"test_l3_powersave_traffic":{"enabled":"TRUE","command":"test_l3_powersave_traffic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"}, "test_l3_powersave_traffic":{"enabled":"TRUE","command":"test_l3_powersave_traffic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"test_status_msg":{"enabled":"TRUE","command":"test_status_msg.py","args":"--action run_test"}, "test_status_msg":{"enabled":"TRUE","command":"test_status_msg.py","args":"--action run_test"},
"test_wanlink":{"enabled":"TRUE","command":"test_wanlink.py","args":"--debug"}, "test_wanlink":{"enabled":"TRUE","command":"test_wanlink.py","args":"--debug"},
"create_bridge":{"enabled":"TRUE","command":"create_bridge.py","args":"--radio wiphy1 --upstream_port eth1 --target_device sta0000 --debug"}, "create_bridge":{"enabled":"TRUE","command":"create_bridge.py","args":"--radio wiphy1 --upstream_port eth2 --target_device sta0000 --debug"},
"create_l3":{"enabled":"TRUE","command":"create_l3.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"}, "create_l3":{"enabled":"TRUE","command":"create_l3.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_l4":{"enabled":"TRUE","command":"create_l4.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"}, "create_l4":{"enabled":"TRUE","command":"create_l4.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_macvlan":{"enabled":"TRUE","command":"create_macvlan.py","args":"--radio wiphy1 --macvlan_parent eth1 --debug"}, "create_macvlan":{"enabled":"TRUE","command":"create_macvlan.py","args":"--radio wiphy1 --macvlan_parent eth2 --debug"},
"create_station":{"enabled":"TRUE","command":"create_station.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"}, "create_station":{"enabled":"TRUE","command":"create_station.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_vap":{"enabled":"TRUE","command":"create_vap.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"}, "create_vap":{"enabled":"TRUE","command":"create_vap.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_qvlan":{"enabled":"TRUE","command":"create_qvlan.py","args":"--radio wiphy1 --qvlan_parent eth1"}, "create_qvlan":{"enabled":"TRUE","command":"create_qvlan.py","args":"--radio wiphy1 --qvlan_parent eth2"},
"wlan_capacity_calculator1":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11abg -t Voice -p 48 -m 106 -e WEP -q Yes -b 1 2 5.5 11 -pre Long -s N/A -co G.711 -r Yes -c Yes"}, "wlan_capacity_calculator1":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11abg -t Voice -p 48 -m 106 -e WEP -q Yes -b 1 2 5.5 11 -pre Long -s N/A -co G.711 -r Yes -c Yes"},
"wlan_capacity_calculator2":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11n -t Voice -d 17 -ch 40 -gu 800 -high 9 -e WEP -q Yes -ip 5 -mc 42 -b 6 9 12 24 -m 1538 -co G.729 -pl Greenfield -cw 15 -r Yes -c Yes"}, "wlan_capacity_calculator2":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11n -t Voice -d 17 -ch 40 -gu 800 -high 9 -e WEP -q Yes -ip 5 -mc 42 -b 6 9 12 24 -m 1538 -co G.729 -pl Greenfield -cw 15 -r Yes -c Yes"},
"wlan_capacity_calculator3":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11ac -t Voice -d 9 -spa 3 -ch 20 -gu 800 -high 1 -e TKIP -q Yes -ip 3 -mc 0 -b 6 12 24 54 -m 1518 -co Greenfield -cw 15 -rc Yes"} "wlan_capacity_calculator3":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11ac -t Voice -d 9 -spa 3 -ch 20 -gu 800 -high 1 -e TKIP -q Yes -ip 3 -mc 0 -b 6 12 24 54 -m 1518 -co Greenfield -cw 15 -rc Yes"}