merge latest lanforge-scripts repo

Signed-off-by: shivamcandela <shivam.thakur@candelatech.com>
This commit is contained in:
shivamcandela
2021-09-22 18:41:00 +05:30
200 changed files with 32236 additions and 3884 deletions

1
.gitignore vendored
View File

@@ -4,3 +4,4 @@
*~
*.iml
**/*.iml
.idea

View File

@@ -1,30 +1,31 @@
#!/usr/bin/env python3
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Class holds default settings for json requests to Ghost -
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import os
import sys
import os
import importlib
import requests
import jwt
from datetime import datetime
import json
import subprocess
from scp import SCPClient
import paramiko
import time
from collections import Counter
import shutil
import itertools
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
import requests
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
import jwt
from datetime import datetime
from dateutil import tz
import json
import subprocess
from scp import SCPClient
import paramiko
from GrafanaRequest import GrafanaRequest
from influx2 import RecordInflux
import time
from collections import Counter
import shutil
GrafanaRequest = importlib.import_module("py-dashboard.GrafanaRequest")
InfluxRequest = importlib.import_module("py-dashboard.InfluxRequest")
RecordInflux = InfluxRequest.RecordInflux
class CSVReader:
@@ -36,8 +37,6 @@ class CSVReader:
for x in df:
if len(x) > 0:
rows.append(x.split(sep))
length = list(range(0, len(df[0])))
columns = dict(zip(df[0], length))
return rows
def get_column(self,
@@ -74,7 +73,7 @@ class CSVReader:
for row in df:
for item in row:
html = html + ('<td style="border:1px solid #ddd">%s</td>' % item)
html = html + ('</tr>\n<tr>')
html = html + '</tr>\n<tr>'
html = html + ('</tbody>'
'</table>')
return html
@@ -83,35 +82,24 @@ class CSVReader:
target_index = df[0].index(column)
counter = 0
targets = [0]
for row in df:
for row in df[1:]:
try:
if expression == 'less than':
if float(row[target_index]) < target:
targets.append(counter)
counter += 1
else:
counter += 1
if expression == 'greater than':
if float(row[target_index]) > target:
targets.append(counter)
counter += 1
else:
counter += 1
if expression == 'greater than or equal to':
if float(row[target_index]) >= target:
targets.append(counter)
counter += 1
else:
counter += 1
except:
counter += 1
finally:
pass
counter += 1
return list(map(df.__getitem__, targets))
def concat(self, dfs):
final_df = dfs[0]
for df in dfs[1:]:
final_df = final_df + df[1:]
return final_df
return list(itertools.chain.from_iterable(dfs))
class GhostRequest:
@@ -165,8 +153,6 @@ class GhostRequest:
def create_post(self,
title=None,
text=None,
tags=None,
authors=None,
status="published"):
ghost_json_url = self.ghost_json_url + '/admin/posts/?source=html'
post = dict()
@@ -199,9 +185,9 @@ class GhostRequest:
ghost_json_url = self.ghost_json_url + '/admin/images/upload/'
token = self.encode_token()
bashCommand = "curl -X POST -F 'file=@%s' -H \"Authorization: Ghost %s\" %s" % (image, token, ghost_json_url)
bash_command = "curl -X POST -F 'file=@%s' -H \"Authorization: Ghost %s\" %s" % (image, token, ghost_json_url)
proc = subprocess.Popen(bashCommand, shell=True, stdout=subprocess.PIPE)
proc = subprocess.Popen(bash_command, shell=True, stdout=subprocess.PIPE)
output = proc.stdout.read().decode('utf-8')
if self.debug:
print(output)
@@ -221,17 +207,12 @@ class GhostRequest:
authors,
title='custom'):
self.upload_images(folder)
head = '''<p>This is a custom post created via a script</p>'''
head = '''This is a custom post created via a script'''
for picture in self.images:
head = head + '<img src="%s"></img>' % picture
head = head + '''<p>This is the end of the example</p>'''
head = head + '''This is the end of the example'''
self.create_post(title=title,
text=head,
tags='custom',
authors=authors)
def list_append(self, list_1, value):
list_1.append(value)
text=head)
def kpi_to_ghost(self,
authors,
@@ -258,11 +239,6 @@ class GhostRequest:
text = ''
csvreader = CSVReader()
if grafana_token is not None:
grafana = GrafanaRequest(grafana_token,
grafana_host,
grafanajson_port=grafana_port
)
if self.debug:
print('Folders: %s' % folders)
@@ -296,7 +272,7 @@ class GhostRequest:
target_folders.append(folder)
testbeds = list()
webpagesandpdfs = list()
web_pages_and_pdfs = list()
high_priority_list = list()
low_priority_list = list()
images = list()
@@ -305,21 +281,25 @@ class GhostRequest:
subtest_pass_fail = list()
subtest_pass_total = 0
subtest_fail_total = 0
test_tag = dict()
test_tag_1 = list()
columns = ['test-rig', 'dut-hw-version', 'dut-sw-version',
'dut-model-num', 'dut-serial-num']
duts = dict()
for target_folder in target_folders:
try:
target_file = '%s/kpi.csv' % target_folder
df = csvreader.read_csv(file=target_file, sep='\t')
test_rig = csvreader.get_column(df, 'test-rig')[0]
test_id = csvreader.get_column(df, 'test-id')[0]
test_tag[test_id] = (csvreader.get_column(df, 'test-tag')[0])
for column in columns:
try:
column_data = csvreader.get_column(df, column)[0]
duts[column] = column_data
except:
print('no column named %s' % column)
test_tag_1.append([test_id, list(set(csvreader.get_column(df, 'test-tag')))])
pass_fail = Counter(csvreader.get_column(df, 'pass/fail'))
test_pass_fail.append(pass_fail)
dut_hw = csvreader.get_column(df, 'dut-hw-version')[0]
dut_sw = csvreader.get_column(df, 'dut-sw-version')[0]
dut_model = csvreader.get_column(df, 'dut-model-num')[0]
dut_serial = csvreader.get_column(df, 'dut-serial-num')[0]
subtest_pass = csvreader.get_column(df, 'Subtest-Pass')
subtest_fail = csvreader.get_column(df, 'Subtest-Fail')
for result in subtest_pass:
@@ -330,8 +310,10 @@ class GhostRequest:
subtest_pass_fail_list['PASS'] = subtest_pass_total
subtest_pass_fail_list['FAIL'] = subtest_fail_total
subtest_pass_fail.append(subtest_pass_fail_list)
duts = [dut_serial, dut_hw, dut_sw, dut_model, test_rig, test_tag]
times_append = csvreader.get_column(df, 'Date')
if len(times_append) == 0:
print(LookupError("%s/kpi.csv has no time points" % target_folder))
break
for target_time in times_append:
times.append(float(target_time) / 1000)
if pass_fail['PASS'] + pass_fail['FAIL'] > 0:
@@ -343,13 +325,12 @@ class GhostRequest:
text = text + 'Tests passed: 0<br />' \
'Tests failed : 0<br />' \
'Percentage of tests passed: Not Applicable<br />'
testbeds.append(test_rig)
testbeds.append(duts['test-rig'])
if testbed is None:
testbed = test_rig
testbed = duts['test-rig']
if test_run is None:
test_run = now.strftime('%B-%d-%Y-%I-%M-%p-report')
local_path = '/home/%s/%s/%s/%s' % (user_push, customer, testbed, test_run)
transport = paramiko.Transport(ghost_host, port)
@@ -382,9 +363,9 @@ class GhostRequest:
url = 'http://%s/%s/%s/%s/%s/%s' % (
ghost_host, customer.strip('/'), testbed, test_run, target_folder, 'index.html')
webpages.append('<a href="%s">HTML</a>' % url)
webpagesandpdfsappend = dict()
webpagesandpdfsappend[test_id] = pdfs + webpages
webpagesandpdfs.append(webpagesandpdfsappend)
web_pages_and_pdfs_append = dict()
web_pages_and_pdfs_append[test_id] = pdfs + webpages
web_pages_and_pdfs.append(web_pages_and_pdfs_append)
scp_push.close()
self.upload_images(target_folder)
for image in self.images:
@@ -404,18 +385,27 @@ class GhostRequest:
pass
low_priority = csvreader.filter_df(results, 'test-priority', 'less than', 94)
if self.debug:
print('Low Priority results %s' % len(low_priority))
high_priority = csvreader.filter_df(results, 'test-priority', 'greater than or equal to', 95)
high_priority_list.append(high_priority)
low_priority_list.append(low_priority)
except:
print("Failure")
print("Failed to process %s" % target_folder)
target_folders.remove(target_folder)
failuredict = dict()
failuredict[target_folder] = ['Failure']
webpagesandpdfs.append(failuredict)
web_pages_and_pdfs.append(failuredict)
test_tag = dict()
for x in list(set([x[0] for x in test_tag_1])):
l3 = list()
for sublist in test_tag_1:
if sublist[0] == x:
l3 += sublist[1]
test_tag[x] = l3
if len(times) == 0:
return ArithmeticError("There are no datapoints in any folders passed into Ghost")
test_pass_fail_results = sum((Counter(test) for test in test_pass_fail), Counter())
subtest_pass_fail_results = sum((Counter(test) for test in subtest_pass_fail), Counter())
@@ -432,14 +422,17 @@ class GhostRequest:
high_priority = csvreader.concat(high_priority_list)
low_priority = csvreader.concat(low_priority_list)
high_priority = csvreader.get_columns(high_priority,
['Short Description', 'Score', 'Test Details'])
if len(high_priority) > 0:
high_priority = csvreader.get_columns(high_priority,
['Short Description', 'Score', 'Test Details'])
low_priority = csvreader.get_columns(low_priority,
['Short Description', 'Score', 'Test Details'])
high_priority.append(['Total Passed', test_pass_fail_results['PASS'], 'Total subtests passed during this run'])
high_priority.append(['Total Failed', test_pass_fail_results['FAIL'], 'Total subtests failed during this run'])
high_priority.append(['Subtests Passed', subtest_pass_fail_results['PASS'], 'Total subtests passed during this run'])
high_priority.append(['Subtests Failed', subtest_pass_fail_results['FAIL'], 'Total subtests failed during this run'])
high_priority.append(
['Subtests Passed', subtest_pass_fail_results['PASS'], 'Total subtests passed during this run'])
high_priority.append(
['Subtests Failed', subtest_pass_fail_results['FAIL'], 'Total subtests failed during this run'])
if title is None:
title = end_time.strftime('%B %d, %Y %I:%M %p report')
@@ -447,84 +440,99 @@ class GhostRequest:
# create Grafana Dashboard
target_files = []
for folder in target_folders:
target_files.append(folder.split('/')[-1] + '/kpi.csv')
target_file=folder.split('/')[-1] + '/kpi.csv'
try:
open(target_file)
target_files.append(target_file)
except:
pass
if self.debug:
print('Target files: %s' % target_files)
grafana.create_custom_dashboard(target_csvs=target_files,
title=title,
datasource=grafana_datasource,
bucket=grafana_bucket,
from_date=start_time,
to_date=end_time.strftime('%Y-%m-%d %H:%M:%S'),
pass_fail='GhostRequest',
testbed=testbeds[0],
test_tag=test_tag)
text = 'Testbed: %s<br />' % testbeds[0]
if self.influx_token is not None:
influxdb = RecordInflux(_influx_host=self.influx_host,
_influx_port=self.influx_port,
_influx_org=self.influx_org,
_influx_token=self.influx_token,
_influx_bucket=self.influx_bucket)
short_description = 'Tests passed' # variable name
numeric_score = test_pass_fail_results['PASS'] # value
tags = dict()
print(datetime.utcfromtimestamp(max(times)))
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'PASS'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
try:
short_description = 'Tests passed' # variable name
numeric_score = test_pass_fail_results['PASS'] # value
tags = dict()
if self.debug:
print(datetime.utcfromtimestamp(max(times)))
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'PASS'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Tests failed' # variable name
numeric_score = test_pass_fail_results['FAIL'] # value
tags = dict()
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'FAIL'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Tests failed' # variable name
numeric_score = test_pass_fail_results['FAIL'] # value
tags = dict()
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'FAIL'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Subtests passed' # variable name
numeric_score = subtest_pass_fail_results['PASS'] # value
tags = dict()
print(datetime.utcfromtimestamp(max(times)))
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'Subtest PASS'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Subtests passed' # variable name
numeric_score = subtest_pass_fail_results['PASS'] # value
tags = dict()
if self.debug:
print(datetime.utcfromtimestamp(max(times)))
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'Subtest PASS'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Subtests failed' # variable name
numeric_score = subtest_pass_fail_results['FAIL'] # value
tags = dict()
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'Subtest FAIL'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Subtests failed' # variable name
numeric_score = subtest_pass_fail_results['FAIL'] # value
tags = dict()
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'Subtest FAIL'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
except Exception as err:
influx_error = err
text += '''InfluxDB Error: %s<br />
Influx Host: %s<br />
Influx Port: %s<br />
Influx Organization: %s<br />
Influx Bucket: %s<br />''' % (influx_error, self.influx_host, self.influx_port, self.influx_org, self.influx_bucket)
text = 'Testbed: %s<br />' % testbeds[0]
raw_test_tags = list()
test_tag_table = ''
for tag in list(set(test_tag.values())):
print(tag)
for tag in test_tag.values():
for value in tag:
raw_test_tags.append(value)
for value in list(set(raw_test_tags)):
test_tag_table += (
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Test Tag</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' % tag)
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Test Tag</td><td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' % value)
dut_table_column_names = {'test-rig': 'Testbed',
'dut-hw-version': 'DUT HW',
'dut-sw-version': 'DUT SW',
'dut-model-num': 'DUT Model',
'dut-serial-num': 'DUT Serial'}
dut_table_columns = ''
for column in columns:
if column in dut_table_column_names.keys():
column_name = dut_table_column_names[column]
else:
column_name = column
dut_table_columns += (
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td><td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' % (
column_name, duts[column])
)
dut_table = '<table width="700px" border="1" cellpadding="2" cellspacing="0" ' \
'style="border-color: gray; border-style: solid; border-width: 1px; "><tbody>' \
'<tr><th colspan="2">Test Information</th></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Testbed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'%s' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT_HW</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT_SW</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT model</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT Serial</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'%s' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Tests passed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Tests failed</td>' \
@@ -532,14 +540,13 @@ class GhostRequest:
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Subtests passed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Subtests failed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' % (
duts[4], test_tag_table, duts[1], duts[2], duts[3], duts[0], test_pass_fail_results['PASS'],
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'</tbody></table>' % (
dut_table_columns, test_tag_table, test_pass_fail_results['PASS'],
test_pass_fail_results['FAIL'], subtest_pass_total, subtest_fail_total)
dut_table = dut_table + '</tbody></table>'
text = text + dut_table
for dictionary in webpagesandpdfs:
for dictionary in web_pages_and_pdfs:
text += list(dictionary.keys())[0] + ' report: '
for value in dictionary.values():
for webpage in value:
@@ -554,18 +561,40 @@ class GhostRequest:
text = text + 'High priority results: %s' % csvreader.to_html(high_priority)
if grafana_token is not None:
# get the details of the dashboard through the API, and set the end date to the youngest KPI
grafana.list_dashboards()
grafana = GrafanaRequest(grafana_token,
grafana_host,
grafanajson_port=grafana_port,
debug_=self.debug
)
if self.debug:
print('Test Tag: %s' % test_tag)
try:
grafana.create_custom_dashboard(target_csvs=target_files,
title=title,
datasource=grafana_datasource,
bucket=grafana_bucket,
from_date=start_time,
to_date=end_time.strftime('%Y-%m-%d %H:%M:%S'),
pass_fail='GhostRequest',
testbed=testbeds[0],
test_tag=test_tag)
# get the details of the dashboard through the API, and set the end date to the youngest KPI
grafana.list_dashboards()
grafana.create_snapshot(title='Testbed: ' + title)
time.sleep(3)
snapshot = grafana.list_snapshots()[-1]
text = text + '<iframe src="http://%s:3000/dashboard/snapshot/%s" width="100%s" height=1500></iframe><br />' % (
grafana_host, snapshot['key'], '%')
grafana.create_snapshot(title='Testbed: ' + title)
time.sleep(3)
snapshot = grafana.list_snapshots()[-1]
text = text + '<iframe src="http://%s:3000/dashboard/snapshot/%s" width="100%s" height=1500></iframe><br />' % (
grafana_host, snapshot['key'], '%')
except Exception as err:
grafana_error = err
text = text + '''Grafana Error: %s<br />
Grafana credentials:<br />
Grafana Host: %s<br />
Grafana Bucket: %s<br />
Grafana Database: %s<br />''' % (grafana_error, grafana_host, grafana_bucket, grafana_datasource)
text = text + 'Low priority results: %s' % csvreader.to_html(low_priority)
self.create_post(title=title,
text=text,
tags='custom',
authors=authors)
text=text)

View File

@@ -170,8 +170,8 @@ class GrafanaRequest:
if graph_group is not None:
graphgroup = ('|> filter(fn: (r) => r["Graph-Group"] == "%s")\n' % graph_group)
query += graphgroup
if test_tag is not None:
graphgroup = ('|> filter(fn: (r) => r["Test-Tag"] == "%s")\n' % test_tag)
if test_tag is not None and len(test_tag) > 0:
graphgroup = ('|> filter(fn: (r) => r["Test Tag"] == "%s")\n' % test_tag)
query += graphgroup
if testbed is not None:
query += ('|> filter(fn: (r) => r["testbed"] == "%s")\n' % testbed)
@@ -209,7 +209,7 @@ class GrafanaRequest:
from_date='now-1y',
to_date='now',
graph_height=8,
graph__width=12,
graph_width=12,
pass_fail=None,
test_tag=None):
options = string.ascii_lowercase + string.ascii_uppercase + string.digits
@@ -248,126 +248,37 @@ class GrafanaRequest:
if pass_fail is not None:
graph_groups[pass_fail] = ['PASS', 'FAIL']
print('Test Tag in Grafana: %s' % test_tag)
for scriptname in graph_groups.keys():
print(scriptname)
for graph_group in graph_groups[scriptname]:
panel = dict()
gridpos = dict()
gridpos['h'] = graph_height
gridpos['w'] = graph__width
gridpos['x'] = 0
gridpos['y'] = 0
legend = dict()
legend['avg'] = False
legend['current'] = False
legend['max'] = False
legend['min'] = False
legend['show'] = True
legend['total'] = False
legend['values'] = False
options = dict()
options['alertThreshold'] = True
groupBy = list()
groupBy.append(self.groupby('$__interval', 'time'))
groupBy.append(self.groupby('null', 'fill'))
targets = list()
counter = 0
try:
new_target = self.maketargets(bucket,
scriptname,
groupBy,
counter,
graph_group,
testbed,
test_tag=test_tag[scriptname])
except:
new_target = self.maketargets(bucket, scriptname, groupBy, counter, graph_group, testbed)
targets.append(new_target)
fieldConfig = dict()
fieldConfig['defaults'] = dict()
fieldConfig['overrides'] = list()
transformation = dict()
transformation['id'] = "renameByRegex"
transformation_options = dict()
transformation_options['regex'] = "(.*) value.*"
transformation_options['renamePattern'] = "$1"
transformation['options'] = transformation_options
xaxis = dict()
xaxis['buckets'] = None
xaxis['mode'] = "time"
xaxis['name'] = None
xaxis['show'] = True
xaxis['values'] = list()
yaxis = dict()
yaxis['format'] = 'short'
try:
yaxis['label'] = self.units[scriptname][graph_group]
except:
pass
yaxis['logBase'] = 1
yaxis['max'] = None
yaxis['min'] = None
yaxis['show'] = True
yaxis1 = dict()
yaxis1['align'] = False
yaxis1['alignLevel'] = None
panel['aliasColors'] = dict()
panel['bars'] = False
panel['dashes'] = False
panel['dashLength'] = 10
panel['datasource'] = datasource
panel['fieldConfig'] = fieldConfig
panel['fill'] = 0
panel['fillGradient'] = 0
panel['gridPos'] = gridpos
panel['hiddenSeries'] = False
panel['id'] = index
panel['legend'] = legend
panel['lines'] = True
panel['linewidth'] = 1
panel['nullPointMode'] = 'null'
panel['options'] = options
panel['percentage'] = False
panel['pluginVersion'] = '7.5.4'
panel['pointradius'] = 2
panel['points'] = True
panel['renderer'] = 'flot'
panel['seriesOverrides'] = list()
panel['spaceLength'] = 10
panel['stack'] = False
panel['steppedLine'] = False
panel['targets'] = targets
panel['thresholds'] = list()
panel['timeFrom'] = None
panel['timeRegions'] = list()
panel['timeShift'] = None
if graph_group is not None:
panel['title'] = scriptname + ' ' + graph_group
else:
panel['title'] = scriptname
print(panel['title'])
panel['transformations'] = list()
panel['transformations'].append(transformation)
panel['type'] = "graph"
panel['xaxis'] = xaxis
panel['yaxes'] = list()
panel['yaxes'].append(yaxis)
panel['yaxes'].append(yaxis)
panel['yaxis'] = yaxis1
if scriptname in test_tag.keys():
for tag in test_tag[scriptname]:
print('Script: %s, Tag: %s' % (scriptname, tag))
panel = self.create_panel(graph_groups,
graph_height,
graph_width,
scriptname,
bucket,
testbed,
tag,
datasource,
index)
panels.append(panel)
index = index + 1
else:
panel = self.create_panel(graph_groups,
graph_height,
graph_width,
scriptname,
bucket,
testbed,
None,
datasource,
index)
panels.append(panel)
index = index + 1
input1['annotations'] = annot
input1['editable'] = True
input1['gnetId'] = None
@@ -387,16 +298,144 @@ class GrafanaRequest:
input1['version'] = 11
return self.create_dashboard_from_dict(dictionary=json.dumps(input1))
# def create_custom_dashboard(self,
# datastore=None):
# data = json.dumps(datastore, indent=4)
# return requests.post(self.grafanajson_url, headers=self.headers, data=data, verify=False)
def create_panel(self,
graph_groups,
graph_height,
graph_width,
scriptname,
bucket,
testbed,
test_tag,
datasource,
index):
print('Test Tag: %s' % test_tag)
for graph_group in graph_groups[scriptname]:
panel = dict()
gridpos = dict()
gridpos['h'] = graph_height
gridpos['w'] = graph_width
gridpos['x'] = 0
gridpos['y'] = 0
legend = dict()
legend['avg'] = False
legend['current'] = False
legend['max'] = False
legend['min'] = False
legend['show'] = True
legend['total'] = False
legend['values'] = False
options = dict()
options['alertThreshold'] = True
groupBy = list()
groupBy.append(self.groupby('$__interval', 'time'))
groupBy.append(self.groupby('null', 'fill'))
targets = list()
counter = 0
try:
new_target = self.maketargets(bucket,
scriptname,
groupBy,
counter,
graph_group,
testbed,
test_tag=test_tag)
except:
new_target = self.maketargets(bucket, scriptname, groupBy, counter, graph_group, testbed)
targets.append(new_target)
fieldConfig = dict()
fieldConfig['defaults'] = dict()
fieldConfig['overrides'] = list()
transformation = dict()
transformation['id'] = "renameByRegex"
transformation_options = dict()
transformation_options['regex'] = "(.*) value.*"
transformation_options['renamePattern'] = "$1"
transformation['options'] = transformation_options
xaxis = dict()
xaxis['buckets'] = None
xaxis['mode'] = "time"
xaxis['name'] = None
xaxis['show'] = True
xaxis['values'] = list()
yaxis = dict()
yaxis['format'] = 'short'
try:
yaxis['label'] = self.units[scriptname][graph_group]
except:
pass
yaxis['logBase'] = 1
yaxis['max'] = None
yaxis['min'] = None
yaxis['show'] = True
yaxis1 = dict()
yaxis1['align'] = False
yaxis1['alignLevel'] = None
panel['aliasColors'] = dict()
panel['bars'] = False
panel['dashes'] = False
panel['dashLength'] = 10
panel['datasource'] = datasource
panel['fieldConfig'] = fieldConfig
panel['fill'] = 0
panel['fillGradient'] = 0
panel['gridPos'] = gridpos
panel['hiddenSeries'] = False
panel['id'] = index
panel['legend'] = legend
panel['lines'] = True
panel['linewidth'] = 1
panel['nullPointMode'] = 'null'
panel['options'] = options
panel['percentage'] = False
panel['pluginVersion'] = '7.5.4'
panel['pointradius'] = 2
panel['points'] = True
panel['renderer'] = 'flot'
panel['seriesOverrides'] = list()
panel['spaceLength'] = 10
panel['stack'] = False
panel['steppedLine'] = False
panel['targets'] = targets
panel['thresholds'] = list()
panel['timeFrom'] = None
panel['timeRegions'] = list()
panel['timeShift'] = None
if graph_group is not None:
scriptname = '%s: %s' % (scriptname, graph_group)
if test_tag is not None:
scriptname = '%s: %s' % (scriptname, test_tag)
scriptname = '%s: %s' % (scriptname, testbed)
panel['title'] = scriptname
if self.debug:
print(panel['title'])
panel['transformations'] = list()
panel['transformations'].append(transformation)
panel['type'] = "graph"
panel['xaxis'] = xaxis
panel['yaxes'] = list()
panel['yaxes'].append(yaxis)
panel['yaxes'].append(yaxis)
panel['yaxis'] = yaxis1
return panel
def create_snapshot(self, title):
print('create snapshot')
grafanajson_url = self.grafanajson_url + '/api/snapshots'
data = self.get_dashboard(title)
data['expires'] = 360000
data['expires'] = False
data['external'] = False
data['timeout'] = 15
if self.debug:
@@ -427,4 +466,4 @@ class GrafanaRequest:
d = dict()
d[maxunit] = maxtest
print(maxunit, maxtest)
return d
return d

41
py-scripts/influx2.py → py-dashboard/InfluxRequest.py Executable file → Normal file
View File

@@ -7,6 +7,8 @@
import sys
import os
import pandas as pd
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
@@ -20,11 +22,20 @@ import requests
import json
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS
import datetime
#from LANforge.lfcli_base import LFCliBase
import time
class RecordInflux:
import datetime
def influx_add_parser_args(parser):
parser.add_argument('--influx_host', help='Hostname for the Influx database', default=None)
parser.add_argument('--influx_port', help='IP Port for the Influx database', default=8086)
parser.add_argument('--influx_org', help='Organization for the Influx database', default=None)
parser.add_argument('--influx_token', help='Token for the Influx database', default=None)
parser.add_argument('--influx_bucket', help='Name of the Influx bucket', default=None)
parser.add_argument('--influx_tag', action='append', nargs=2,
help='--influx_tag <key> <val> Can add more than one of these.', default=[])
class RecordInflux():
def __init__(self,
_influx_host="localhost",
_influx_port=8086,
@@ -38,7 +49,7 @@ class RecordInflux:
self.influx_org = _influx_org
self.influx_token = _influx_token
self.influx_bucket = _influx_bucket
self.url = "http://%s:%s"%(self.influx_host, self.influx_port)
self.url = "http://%s:%s" % (self.influx_host, self.influx_port)
self.client = influxdb_client.InfluxDBClient(url=self.url,
token=self.influx_token,
org=self.influx_org,
@@ -54,6 +65,28 @@ class RecordInflux:
p.field("value", value)
self.write_api.write(bucket=self.influx_bucket, org=self.influx_org, record=p)
def csv_to_influx(self, csv):
df = pd.read_csv(csv, sep='\t')
df['Date'] = [datetime.datetime.utcfromtimestamp(int(date) / 1000).isoformat() for date in df['Date']]
items = list(df.reset_index().transpose().to_dict().values())
influx_variables = ['script', 'short-description', 'test_details', 'Graph-Group',
'DUT-HW-version', 'DUT-SW-version', 'DUT-Serial-Num', 'testbed', 'Test Tag', 'Units']
csv_variables = ['test-id', 'short-description', 'test details', 'Graph-Group',
'dut-hw-version', 'dut-sw-version', 'dut-serial-num', 'test-rig', 'test-tag', 'Units']
csv_vs_influx = dict(zip(csv_variables, influx_variables))
columns = list(df.columns)
for item in items:
tags = dict()
short_description = item['short-description']
numeric_score = item['numeric-score']
date = item['Date']
for variable in csv_variables:
if variable in columns:
influx_variable = csv_vs_influx[variable]
tags[influx_variable] = item[variable]
self.post_to_influx(short_description, numeric_score, tags, date)
def set_bucket(self, b):
self.influx_bucket = b

View File

@@ -1,22 +1,23 @@
#!/usr/bin/env python3
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Class holds default settings for json requests -
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import sys
import os
import importlib
import urllib
from urllib import request
import json
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
import pprint
import urllib
import time
import traceback
from urllib import request
from urllib import error
from urllib import parse
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))
import json
from LANforge import LFUtils
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
class LFRequest:
@@ -33,7 +34,8 @@ class LFRequest:
debug_=False,
die_on_error_=False):
self.debug = debug_
self.die_on_error = die_on_error_;
self.die_on_error = die_on_error_
self.error_list = []
# please see this discussion on ProxyHandlers:
# https://docs.python.org/3/library/urllib.request.html#urllib.request.ProxyHandler
@@ -124,31 +126,25 @@ class LFRequest:
resp = urllib.request.urlopen(myrequest)
responses.append(resp)
return responses[0]
except urllib.error.HTTPError as error:
if (show_error):
print("----- LFRequest::formPost:76 HTTPError: --------------------------------------------")
print("%s: %s; URL: %s"%(error.code, error.reason, myrequest.get_full_url()))
LFUtils.debug_printer.pprint(error.headers)
#print("Error: ", sys.exc_info()[0])
#print("Request URL:", request.get_full_url())
print("Request Content-type:", myrequest.get_header('Content-type'))
print("Request Accept:", myrequest.get_header('Accept'))
print("Request Data:")
LFUtils.debug_printer.pprint(myrequest.data)
if (len(responses) > 0):
print("----- Response: --------------------------------------------------------")
LFUtils.debug_printer.pprint(responses[0].reason)
print("------------------------------------------------------------------------")
if die_on_error_:
exit(1)
except urllib.error.HTTPError as error:
print_diagnostics(url_=self.requested_url,
request_=myrequest,
responses_=responses,
error_=error,
error_list_=self.error_list,
debug_=debug)
except urllib.error.URLError as uerror:
if show_error:
print("----- LFRequest::formPost:94 URLError: ---------------------------------------------")
print("Reason: %s; URL: %s"%(uerror.reason, myrequest.get_full_url()))
print("------------------------------------------------------------------------")
if (die_on_error_ == True) or (self.die_on_error == True):
exit(1)
print_diagnostics(url_=self.requested_url,
request_=myrequest,
responses_=responses,
error_=uerror,
error_list_=self.error_list,
debug_=debug)
if (die_on_error_ == True) or (self.die_on_error == True):
exit(1)
return None
def jsonPost(self, show_error=True, debug=False, die_on_error_=False, response_json_list_=None):
@@ -161,8 +157,8 @@ class LFRequest:
die_on_error_ = True
responses = []
if (self.proxies is not None) and (len(self.proxies) > 0):
opener = request.build_opener(request.ProxyHandler(self.proxies))
request.install_opener(opener)
opener = urllib.request.build_opener(request.ProxyHandler(self.proxies))
urllib.request.install_opener(opener)
if ((self.post_data != None) and (self.post_data is not self.No_Data)):
myrequest = request.Request(url=self.requested_url,
@@ -178,7 +174,7 @@ class LFRequest:
# https://stackoverflow.com/a/59635684/11014343
try:
resp = request.urlopen(myrequest)
resp = urllib.request.urlopen(myrequest)
resp_data = resp.read().decode('utf-8')
if (debug and die_on_error_):
print("----- LFRequest::json_post:128 debug: --------------------------------------------")
@@ -199,39 +195,23 @@ class LFRequest:
print("-------------------------------------------------")
response_json_list_.append(j)
return responses[0]
except urllib.error.HTTPError as error:
if show_error or die_on_error_ or (error.code != 404):
print("----- LFRequest::json_post:147 HTTPError: --------------------------------------------")
print("<%s> HTTP %s: %s" % (myrequest.get_full_url(), error.code, error.reason ))
print_diagnostics(url_=self.requested_url,
request_=myrequest,
responses_=responses,
error_=error,
debug_=debug)
print("Error: ", sys.exc_info()[0])
print("Request URL:", myrequest.get_full_url())
print("Request Content-type:", myrequest.get_header('Content-type'))
print("Request Accept:", myrequest.get_header('Accept'))
print("Request Data:")
LFUtils.debug_printer.pprint(myrequest.data)
if error.headers:
# the HTTPError is of type HTTPMessage a subclass of email.message
# print(type(error.keys()))
for headername in sorted(error.headers.keys()):
print ("Response %s: %s "%(headername, error.headers.get(headername)))
if len(responses) > 0:
print("----- Response: --------------------------------------------------------")
LFUtils.debug_printer.pprint(responses[0].reason)
print("------------------------------------------------------------------------")
if die_on_error_:
traceback.print_stack(limit=15)
exit(1)
except urllib.error.URLError as uerror:
if show_error:
print("----- LFRequest::json_post:171 URLError: ---------------------------------------------")
print("Reason: %s; URL: %s"%(uerror.reason, myrequest.get_full_url()))
print("------------------------------------------------------------------------")
if (die_on_error_ == True) or (self.die_on_error == True):
traceback.print_stack(limit=15)
exit(1)
print_diagnostics(url_=self.requested_url,
request_=myrequest,
responses_=responses,
error_=uerror,
debug_=debug)
if die_on_error_ == True:
exit(1)
return None
def json_put(self, show_error=True, debug=False, die_on_error_=False, response_json_list_=None):
@@ -267,39 +247,25 @@ class LFRequest:
try:
myresponses.append(request.urlopen(myrequest))
return myresponses[0]
except urllib.error.HTTPError as error:
if debug:
if error.code == 404:
print("HTTP 404: <%s>" % myrequest.get_full_url())
else:
print("----- LFRequest::get:181 HTTPError: --------------------------------------------")
print("<%s> HTTP %s: %s"%(myrequest.get_full_url(), error.code, error.reason, ))
print("Error: ", sys.exc_info()[0])
print("E Request URL:", myrequest.get_full_url())
print("E Request Content-type:", myrequest.get_header('Content-type'))
print("E Request Accept:", myrequest.get_header('Accept'))
print("E Request Data:")
LFUtils.debug_printer.pprint(myrequest.data)
if (error.code != 404) and error.headers:
# the HTTPError is of type HTTPMessage a subclass of email.message
# print(type(error.keys()))
for headername in sorted(error.headers.keys()):
print ("H Response %s: %s "%(headername, error.headers.get(headername)))
if (error.code != 404) and (len(myresponses) > 0):
print("----- Response: --------------------------------------------------------")
LFUtils.debug_printer.pprint(myresponses[0].reason)
print("------------------------------------------------------------------------")
if (error.code != 404) and (die_on_error_ == True):
traceback.print_stack(limit=15)
exit(1)
print_diagnostics(url_=self.requested_url,
request_=myrequest,
responses_=myresponses,
error_=error,
error_list_=self.error_list,
debug_=debug)
except urllib.error.URLError as uerror:
if debug:
print("----- LFRequest::get:205 URLError: ---------------------------------------------")
print("Reason: %s; URL: %s"%(uerror.reason, myrequest.get_full_url()))
print("------------------------------------------------------------------------")
if die_on_error_ == True:
traceback.print_stack(limit=15)
exit(1)
print_diagnostics(url_=self.requested_url,
request_=myrequest,
responses_=myresponses,
error_=uerror,
error_list_=self.error_list,
debug_=debug)
if die_on_error_ == True:
exit(1)
return None
def getAsJson(self, die_on_error_=False, debug_=False):
@@ -310,6 +276,8 @@ class LFRequest:
j = self.get(debug=debug_, die_on_error_=die_on_error_, method_=method_)
responses.append(j)
if len(responses) < 1:
if debug_ and self.has_errors():
self.print_errors()
return None
if responses[0] == None:
if debug_:
@@ -322,8 +290,22 @@ class LFRequest:
self.add_post_data(data=data)
def add_post_data(self, data):
    """Store *data* as the POST payload for this request.

    TODO: this is a setter and should be named 'set_post_data'
    :param data: dictionary of parameters for post
    :return: nothing
    """
    self.post_data = data
def has_errors(self):
    """Return True if any error has been recorded on this request.

    :return: True when self.error_list is non-empty, False otherwise
    """
    # fixed: the original `(True, False)[len(self.error_list) > 0]` indexed
    # the tuple with the boolean, so it returned False exactly when errors
    # existed (index 1 selects False) -- the result was inverted
    return len(self.error_list) > 0
def print_errors(self):
    """Print every accumulated error, or a placeholder line if there are none.

    :return: nothing
    """
    # fixed: the original tested `if not self.has_errors:` -- the bound
    # method object, which is always truthy -- so the "no errors" branch
    # could never execute. Test the list directly (this also avoids relying
    # on has_errors(), whose return value was inverted in this revision).
    if not self.error_list:
        print("---------- no errors ----------")
        return
    for err in self.error_list:
        print("error: %s" % err)
def plain_get(url_=None, debug_=False, die_on_error_=False, proxies_=None):
"""
@@ -345,38 +327,101 @@ def plain_get(url_=None, debug_=False, die_on_error_=False, proxies_=None):
return myresponses[0]
except urllib.error.HTTPError as error:
if debug_:
print("----- LFRequest::get:181 HTTPError: --------------------------------------------")
print("<%s> HTTP %s: %s"%(myrequest.get_full_url(), error.code, error.reason))
if error.code != 404:
print("Error: ", sys.exc_info()[0])
print("Request URL:", myrequest.get_full_url())
print("Request Content-type:", myrequest.get_header('Content-type'))
print("Request Accept:", myrequest.get_header('Accept'))
print("Request Data:")
LFUtils.debug_printer.pprint(myrequest.data)
print_diagnostics(url_=url_,
request_=request,
responses_=myresponses,
error_=error,
debug_=debug_)
if error.headers:
# the HTTPError is of type HTTPMessage a subclass of email.message
# print(type(error.keys()))
for headername in sorted(error.headers.keys()):
print ("Response %s: %s "%(headername, error.headers.get(headername)))
if len(myresponses) > 0:
print("----- Response: --------------------------------------------------------")
LFUtils.debug_printer.pprint(myresponses[0].reason)
print("------------------------------------------------------------------------")
if die_on_error_ == True:
# print("--------------------------------------------- s.doe %s v doe %s ---------------------------" % (self.die_on_error, die_on_error_))
exit(1)
except urllib.error.URLError as uerror:
if debug_:
print("----- LFRequest::get:205 URLError: ---------------------------------------------")
print("Reason: %s; URL: %s"%(uerror.reason, myrequest.get_full_url()))
print("------------------------------------------------------------------------")
if die_on_error_ == True:
exit(1)
print_diagnostics(url_=url_,
request_=request,
responses_=myresponses,
error_=uerror,
debug_=debug_)
if die_on_error_ == True:
exit(1)
return None
def print_diagnostics(url_=None, request_=None, responses_=None, error_=None, error_list_=None, debug_=False):
if debug_:
print("LFRequest::print_diagnostics: error_.__class__: %s"%error_.__class__)
LFUtils.debug_printer.pprint(error_)
if url_ is None:
print("WARNING LFRequest::print_diagnostics: url_ is None")
if request_ is None:
print("WARNING LFRequest::print_diagnostics: request_ is None")
if error_ is None:
print("WARNING LFRequest::print_diagnostics: error_ is None")
method = 'NA'
if (hasattr(request_, 'method')):
method = request_.method
err_code = 0
err_reason = 'NA'
err_headers = []
err_full_url = url_
if hasattr(error_, 'code'):
err_code = error_.code
if hasattr(error_, 'reason'):
err_reason = error_.reason
if hasattr(error_, 'headers'):
err_headers = error_.headers
if hasattr(error_, 'get_full_url'):
err_full_url = error_.get_full_url()
xerrors = []
if err_code == 404:
xerrors.append("[%s HTTP %s] <%s> : %s" % (method, err_code, err_full_url, err_reason))
else:
if (len(err_headers) > 0):
for headername in sorted(err_headers.keys()):
if headername.startswith("X-Error-"):
xerrors.append("%s: %s" % (headername, err_headers.get(headername)))
if len(xerrors) > 0:
print(" = = LANforge Error Messages = =")
for xerr in xerrors:
print(xerr)
if (error_list_ is not None) and isinstance(error_list_, list):
error_list_.append(xerr)
print(" = = = = = = = = = = = = = = = =")
if (error_.__class__ is urllib.error.HTTPError):
if debug_:
print("----- LFRequest: HTTPError: --------------------------------------------")
print("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))
if err_code == 404:
if (error_list_ is not None) and isinstance(error_list_, list):
error_list_.append("[%s HTTP %s] <%s> : %s" % (method, err_code, err_full_url, err_reason))
else:
if debug_:
print(" Content-type:[%s] Accept[%s]" % (request_.get_header('Content-type'), request_.get_header('Accept')))
if hasattr(request_, "data") and (request_.data is not None):
print(" Data:")
LFUtils.debug_printer.pprint(request_.data)
elif debug_:
print(" <no request data>")
if debug_ and (len(err_headers) > 0):
# the HTTPError is of type HTTPMessage a subclass of email.message
print(" Response Headers: ")
for headername in sorted(err_headers.keys()):
print(" %s: %s" % (headername, err_headers.get(headername)))
if len(responses_) > 0:
print("----- Response: --------------------------------------------------------")
LFUtils.debug_printer.pprint(responses_[0].reason)
if debug_:
print("------------------------------------------------------------------------")
return
if (error_.__class__ is urllib.error.URLError):
print("----- LFRequest: URLError: ---------------------------------------------")
print("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))
print("------------------------------------------------------------------------")
# ~LFRequest

View File

@@ -1,25 +1,30 @@
#!/usr/bin/env python3
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Define useful common methods -
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import sys
import os
import importlib
import pprint
import time
from time import sleep
from random import seed, randint
import re
import ipaddress
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
import os
import pprint
import time
from time import sleep
from random import seed
import re
import ipaddress
seed(int(round(time.time() * 1000)))
from random import randint
from LANforge import LFRequest
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
debug_printer = pprint.PrettyPrinter(indent=2)
seed(int(round(time.time() * 1000)))
NA = "NA" # used to indicate parameter to skip
ADD_STA_FLAGS_DOWN_WPA2 = 68719477760
REPORT_TIMER_MS_FAST = 1500
@@ -276,10 +281,10 @@ def port_name_series(prefix="sta", start_id=0, end_id=1, padding_number=10000, r
the padding_number is added to the start and end numbers and the resulting sum
has the first digit trimmed, so f(0, 1, 10000) => {"0000", "0001"}
@deprecated -- please use port_name_series
:param prefix_: defaults to 'sta'
:param start_id_: beginning id
:param end_id_: ending_id
:param padding_number_: used for width of resulting station number
:param prefix: defaults to 'sta'
:param start_id: beginning id
:param end_id: ending_id
:param padding_number: used for width of resulting station number
:return: list of stations
"""
@@ -478,10 +483,10 @@ def wait_until_ports_admin_up(resource_id=1, base_url="http://localhost:8080", p
sleep(1)
return None
def waitUntilPortsDisappear(base_url="http://localhost:8080", port_list=[], debug=False):
def waitUntilPortsDisappear(base_url="http://localhost:8080", port_list=(), debug=False):
wait_until_ports_disappear(base_url, port_list, debug)
def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=[], debug=False):
def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=(), debug=False):
if (port_list is None) or (len(port_list) < 1):
if debug:
print("LFUtils: wait_until_ports_disappear: empty list, zipping back")
@@ -520,7 +525,7 @@ def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=[], d
lf_r = LFRequest.LFRequest(base_url, check_url, debug_=debug)
json_response = lf_r.get_as_json(debug_=debug, die_on_error_=False)
if (json_response == None):
print("Request returned None")
print("LFUtils::wait_until_ports_disappear:: Request returned None: [{}]".format(base_url + check_url))
else:
if debug:
pprint.pprint(("wait_until_ports_disappear json_response:", json_response))
@@ -559,7 +564,7 @@ def name_to_eid(input, non_port=False):
info = input.split('.')
if len(info) == 1:
rv[2] = info[0]; # just port name
rv[2] = info[0] # just port name
return rv
if (len(info) == 2) and info[0].isnumeric() and not info[1].isnumeric(): # resource.port-name
@@ -596,7 +601,7 @@ def name_to_eid(input, non_port=False):
rv[1] = int(info[1])
rv[2] = info[2]+"."+info[3]
return rv;
return rv
def wait_until_ports_appear(base_url="http://localhost:8080", port_list=(), debug=False):
"""
@@ -647,7 +652,7 @@ def wait_until_endps(base_url="http://localhost:8080", endp_list=(), debug=False
"""
:param base_url:
:param port_list:
:param endp_list:
:param debug:
:return:
"""
@@ -658,7 +663,7 @@ def wait_until_endps(base_url="http://localhost:8080", endp_list=(), debug=False
if base_url.endswith('/'):
port_url = port_url[1:]
ncshow_url = ncshow_url[1:]
found_stations = []
while len(found_stations) < len(endp_list):
found_stations = []
for port_eid in endp_list:

View File

@@ -2,6 +2,22 @@
from enum import Enum
from collections import namedtuple
# Bit-flag values accepted by the add_dut CLI command. Many of these are
# deprecated in favor of per-SSID settings (see add_dut_ssid / add_dut_extras).
add_dut_flags = {
    'STA_MODE': 0x1,          # (1) DUT acts as Station.,
    'AP_MODE': 0x2,           # (2) DUT acts as AP.
    'INACTIVE': 0x4,          # (3) Ignore this in ChamberView, etc
    'WEP': 0x8,               # Use WEP encryption on all ssids, deprecated, see add_dut_ssid.
    'WPA': 0x10,              # Use WPA encryption on all ssids, deprecated, see add_dut_ssid.
    'WPA2': 0x20,             # Use WPA2 encryption on all ssids, deprecated, see add_dut_ssid.
    'DHCPD-LAN': 0x40,        # Provides DHCP server on LAN port
    'DHCPD-WAN': 0x80,        # Provides DHCP server on WAN port
    'WPA3': 0x100,            # Use WPA3 encryption on all ssids, deprecated, see add_dut_extras.
    '11r': 0x200,             # Use .11r connection logic on all ssids, deprecated, see add_dut_ssid.
    'EAP-TTLS': 0x400,        # Use EAP-TTLS connection logic on all ssids, deprecated, see add_dut_ssid.
    'EAP-PEAP': 0x800,        # Use EAP-PEAP connection logic on all ssids, deprecated, see add_dut_ssid.
    'NOT-DHCPCD': 0x1000,     # Station/edge device that is NOT using DHCP.
                              # Otherwise, automation logic assumes it is using dhcp client.'
}
class dut_params(namedtuple("dut_params", "key maxlen"), Enum):
name = "name", 48
flags = "flags", 256

View File

@@ -1,7 +1,7 @@
# flags relating to adding a monitor
# fixed: the diff render duplicated each key in old and new spellings inside
# one literal; this is the deduplicated dict (identical keys and values).
flags = {
    "disable_ht40": 0x800,        # disable 40 MHz channel width
    "disable_ht80": 0x8000000,    # disable 80 MHz channel width
    "ht160_enable": 0x100000000,  # enable 160 MHz channel width
}

# mask selecting which monitor flag bits are applied
default_flags_mask = 0xFFFFFFFFFFFF

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
#!env /usr/bin/python
import sys
import signal
import os
import importlib
import traceback
# Extend this class to use common set of debug and request features for your script
from pprint import pprint
@@ -10,13 +10,16 @@ import random
import string
import datetime
import argparse
import LANforge.LFUtils
from LANforge.LFUtils import *
from LANforge import LFRequest
import LANforge.LFRequest
import csv
import pandas as pd
import os
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
class LFCliBase:
@@ -201,7 +204,7 @@ class LFCliBase:
lf_r.addPostData(_data)
if debug_:
LANforge.LFUtils.debug_printer.pprint(_data)
LFUtils.debug_printer.pprint(_data)
json_response = lf_r.json_post(show_error=debug_,
debug=debug_,
response_json_list_=response_json_list_,
@@ -239,7 +242,7 @@ class LFCliBase:
die_on_error_=self.exit_on_error)
lf_r.addPostData(_data)
if debug_:
LANforge.LFUtils.debug_printer.pprint(_data)
LFUtils.debug_printer.pprint(_data)
json_response = lf_r.json_put(show_error=self.debug,
debug=debug_,
response_json_list_=response_json_list_,
@@ -272,8 +275,13 @@ class LFCliBase:
die_on_error_=self.exit_on_error)
json_response = lf_r.get_as_json(debug_=debug_, die_on_error_=False)
#debug_printer.pprint(json_response)
if (json_response is None) and debug_:
print("LFCliBase.json_get: no entity/response, probabily status 404")
if (json_response is None):
if debug_:
if hasattr(lf_r, 'print_errors'):
lf_r.print_errors()
else:
print("LFCliBase.json_get: no entity/response, check other errors")
time.sleep(10)
return None
except ValueError as ve:
if debug_ or self.exit_on_error:
@@ -649,118 +657,3 @@ class LFCliBase:
abgnAX : 12,
bgnAX : 13
"""
#================ Pandas Dataframe Functions ======================================
#takes any dataframe and returns the specified file extension of it
def df_to_file(self, output_f=None,dataframe=None, save_path=None):
if output_f.lower() == 'hdf':
import tables
dataframe.to_hdf(save_path.replace('csv','h5',1), 'table', append=True)
if output_f.lower() == 'parquet':
import pyarrow as pa
dataframe.to_parquet(save_path.replace('csv','parquet',1), engine='pyarrow')
if output_f.lower() == 'png':
fig = dataframe.plot().get_figure()
fig.savefig(save_path.replace('csv','png',1))
if output_f.lower() == 'xlsx':
dataframe.to_excel(save_path.replace('csv','xlsx',1))
if output_f.lower() == 'json':
dataframe.to_json(save_path.replace('csv','json',1))
if output_f.lower() == 'stata':
dataframe.to_stata(save_path.replace('csv','dta',1))
if output_f.lower() == 'pickle':
dataframe.to_pickle(save_path.replace('csv','pkl',1))
if output_f.lower() == 'html':
dataframe.to_html(save_path.replace('csv','html',1))
#takes any format of a file and returns a dataframe of it
def file_to_df(self,file_name):
if file_name.split('.')[-1] == 'csv':
return pd.read_csv(file_name)
#only works for test_ipv4_variable_time at the moment
def compare_two_df(self,dataframe_one=None,dataframe_two=None):
#df one = current report
#df two = compared report
pd.set_option("display.max_rows", None, "display.max_columns", None)
#get all of common columns besides Timestamp, Timestamp milliseconds
common_cols = list(set(dataframe_one.columns).intersection(set(dataframe_two.columns)))
cols_to_remove = ['Timestamp milliseconds epoch','Timestamp','LANforge GUI Build: 5.4.3']
com_cols = [i for i in common_cols if i not in cols_to_remove]
#check if dataframes have the same endpoints
if dataframe_one.name.unique().tolist().sort() == dataframe_two.name.unique().tolist().sort():
endpoint_names = dataframe_one.name.unique().tolist()
if com_cols is not None:
dataframe_one = dataframe_one[[c for c in dataframe_one.columns if c in com_cols]]
dataframe_two = dataframe_two[[c for c in dataframe_one.columns if c in com_cols]]
dataframe_one = dataframe_one.loc[:, ~dataframe_one.columns.str.startswith('Script Name:')]
dataframe_two = dataframe_two.loc[:, ~dataframe_two.columns.str.startswith('Script Name:')]
lowest_duration=min(dataframe_one['Duration elapsed'].max(),dataframe_two['Duration elapsed'].max())
print("The max duration in the new dataframe will be... " + str(lowest_duration))
compared_values_dataframe = pd.DataFrame(columns=[col for col in com_cols if not col.startswith('Script Name:')])
cols = compared_values_dataframe.columns.tolist()
cols=sorted(cols, key=lambda L: (L.lower(), L))
compared_values_dataframe= compared_values_dataframe[cols]
print(compared_values_dataframe)
for duration_elapsed in range(lowest_duration):
for endpoint in endpoint_names:
#check if value has a space in it or is a str.
# if value as a space, only take value before space for calc, append that calculated value after space.
#if str. check if values match from 2 df's. if values do not match, write N/A
for_loop_df1 = dataframe_one.loc[(dataframe_one['name'] == endpoint) & (dataframe_one['Duration elapsed'] == duration_elapsed)]
for_loop_df2 = dataframe_two.loc[(dataframe_one['name'] == endpoint) & (dataframe_two['Duration elapsed'] == duration_elapsed)]
# print(for_loop_df1)
# print(for_loop_df2)
cols_to_loop = [i for i in com_cols if i not in ['Duration elapsed', 'Name', 'Script Name: test_ipv4_variable_time']]
cols_to_loop=sorted(cols_to_loop, key=lambda L: (L.lower(), L))
print(cols_to_loop)
row_to_append={}
row_to_append["Duration elapsed"] = duration_elapsed
for col in cols_to_loop:
print(col)
print(for_loop_df1)
#print(for_loop_df2)
print(for_loop_df1.at[0, col])
print(for_loop_df2.at[0, col])
if type(for_loop_df1.at[0, col]) == str and type(for_loop_df2.at[0, col]) == str:
if (' ' in for_loop_df1.at[0,col]) == True:
#do subtraction
new_value = float(for_loop_df1.at[0, col].split(" ")[0]) - float(for_loop_df2.at[0, col].split(" ")[0])
#add on last half of string
new_value = str(new_value)+ for_loop_df2.at[0, col].split(" ")[1]
# print(new_value)
row_to_append[col] = new_value
else:
if for_loop_df1.at[0, col] != for_loop_df2.at[0, col]:
row_to_append[col] = 'NaN'
else:
row_to_append[col] = for_loop_df1.at[0,col]
elif type(for_loop_df1.at[0, col]) == int and type(for_loop_df2.at[0, col]) == int or type(for_loop_df1.at[0, col]) == float and type(for_loop_df2.at[0,col]) == float:
new_value = for_loop_df1.at[0, col] - for_loop_df2.at[0, col]
row_to_append[col] = new_value
compared_values_dataframe = compared_values_dataframe.append(row_to_append, ignore_index=True,)
print(compared_values_dataframe)
#add col name to new df
print(dataframe_one)
print(dataframe_two)
print(compared_values_dataframe)
else:
ValueError("Unable to execute report comparison due to inadequate file commonalities. ")
exit(1)
else:
ValueError("Two files do not have the same endpoints. Please try file comparison with files that have the same endpoints.")
exit(1)
#take those columns and separate those columns from others in DF.
pass
#return compared_df
def append_df_to_file(self,dataframe, file_name):
pass
# ~class

View File

@@ -0,0 +1,118 @@
#!/usr/bin/env python3
import pandas as pd
class pandas_extensions:
    """Pandas DataFrame helpers shared by LANforge scripts: save a dataframe
    in assorted file formats, load one from disk, and diff two test-report
    dataframes column by column."""

    # ================ Pandas Dataframe Functions ======================================

    # takes any dataframe and returns the specified file extension of it
    def df_to_file(self, output_f=None, dataframe=None, save_path=None):
        """Write *dataframe* to disk in the format named by *output_f*.

        :param output_f: one of: hdf, parquet, png, xlsx, json, stata, pickle, html
        :param dataframe: pandas DataFrame to serialize
        :param save_path: path containing 'csv'; its first 'csv' is rewritten
            to the extension matching *output_f*
        :return: nothing
        """
        fmt = output_f.lower()  # hoisted: the original re-lowered on every comparison
        if fmt == 'hdf':
            dataframe.to_hdf(save_path.replace('csv', 'h5', 1), 'table', append=True)
        if fmt == 'parquet':
            dataframe.to_parquet(save_path.replace('csv', 'parquet', 1), engine='pyarrow')
        if fmt == 'png':
            fig = dataframe.plot().get_figure()
            fig.savefig(save_path.replace('csv', 'png', 1))
        if fmt == 'xlsx':
            dataframe.to_excel(save_path.replace('csv', 'xlsx', 1))
        if fmt == 'json':
            dataframe.to_json(save_path.replace('csv', 'json', 1))
        if fmt == 'stata':
            dataframe.to_stata(save_path.replace('csv', 'dta', 1))
        if fmt == 'pickle':
            dataframe.to_pickle(save_path.replace('csv', 'pkl', 1))
        if fmt == 'html':
            dataframe.to_html(save_path.replace('csv', 'html', 1))

    # takes any format of a file and returns a dataframe of it
    def file_to_df(self, file_name):
        """Load *file_name* into a DataFrame; currently only .csv is supported.

        :param file_name: path to read
        :return: DataFrame for a .csv input, otherwise None
        """
        if file_name.split('.')[-1] == 'csv':
            return pd.read_csv(file_name)

    # only works for test_ipv4_variable_time at the moment
    def compare_two_df(self, dataframe_one=None, dataframe_two=None):
        """Diff two report dataframes over their shared columns.

        Builds a dataframe of per-endpoint, per-second differences (df one
        minus df two), printing progress along the way.

        :param dataframe_one: current report (minuend)
        :param dataframe_two: compared report (subtrahend)
        :raises ValueError: if the reports lack adequate common columns or
            do not share the same endpoints
        :return: nothing (result is only printed; see trailing TODO)
        """
        # df one = current report
        # df two = compared report
        pd.set_option("display.max_rows", None, "display.max_columns", None)
        # get all of common columns besides Timestamp, Timestamp milliseconds
        common_cols = list(set(dataframe_one.columns).intersection(set(dataframe_two.columns)))
        cols_to_remove = ['Timestamp milliseconds epoch', 'Timestamp', 'LANforge GUI Build: 5.4.3']
        com_cols = [i for i in common_cols if i not in cols_to_remove]
        # check if dataframes have the same endpoints
        # NOTE(review): list.sort() returns None, so this compares None == None
        # and the mismatched-endpoints branch below is unreachable; preserved
        # as-is to avoid changing flow -- confirm intent with the author.
        if dataframe_one.name.unique().tolist().sort() == dataframe_two.name.unique().tolist().sort():
            endpoint_names = dataframe_one.name.unique().tolist()
            if com_cols is not None:
                dataframe_one = dataframe_one[[c for c in dataframe_one.columns if c in com_cols]]
                dataframe_two = dataframe_two[[c for c in dataframe_one.columns if c in com_cols]]
                dataframe_one = dataframe_one.loc[:, ~dataframe_one.columns.str.startswith('Script Name:')]
                dataframe_two = dataframe_two.loc[:, ~dataframe_two.columns.str.startswith('Script Name:')]
                lowest_duration = min(dataframe_one['Duration elapsed'].max(), dataframe_two['Duration elapsed'].max())
                print("The max duration in the new dataframe will be... " + str(lowest_duration))
                compared_values_dataframe = pd.DataFrame(
                    columns=[col for col in com_cols if not col.startswith('Script Name:')])
                cols = compared_values_dataframe.columns.tolist()
                cols = sorted(cols, key=lambda L: (L.lower(), L))
                compared_values_dataframe = compared_values_dataframe[cols]
                print(compared_values_dataframe)
                for duration_elapsed in range(lowest_duration):
                    for endpoint in endpoint_names:
                        # check if value has a space in it or is a str.
                        # if value has a space, only take value before space for calc, append that calculated value after space.
                        # if str, check if values match from 2 df's. if values do not match, write N/A
                        for_loop_df1 = dataframe_one.loc[(dataframe_one['name'] == endpoint) & (
                                dataframe_one['Duration elapsed'] == duration_elapsed)]
                        # NOTE(review): the mask below filters rows of dataframe_two
                        # using dataframe_one['name']; presumably dataframe_two['name']
                        # was intended -- verify before changing.
                        for_loop_df2 = dataframe_two.loc[(dataframe_one['name'] == endpoint) & (
                                dataframe_two['Duration elapsed'] == duration_elapsed)]
                        cols_to_loop = [i for i in com_cols if
                                        i not in ['Duration elapsed', 'Name', 'Script Name: test_ipv4_variable_time']]
                        cols_to_loop = sorted(cols_to_loop, key=lambda L: (L.lower(), L))
                        print(cols_to_loop)
                        row_to_append = {}
                        row_to_append["Duration elapsed"] = duration_elapsed
                        for col in cols_to_loop:
                            print(col)
                            print(for_loop_df1)
                            print(for_loop_df1.at[0, col])
                            print(for_loop_df2.at[0, col])
                            if type(for_loop_df1.at[0, col]) == str and type(for_loop_df2.at[0, col]) == str:
                                if (' ' in for_loop_df1.at[0, col]) == True:
                                    # do subtraction on the numeric prefix
                                    new_value = float(for_loop_df1.at[0, col].split(" ")[0]) - float(
                                        for_loop_df2.at[0, col].split(" ")[0])
                                    # add on last half of string (the unit suffix)
                                    new_value = str(new_value) + for_loop_df2.at[0, col].split(" ")[1]
                                    row_to_append[col] = new_value
                                else:
                                    if for_loop_df1.at[0, col] != for_loop_df2.at[0, col]:
                                        row_to_append[col] = 'NaN'
                                    else:
                                        row_to_append[col] = for_loop_df1.at[0, col]
                            elif type(for_loop_df1.at[0, col]) == int and type(for_loop_df2.at[0, col]) == int or type(
                                    for_loop_df1.at[0, col]) == float and type(for_loop_df2.at[0, col]) == float:
                                new_value = for_loop_df1.at[0, col] - for_loop_df2.at[0, col]
                                row_to_append[col] = new_value
                        # fixed: DataFrame.append() was removed in pandas 2.x;
                        # pd.concat of a one-row frame is the equivalent
                        compared_values_dataframe = pd.concat(
                            [compared_values_dataframe, pd.DataFrame([row_to_append])], ignore_index=True)
                        print(compared_values_dataframe)
                # add col name to new df
                print(dataframe_one)
                print(dataframe_two)
                print(compared_values_dataframe)
            else:
                # fixed: the original constructed this ValueError but never raised
                # it, then called exit(1); raising gives callers a catchable error
                raise ValueError("Unable to execute report comparison due to inadequate file commonalities. ")
        else:
            # fixed: raised instead of silently discarded (exit(1) removed with it)
            raise ValueError(
                "Two files do not have the same endpoints. Please try file comparison with files that have the same endpoints.")
        # take those columns and separate those columns from others in DF.
        pass
        # return compared_df

View File

@@ -1,100 +1,122 @@
set_port_current_flags = {
"if_down": 0x1, # Interface Down
"fixed_10bt_hd": 0x2, # Fixed-10bt-HD (half duplex)
"fixed_10bt_fd": 0x4, # Fixed-10bt-FD
"fixed_100bt_hd": 0x8, # Fixed-100bt-HD
"fixed_100bt_fd": 0x10, # Fixed-100bt-FD
"auto_neg": 0x100, # auto-negotiate
"adv_10bt_hd": 0x100000, # advert-10bt-HD
"adv_10bt_fd": 0x200000, # advert-10bt-FD
"adv_100bt_hd": 0x400000, # advert-100bt-HD
"adv_100bt_fd": 0x800000, # advert-100bt-FD
"adv_flow_ctl": 0x8000000, # advert-flow-control
"promisc": 0x10000000, # PROMISC
"use_dhcp": 0x80000000, # USE-DHCP
"adv_10g_hd": 0x400000000, # advert-10G-HD
"adv_10g_fd": 0x800000000, # advert-10G-FD
"tso_enabled": 0x1000000000, # TSO-Enabled
"lro_enabled": 0x2000000000, # LRO-Enabled
"gro_enabled": 0x4000000000, # GRO-Enabled
"ufo_enabled": 0x8000000000, # UFO-Enabled
"gso_enabled": 0x10000000000, # GSO-Enabled
"use_dhcpv6": 0x20000000000, # USE-DHCPv6
"rxfcs": 0x40000000000, # RXFCS
"no_dhcp_rel": 0x80000000000, # No-DHCP-Release
"staged_ifup": 0x100000000000, # Staged-IFUP
"http_enabled": 0x200000000000, # Enable HTTP (nginx) service for this port.
"ftp_enabled": 0x400000000000, # Enable FTP (vsftpd) service for this port.
"aux_mgt": 0x800000000000, # Enable Auxillary-Management flag for this port.
"no_dhcp_restart": 0x1000000000000, # Disable restart of DHCP on link connect (ie, wifi).
# This should usually be enabled when testing wifi
# roaming so that the wifi station can roam
# without having to re-acquire a DHCP lease each
# time it roams.
"ignore_dhcp": 0x2000000000000, # Don't set DHCP acquired IP on interface,
# instead print CLI text message. May be useful
# in certain wifi-bridging scenarios where external
# traffic-generator cannot directly support DHCP.
import sys
import os
import importlib
"no_ifup_post": 0x4000000000000, # Skip ifup-post script if we can detect that we
# have roamed. Roaming is considered true if
# the IPv4 address has not changed.
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
"radius_enabled": 0x20000000000000, # Enable RADIUS service (using hostapd as radius server)
"ipsec_client": 0x40000000000000, # Enable client IPSEC xfrm on this port.
"ipsec_concentrator": 0x80000000000000, # Enable concentrator (upstream) IPSEC xfrm on this port.
"service_dns": 0x100000000000000, # Enable DNS (dnsmasq) service on this port.
}
set_port_cmd_flags = {
"reset_transceiver": 0x1, # Reset transciever
"restart_link_neg": 0x2, # Restart link negotiation
"force_MII_probe": 0x4, # Force MII probe
"no_hw_probe": 0x8, # Don't probe hardware
"probe_wifi": 0x10, # Probe WIFI
"new_gw_probe": 0x20, # Force new GW probe
"new_gw_probe_dev": 0x40, # Force new GW probe for ONLY this interface
"from_user": 0x80, # from_user (Required to change Mgt Port config
# (IP, DHCP, etc)
"skip_port_bounce": 0x100, # skip-port-bounce (Don't ifdown/up
# interface if possible.)
"from_dhcp": 0x200, # Settings come from DHCP client.
"abort_if_scripts": 0x400, # Forceably abort all ifup/down scripts on this Port.
"use_pre_ifdown": 0x800, # Call pre-ifdown script before bringing interface down.
}
set_port_interest_flags = {
"command_flags" : 0x1, # apply command flags
"current_flags" : 0x2, # apply current flags
"ip_address" : 0x4, # IP address
"ip_Mask" : 0x8, # IP mask
"ip_gateway" : 0x10, # IP gateway
"mac_address" : 0x20, # MAC address
"supported_flags" : 0x40, # apply supported flags
"link_speed" : 0x80, # Link speed
"mtu" : 0x100, # MTU
"tx_queue_length" : 0x200, # TX Queue Length
"promisc_mode" : 0x400, # PROMISC mode
"interal_use_1" : 0x800, # (INTERNAL USE)
"alias" : 0x1000, # Port alias
"rx_all" : 0x2000, # Rx-ALL
"dhcp" : 0x4000, # including client-id.
"rpt_timer" : 0x8000, # Report Timer
"bridge" : 0x10000, # BRIDGE
"ipv6_addrs" : 0x20000, # IPv6 Address
"bypass" : 0x40000, # Bypass
"gen_offload" : 0x80000, # Generic offload flags, everything but LRO
"cpu_mask" : 0x100000, # CPU Mask, useful for pinning process to CPU core
"lro_offload" : 0x200000, # LRO (Must be disabled when used in Wanlink,
# and probably in routers)
"sta_br_id" : 0x400000, # WiFi Bridge identifier. 0 means no bridging.
"ifdown" : 0x800000, # Down interface
"dhcpv6" : 0x1000000, # Use DHCPv6
"rxfcs" : 0x2000000, # RXFCS
"dhcp_rls" : 0x4000000, # DHCP release
"svc_httpd" : 0x8000000, # Enable/disable HTTP Service for a port
"svc_ftpd" : 0x10000000, # Enable/disable FTP Service for a port
"aux_mgt" : 0x20000000, # Enable/disable Auxillary-Management for a port
"no_dhcp_conn" : 0x40000000, # Enable/disable NO-DHCP-ON-CONNECT flag for a port
"no_apply_dhcp" : 0x80000000, # Enable/disable NO-APPLY-DHCP flag for a port
"skip_ifup_roam" : 0x100000000, # Enable/disable SKIP-IFUP-ON-ROAM flag for a port
}
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))
lf_json_autogen = importlib.import_module("py-json.LANforge.lf_json_autogen")
LFJsonPost = lf_json_autogen.LFJsonPost
if os.environ.get("LF_USE_AUTOGEN") == 1:
set_port_current_flags = LFJsonPost.SetPortCurrentFlags.__members__
set_port_cmd_flags = LFJsonPost.SetPortCmdFlags.__members__
set_port_interest_flags = LFJsonPost.SetPortInterest.__members__
else:
set_port_current_flags = {
"if_down": 0x1, # Interface Down
"fixed_10bt_hd": 0x2, # Fixed-10bt-HD (half duplex)
"fixed_10bt_fd": 0x4, # Fixed-10bt-FD
"fixed_100bt_hd": 0x8, # Fixed-100bt-HD
"fixed_100bt_fd": 0x10, # Fixed-100bt-FD
"auto_neg": 0x100, # auto-negotiate
"adv_10bt_hd": 0x100000, # advert-10bt-HD
"adv_10bt_fd": 0x200000, # advert-10bt-FD
"adv_100bt_hd": 0x400000, # advert-100bt-HD
"adv_100bt_fd": 0x800000, # advert-100bt-FD
"adv_flow_ctl": 0x8000000, # advert-flow-control
"promisc": 0x10000000, # PROMISC
"use_dhcp": 0x80000000, # USE-DHCP
"adv_10g_hd": 0x400000000, # advert-10G-HD
"adv_10g_fd": 0x800000000, # advert-10G-FD
"tso_enabled": 0x1000000000, # TSO-Enabled
"lro_enabled": 0x2000000000, # LRO-Enabled
"gro_enabled": 0x4000000000, # GRO-Enabled
"ufo_enabled": 0x8000000000, # UFO-Enabled
"gso_enabled": 0x10000000000, # GSO-Enabled
"use_dhcpv6": 0x20000000000, # USE-DHCPv6
"rxfcs": 0x40000000000, # RXFCS
"no_dhcp_rel": 0x80000000000, # No-DHCP-Release
"staged_ifup": 0x100000000000, # Staged-IFUP
"http_enabled": 0x200000000000, # Enable HTTP (nginx) service for this port.
"ftp_enabled": 0x400000000000, # Enable FTP (vsftpd) service for this port.
"aux_mgt": 0x800000000000, # Enable Auxillary-Management flag for this port.
"no_dhcp_restart": 0x1000000000000, # Disable restart of DHCP on link connect (ie, wifi).
# This should usually be enabled when testing wifi
# roaming so that the wifi station can roam
# without having to re-acquire a DHCP lease each
# time it roams.
"ignore_dhcp": 0x2000000000000, # Don't set DHCP acquired IP on interface,
# instead print CLI text message. May be useful
# in certain wifi-bridging scenarios where external
# traffic-generator cannot directly support DHCP.
"no_ifup_post": 0x4000000000000, # Skip ifup-post script if we can detect that we
# have roamed. Roaming is considered true if
# the IPv4 address has not changed.
"radius_enabled": 0x20000000000000, # Enable RADIUS service (using hostapd as radius server)
"ipsec_client": 0x40000000000000, # Enable client IPSEC xfrm on this port.
"ipsec_concentrator": 0x80000000000000, # Enable concentrator (upstream) IPSEC xfrm on this port.
"service_dns": 0x100000000000000, # Enable DNS (dnsmasq) service on this port.
}
set_port_cmd_flags = {
"reset_transceiver": 0x1, # Reset transciever
"restart_link_neg": 0x2, # Restart link negotiation
"force_MII_probe": 0x4, # Force MII probe
"no_hw_probe": 0x8, # Don't probe hardware
"probe_wifi": 0x10, # Probe WIFI
"new_gw_probe": 0x20, # Force new GW probe
"new_gw_probe_dev": 0x40, # Force new GW probe for ONLY this interface
"from_user": 0x80, # from_user (Required to change Mgt Port config
# (IP, DHCP, etc)
"skip_port_bounce": 0x100, # skip-port-bounce (Don't ifdown/up
# interface if possible.)
"from_dhcp": 0x200, # Settings come from DHCP client.
"abort_if_scripts": 0x400, # Forceably abort all ifup/down scripts on this Port.
"use_pre_ifdown": 0x800, # Call pre-ifdown script before bringing interface down.
}
set_port_interest_flags = {
"command_flags" : 0x1, # apply command flags
"current_flags" : 0x2, # apply current flags
"ip_address" : 0x4, # IP address
"ip_Mask" : 0x8, # IP mask
"ip_gateway" : 0x10, # IP gateway
"mac_address" : 0x20, # MAC address
"supported_flags" : 0x40, # apply supported flags
"link_speed" : 0x80, # Link speed
"mtu" : 0x100, # MTU
"tx_queue_length" : 0x200, # TX Queue Length
"promisc_mode" : 0x400, # PROMISC mode
"interal_use_1" : 0x800, # (INTERNAL USE)
"alias" : 0x1000, # Port alias
"rx_all" : 0x2000, # Rx-ALL
"dhcp" : 0x4000, # including client-id.
"rpt_timer" : 0x8000, # Report Timer
"bridge" : 0x10000, # BRIDGE
"ipv6_addrs" : 0x20000, # IPv6 Address
"bypass" : 0x40000, # Bypass
"gen_offload" : 0x80000, # Generic offload flags, everything but LRO
"cpu_mask" : 0x100000, # CPU Mask, useful for pinning process to CPU core
"lro_offload" : 0x200000, # LRO (Must be disabled when used in Wanlink,
# and probably in routers)
"sta_br_id" : 0x400000, # WiFi Bridge identifier. 0 means no bridging.
"ifdown" : 0x800000, # Down interface
"dhcpv6" : 0x1000000, # Use DHCPv6
"rxfcs" : 0x2000000, # RXFCS
"dhcp_rls" : 0x4000000, # DHCP release
"svc_httpd" : 0x8000000, # Enable/disable HTTP Service for a port
"svc_ftpd" : 0x10000000, # Enable/disable FTP Service for a port
"aux_mgt" : 0x20000000, # Enable/disable Auxillary-Management for a port
"no_dhcp_conn" : 0x40000000, # Enable/disable NO-DHCP-ON-CONNECT flag for a port
"no_apply_dhcp" : 0x80000000, # Enable/disable NO-APPLY-DHCP flag for a port
"skip_ifup_roam" : 0x100000000, # Enable/disable SKIP-IFUP-ON-ROAM flag for a port
}
#

View File

@@ -1,5 +0,0 @@
from .LANforge import LFUtils
from .LANforge import LFRequest
from .LANforge import lfcli_base
from .LANforge.lfcli_base import LFCliBase

View File

@@ -1,15 +1,8 @@
#!/usr/bin/env python3
import re
import time
import pprint
import csv
import datetime
import random
import string
import pprint
from pprint import pprint
#from LANforge.lfcriteria import LFCriteria
class BaseProfile:
def __init__(self, local_realm, debug=False):
@@ -126,4 +119,4 @@ class BaseProfile:
# else:
# compared_rept=args.compared_report

View File

@@ -4,208 +4,235 @@
# Updated by: Erin Grimes
import sys
import urllib
import importlib
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
import time
exit(1)
import os
from time import sleep
from urllib import error
import pprint
import LANforge
from LANforge import LFRequest
from LANforge import LFUtils
from LANforge.LFUtils import NA
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
j_printer = pprint.PrettyPrinter(indent=2)
# todo: this needs to change
resource_id = 1
def main(base_url="http://localhost:8080", args={}):
json_post = ""
json_response = ""
num_wanlinks = -1
# see if there are old wanlinks to remove
lf_r = LFRequest.LFRequest(base_url+"/wl/list")
print(lf_r.get_as_json())
# ports to set as endpoints
port_a ="rd0a"
port_b ="rd1a"
try:
json_response = lf_r.getAsJson()
LFUtils.debug_printer.pprint(json_response)
for key,value in json_response.items():
if (isinstance(value, dict) and "_links" in value):
num_wanlinks = 1
except urllib.error.HTTPError as error:
num_wanlinks = 0;
def main(base_url, args={}):
print(base_url)
json_post = ""
json_response = ""
num_wanlinks = -1
# remove old wanlinks
if (num_wanlinks > 0):
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_cx")
lf_r.addPostData({
# see if there are old wanlinks to remove
lf_r = LFRequest.LFRequest(base_url+"/wl/list")
print(lf_r.get_as_json())
# remove old wanlinks
if (num_wanlinks > 0):
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_cx")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': 'wl_eg1'
})
lf_r.jsonPost()
sleep(0.05)
'cx_name': args['name']
})
lf_r.jsonPost()
sleep(0.05)
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp")
lf_r.addPostData({
'endp_name': 'wl_eg1-A'
})
lf_r.jsonPost()
sleep(0.05)
try:
json_response = lf_r.getAsJson()
LFUtils.debug_printer.pprint(json_response)
for key, value in json_response.items():
if (isinstance(value, dict) and "_links" in value):
num_wanlinks = 1
except urllib.error.HTTPError as error:
print("Error code "+error.code)
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp")
lf_r.addPostData({
'endp_name': 'wl_eg1-B'
})
lf_r.jsonPost()
sleep(0.05)
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp")
lf_r.addPostData({
'endp_name': args['name']+"-A"
})
lf_r.jsonPost()
sleep(0.05)
# create wanlink 1a
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp")
lf_r.addPostData({
'alias': 'wl_eg1-A',
'shelf': 1,
'resource': '1',
'port': port_a,
'latency': args['latency_A'],
'max_rate': args['rate_A']
})
lf_r.jsonPost()
sleep(0.05)
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp")
lf_r.addPostData({
'endp_name': args['name']+"-B"
})
lf_r.jsonPost()
sleep(0.05)
# create wanlink 1b
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp")
lf_r.addPostData({
'alias': 'wl_eg1-B',
'shelf': 1,
'resource': '1',
'port': port_b,
'latency': args['latency_B'],
'max_rate': args['rate_B']
})
lf_r.jsonPost()
sleep(0.05)
# create wanlink endpoint A
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp")
lf_r.addPostData({
'alias': args['name']+"-A",
'shelf': 1,
'resource': '1',
'port': args['port_A'],
'latency': args['latency_A'],
'max_rate': args['rate_A'],
})
lf_r.jsonPost()
sleep(0.05)
# create cx
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_cx")
lf_r.addPostData({
'alias': 'wl_eg1',
'test_mgr': 'default_tm',
'tx_endp': 'wl_eg1-A',
'rx_endp': 'wl_eg1-B',
})
lf_r.jsonPost()
sleep(0.05)
# create wanlink endpoint B
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp")
lf_r.addPostData({
'alias': args['name']+"-B",
'shelf': 1,
'resource': '1',
'port': args['port_B'],
'latency': args['latency_B'],
'max_rate': args['rate_B'],
})
lf_r.jsonPost()
sleep(0.05)
# start wanlink once we see it
seen = 0
while (seen < 1):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1?fields=name,state,_links")
try:
json_response = lf_r.getAsJson()
if (json_response is None):
# create cx
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_cx")
lf_r.addPostData({
'alias': args['name'],
'test_mgr': 'default_tm',
'tx_endp': args['name']+"-A",
'rx_endp': args['name']+"-B",
})
lf_r.jsonPost()
sleep(0.05)
# modify wanlink endpoint A
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info")
lf_r.addPostData({
'name': args['name']+"-A",
'max_jitter': args['jitter_A'],
'jitter_freq': args['jitter_freq_A'],
'drop_freq': args['drop_A']
})
lf_r.jsonPost()
sleep(0.05)
# modify wanlink endpoint B
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info")
lf_r.addPostData({
'name': args['name']+"-B",
'max_jitter': args['jitter_B'],
'jitter_freq': args['jitter_freq_B'],
'drop_freq': args['drop_B']
})
lf_r.jsonPost()
sleep(0.05)
# start wanlink once we see it
seen = 0
while (seen < 1):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
try:
json_response = lf_r.getAsJson()
if (json_response is None):
continue
LFUtils.debug_printer.pprint(json_response)
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
seen = 1
else:
pass
# else:
# print(" name was not wl_eg1")
# else:
# print("value lacks _links")
# else:
# print("value not a dict")
except urllib.error.HTTPError as error:
print("Error code "+error.code)
continue
LFUtils.debug_printer.pprint(json_response)
for key,value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == "wl_eg1"):
seen = 1
#else:
# print(" name was not wl_eg1")
#else:
# print("value lacks _links")
#else:
# print("value not a dict")
except urllib.error.HTTPError as error:
print("Error code "+error.code)
continue
print("starting wanlink:")
# print("the latency is {laten}".format(laten=latency))
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name'],
'cx_state': 'RUNNING'
})
lf_r.jsonPost()
print("starting wanlink:")
# print("the latency is {laten}".format(laten=latency))
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': 'wl_eg1',
'cx_state': 'RUNNING'
})
lf_r.jsonPost()
running = 0
while (running < 1):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
try:
json_response = lf_r.getAsJson()
if (json_response is None):
continue
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
if (value["state"].startswith("Run")):
LFUtils.debug_printer.pprint(json_response)
running = 1
running = 0
while (running < 1):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1?fields=name,state,_links")
try:
json_response = lf_r.getAsJson()
if (json_response is None):
except urllib.error.HTTPError as error:
print("Error code "+error.code)
continue
for key,value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == "wl_eg1"):
if (value["state"].startswith("Run")):
LFUtils.debug_printer.pprint(json_response)
running = 1
except urllib.error.HTTPError as error:
print("Error code "+error.code)
continue
print("Wanlink is running")
print("Wanlink is running")
# stop wanlink
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name'],
'cx_state': 'STOPPED'
})
lf_r.jsonPost()
running = 1
while (running > 0):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,eid,state,_links")
LFUtils.debug_printer.pprint(json_response)
try:
json_response = lf_r.getAsJson()
if (json_response is None):
continue
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
if (value["state"].startswith("Stop")):
LFUtils.debug_printer.pprint(json_response)
running = 0
# stop wanlink
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': 'wl_eg1',
'cx_state': 'STOPPED'
})
lf_r.jsonPost()
running = 1
while (running > 0):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1?fields=name,eid,state,_links")
LFUtils.debug_printer.pprint(json_response)
try:
json_response = lf_r.getAsJson()
if (json_response is None):
except urllib.error.HTTPError as error:
print("Error code "+error.code)
continue
for key,value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == "wl_eg1"):
if (value["state"].startswith("Stop")):
LFUtils.debug_printer.pprint(json_response)
running = 0
except urllib.error.HTTPError as error:
print("Error code "+error.code)
continue
print("Wanlink is stopped.")
print("Wanlink is stopped.")
# print("Wanlink info:")
# lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# print("Wanlink info:")
# lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-A")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-A")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-B")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-B")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__':
main()

View File

@@ -1,8 +1,19 @@
import time
# !/usr/bin/env python3
# ---- ---- ---- ---- LANforge Base Imports ---- ---- ---- ----
from LANforge.lfcli_base import LFCliBase
import sys
import os
import importlib
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
add_dut = importlib.import_module("py-json.LANforge.add_dut")
add_dut_flags = add_dut.add_dut_flags
class cv_dut(LFCliBase):
@@ -13,6 +24,8 @@ class cv_dut(LFCliBase):
hw_version="NA",
serial_num="NA",
model_num="NA",
desired_dut_flags=None,
desired_dut_flags_mask=None
):
super().__init__(_lfjson_host=lfclient_host,
_lfjson_port=lfclient_port)
@@ -27,6 +40,30 @@ class cv_dut(LFCliBase):
self.lan_port = "[BLANK]"
self.api_id = "0"
self.flags_mask = "NA"
if desired_dut_flags is not None:
self.dut_flags = desired_dut_flags
self.dut_flags_mask = desired_dut_flags_mask
def add_named_flags(self, desired_list, command_ref):
if desired_list is None:
raise ValueError("addNamedFlags wants a list of desired flag names")
if len(desired_list) < 1:
print("addNamedFlags: empty desired list")
return 0
if (command_ref is None) or (len(command_ref) < 1):
raise ValueError("addNamedFlags wants a maps of flag values")
result = 0
for name in desired_list:
if (name is None) or (name == ""):
continue
if name not in command_ref:
if self.debug:
print(command_ref)
raise ValueError("flag %s not in map" % name)
result += command_ref[name]
return result
def create_dut(self,
ssid1="[BLANK]",
@@ -43,6 +80,11 @@ class cv_dut(LFCliBase):
top_left_x="NA",
top_left_y="NA",
):
try:
self.flags = self.add_named_flags(self.dut_flags, add_dut_flags)
self.flags_mask = self.add_named_flags(self.dut_flags_mask, add_dut_flags)
except:
pass
response_json = []
req_url = "/cli-json/add_dut"
data = {

View File

@@ -3,16 +3,30 @@ Note: This script is working as library for chamberview tests.
It holds different commands to automate test.
"""
import sys
import os
import importlib
import time
from LANforge.lfcli_base import LFCliBase
from realm import Realm
import json
from pprint import pprint
import argparse
from cv_test_reports import lanforge_reports as lf_rpt
from csv_to_influx import *
import os.path
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
cv_test_reports = importlib.import_module("py-json.cv_test_reports")
lf_rpt = cv_test_reports.lanforge_reports
InfluxRequest = importlib.import_module("py-dashboard.InfluxRequest")
influx_add_parser_args = InfluxRequest.influx_add_parser_args
RecordInflux = InfluxRequest.RecordInflux
def cv_base_adjust_parser(args):
@@ -412,20 +426,17 @@ class cv_test(Realm):
_influx_bucket=args.influx_bucket)
# lf_wifi_capacity_test.py may be run / initiated by a remote system against a lanforge
# the local_lf_report_dir is data is stored, if there is no local_lf_report_dir then the test is run directly on lanforge
# the local_lf_report_dir is where data is stored, if there is no local_lf_report_dir then the test is run directly on lanforge
if self.local_lf_report_dir == "":
path = "%s/kpi.csv" % (self.lf_report_dir)
csv_path = "%s/kpi.csv" % (self.lf_report_dir)
else:
kpi_location = self.local_lf_report_dir + "/" + os.path.basename(self.lf_report_dir)
# the local_lf_report_dir is the parent directory, need to get the directory name
path = "%s/kpi.csv" % (kpi_location)
csv_path = "%s/kpi.csv" % (kpi_location)
print("Attempt to submit kpi: ", path)
csvtoinflux = CSVtoInflux(influxdb=influxdb,
target_csv=path,
_influx_tag=args.influx_tag)
print("Attempt to submit kpi: ", csv_path)
print("Posting to influx...\n")
csvtoinflux.post_to_influx()
influxdb.csv_to_influx(csv_path)
print("All done posting to influx.\n")

View File

@@ -3,8 +3,15 @@
Library to Run Dataplane Test: Using lf_cv_base class
"""
import sys
import os
import importlib
from lf_cv_base import ChamberViewBase
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lf_cv_base = importlib.import_module("py-json.lf_cv_base")
ChamberViewBase = lf_cv_base.ChamberViewBase
class DataPlaneTest(ChamberViewBase):

View File

@@ -1,12 +1,17 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
from LANforge import add_dut
import pprint
import sys
import os
import importlib
from pprint import pprint
import time
import base64
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
add_dut = importlib.import_module("py-json.LANforge.add_dut")
class DUTProfile(LFCliBase):
def __init__(self, lfclient_host, lfclient_port, local_realm, debug_=False):

View File

@@ -1,15 +1,19 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
import pprint
from pprint import pprint
import time
# Class: FIOEndpProfile(LFCliBase)
#
# Class: FIOEndpProfile(LFCliBase)
# Written by Candela Technologies Inc.
# Updated by:
#
import sys
import os
import importlib
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
class FIOEndpProfile(LFCliBase):
"""
@@ -187,4 +191,4 @@ class FIOEndpProfile(LFCliBase):
url = "/cli-json/add_cx"
self.local_realm.json_post(url, cx_data, debug_=debug_,
suppress_related_commands_=suppress_related_commands_)
time.sleep(sleep_time)
time.sleep(sleep_time)

View File

@@ -1,16 +1,22 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
import pprint
import sys
import os
import importlib
from pprint import pprint
from LANforge.lfcli_base import LFCliBase
import csv
import pandas as pd
import time
import datetime
import json
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
pandas_extensions = importlib.import_module("py-json.LANforge.pandas_extensions")
class GenCXProfile(LFCliBase):
def __init__(self, lfclient_host, lfclient_port, local_realm, debug_=False):
super().__init__(lfclient_host, lfclient_port, debug_)
@@ -389,9 +395,9 @@ class GenCXProfile(LFCliBase):
def choose_iperf3_command(self):
gen_results = self.json_get("generic/list?fields=name,last+results", debug_=self.debug)
if gen_results['endpoints'] is not None:
pprint.pprint(gen_results['endpoints'])
pprint(gen_results['endpoints'])
#for name in gen_results['endpoints']:
# pprint.pprint(name.items)
# pprint(name.items)
#for k,v in name.items():
exit(1)
@@ -591,12 +597,12 @@ class GenCXProfile(LFCliBase):
# comparison to last report / report inputted
if compared_report is not None:
compared_df = self.compare_two_df(dataframe_one=self.file_to_df(report_file),
dataframe_two=self.file_to_df(compared_report))
compared_df = pandas_extensions.compare_two_df(dataframe_one=pandas_extensions.file_to_df(report_file),
dataframe_two=pandas_extensions.file_to_df(compared_report))
exit(1)
# append compared df to created one
if output_format.lower() != 'csv':
self.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
else:
if output_format.lower() != 'csv':
self.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)

View File

@@ -1,11 +1,18 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
import port_utils
from port_utils import PortUtils
from pprint import pprint
import pprint
import sys
import os
import importlib
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
port_utils = importlib.import_module("py-json.port_utils")
PortUtils = port_utils.PortUtils
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
class HTTPProfile(LFCliBase):
def __init__(self, lfclient_host, lfclient_port, local_realm, debug_=False):
super().__init__(lfclient_host, lfclient_port, debug_)

View File

@@ -1,12 +1,20 @@
# !/usr/bin/env python3
import pprint
import sys
import os
import importlib
from pprint import pprint
from LANforge.lfcli_base import LFCliBase
import csv
import pandas as pd
import time
import datetime
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
pandas_extensions = importlib.import_module("py-json.LANforge.pandas_extensions")
class L3CXProfile(LFCliBase):
def __init__(self,
@@ -280,15 +288,15 @@ class L3CXProfile(LFCliBase):
# comparison to last report / report inputted
if compared_report is not None:
compared_df = self.compare_two_df(dataframe_one=self.file_to_df(report_file),
dataframe_two=self.file_to_df(compared_report))
compared_df = pandas_extensions.compare_two_df(dataframe_one=pandas_extensions.file_to_df(report_file),
dataframe_two=pandas_extensions.file_to_df(compared_report))
exit(1)
# append compared df to created one
if output_format.lower() != 'csv':
self.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
else:
if output_format.lower() != 'csv':
self.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
def refresh_cx(self):
for cx_name in self.created_cx.keys():

View File

@@ -1,19 +1,19 @@
#!/usr/bin/env python3
import re
import time
import pprint
from lfdata import LFDataCollection
from base_profile import BaseProfile
import sys
import os
import datetime
import base64
import importlib
import csv
from pprint import pprint
import time
import random
import string
import datetime
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfdata = importlib.import_module("py-json.lfdata")
LFDataCollection = lfdata.LFDataCollection
base_profile = importlib.import_module("py-json.base_profile")
BaseProfile = base_profile.BaseProfile
class L3CXProfile2(BaseProfile):
@@ -668,7 +668,7 @@ class L3CXProfile2(BaseProfile):
end_time = start_time + datetime.timedelta(seconds=duration_sec)
#create lf data object
lf_data_collection= LFDataCollection(local_realm=self.local_realm,debug=self.debug)
lf_data_collection = LFDataCollection(local_realm=self.local_realm,debug=self.debug)
while datetime.datetime.now() < end_time:
csvwriter.writerow(lf_data_collection.monitor_interval(start_time_=start_time,sta_list_=sta_list_edit, created_cx_=created_cx, layer3_fields_=layer3_fields,port_mgr_fields_=",".join(port_mgr_cols)))
time.sleep(monitor_interval_ms)

View File

@@ -1,15 +1,19 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
import pprint
from pprint import pprint
import sys
import os
import importlib
import requests
import pandas as pd
import time
import datetime
import ast
import csv
import os
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
class L4CXProfile(LFCliBase):
def __init__(self, lfclient_host, lfclient_port, local_realm, debug_=False):

View File

@@ -1,9 +1,17 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
from LANforge import LFRequest
from LANforge import LFUtils
import sys
import os
import importlib
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
class ATTENUATORProfile(LFCliBase):
def __init__(self, lfclient_host, lfclient_port, local_realm, debug_=False):

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python3
"""
Base Class to be used for Chamber View Tests
@@ -8,10 +7,16 @@ Methods:
2.) Remove a CV Profile
3.) Add a DUT
4.) Show a CV Profile
"""
import sys
import os
import importlib
from LANforge.lfcli_base import LFCliBase
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
class ChamberViewBase(LFCliBase):

View File

@@ -1,16 +1,5 @@
#!/usr/bin/env python3
import re
import time
import pprint
from pprint import pprint
import os
import datetime
import base64
import xlsxwriter
import pandas as pd
import requests
import ast
import csv
# LFData class actions:

View File

@@ -1,15 +1,18 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
from LANforge import LFRequest
from LANforge import LFUtils
from LANforge import set_port
import pprint
import sys
import os
import importlib
from pprint import pprint
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
set_port = importlib.import_module("py-json.LANforge.set_port")
class MACVLANProfile(LFCliBase):

View File

@@ -1,9 +1,15 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
import pprint
import sys
import os
import importlib
from pprint import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
class MULTICASTProfile(LFCliBase):
def __init__(self, lfclient_host, lfclient_port, local_realm,

View File

@@ -1,15 +1,17 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
from LANforge import LFRequest
from LANforge import LFUtils
from LANforge import set_port
import pprint
import sys
import os
import importlib
from pprint import pprint
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
class QVLANProfile(LFCliBase):

View File

@@ -1,44 +1,66 @@
#!/usr/bin/env python3
# The Realm Class is inherited by most python tests. Realm Class inherites from LFCliBase.
# The Realm Class contains the configurable components for LANforge,
# The Realm Class contains the configurable components for LANforge,
# For example L3 / L4 cross connects, stations. Also contains helper methods
# http://www.candelatech.com/cookbook.php?vol=cli&book=Python_Create_Test_Scripts_With_the_Realm_Class
#
# Written by Candela Technologies Inc.
# Updated by:
#
# ---- ---- ---- ---- LANforge Base Imports ---- ---- ---- ----
from LANforge import LFRequest
from LANforge import LFUtils
from LANforge.lfcli_base import LFCliBase
# ---- ---- ---- ---- Profile Imports ---- ---- ---- ----
from l3_cxprofile import L3CXProfile
from l3_cxprofile2 import L3CXProfile2
from l4_cxprofile import L4CXProfile
from lf_attenmod import ATTENUATORProfile
from multicast_profile import MULTICASTProfile
from http_profile import HTTPProfile
from station_profile import StationProfile
from fio_endp_profile import FIOEndpProfile
from test_group_profile import TestGroupProfile
from dut_profile import DUTProfile
from vap_profile import VAPProfile
from mac_vlan_profile import MACVLANProfile
from wifi_monitor_profile import WifiMonitor
from gen_cxprofile import GenCXProfile
from qvlan_profile import QVLANProfile
from port_utils import PortUtils
from lfdata import LFDataCollection
# ---- ---- ---- ---- Other Imports ---- ---- ---- ----
import sys
import os
import importlib
import re
import time
import pprint
from pprint import pprint
# ---- ---- ---- ---- LANforge Base Imports ---- ---- ---- ----
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LANforge = importlib.import_module("py-json.LANforge")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
# ---- ---- ---- ---- Profile Imports ---- ---- ---- ----
l3_cxprofile = importlib.import_module("py-json.l3_cxprofile")
L3CXProfile = l3_cxprofile.L3CXProfile
l3_cxprofile2 = importlib.import_module("py-json.l3_cxprofile2")
L3CXProfile2 = l3_cxprofile2.L3CXProfile2
l4_cxprofile = importlib.import_module("py-json.l4_cxprofile")
L4CXProfile = l4_cxprofile.L4CXProfile
lf_attenmod = importlib.import_module("py-json.lf_attenmod")
ATTENUATORProfile = lf_attenmod.ATTENUATORProfile
multicast_profile = importlib.import_module("py-json.multicast_profile")
MULTICASTProfile = multicast_profile.MULTICASTProfile
http_profile = importlib.import_module("py-json.http_profile")
HTTPProfile = http_profile.HTTPProfile
station_profile = importlib.import_module("py-json.station_profile")
StationProfile = station_profile.StationProfile
fio_endp_profile = importlib.import_module("py-json.fio_endp_profile")
FIOEndpProfile = fio_endp_profile.FIOEndpProfile
test_group_profile = importlib.import_module("py-json.test_group_profile")
TestGroupProfile = test_group_profile.TestGroupProfile
dut_profile = importlib.import_module("py-json.dut_profile")
DUTProfile = dut_profile.DUTProfile
vap_profile = importlib.import_module("py-json.vap_profile")
VAPProfile = vap_profile.VAPProfile
mac_vlan_profile = importlib.import_module("py-json.mac_vlan_profile")
MACVLANProfile = mac_vlan_profile.MACVLANProfile
wifi_monitor_profile = importlib.import_module("py-json.wifi_monitor_profile")
WifiMonitor = wifi_monitor_profile.WifiMonitor
gen_cxprofile = importlib.import_module("py-json.gen_cxprofile")
GenCXProfile = gen_cxprofile.GenCXProfile
qvlan_profile = importlib.import_module("py-json.qvlan_profile")
QVLANProfile = qvlan_profile.QVLANProfile
port_utils = importlib.import_module("py-json.port_utils")
PortUtils = port_utils.PortUtils
lfdata = importlib.import_module("py-json.lfdata")
LFDataCollection = lfdata.LFDataCollection
def wpa_ent_list():
return [
@@ -844,15 +866,13 @@ class Realm(LFCliBase):
def new_l3_cx_profile(self, ver=1):
if ver == 1:
import l3_cxprofile
cx_prof = l3_cxprofile.L3CXProfile(self.lfclient_host,
cx_prof = L3CXProfile(self.lfclient_host,
self.lfclient_port,
local_realm=self,
debug_=self.debug,
report_timer_=3000)
elif ver == 2:
import l3_cxprofile2
cx_prof = l3_cxprofile2.L3CXProfile2(self.lfclient_host,
cx_prof = L3CXProfile2(self.lfclient_host,
self.lfclient_port,
local_realm=self,
debug_=self.debug,
@@ -860,7 +880,7 @@ class Realm(LFCliBase):
return cx_prof
def new_l4_cx_profile(self, ver=1):
if ver == 1 :
if ver == 1:
cx_prof = L4CXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
#elif ver == 2:
# import l4_cxprofile2

View File

@@ -1,11 +1,17 @@
#!/usr/bin/env python3
import pprint
import sys
import os
import importlib
import time
from pprint import pprint
import realm
from realm import Realm
import LANforge
from LANforge import LFUtils
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
localrealm = Realm("localhost", 8080, True)
@@ -130,4 +136,4 @@ except Exception as x:
pprint(x)
exit(1)
#
exit(0)
exit(0)

View File

@@ -4,13 +4,18 @@
# the syntax of the request is /port/<shelf=1>/<resource=1>/<list|all|portid> -
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import sys
import os
import importlib
import pprint
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
import json
import pprint
from LANforge import LFRequest
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
def main():

View File

@@ -1,25 +1,38 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
from LANforge import LFRequest
from LANforge import LFUtils
from LANforge import set_port
from LANforge import add_sta
import pprint
# !/usr/bin/env python3
import sys
import os
import importlib
from pprint import pprint
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
set_port = importlib.import_module("py-json.LANforge.set_port")
add_sta = importlib.import_module("py-json.LANforge.add_sta")
# Uncomment below to include autogen library.
# if os.environ.get("LF_USE_AUTOGEN") == 1:
# lf_json_autogen = importlib.import_module("py-json.LANforge.lf_json_autogen")
# LFJsonPost = jf_json_autogen.LFJsonPost
# use the station profile to set the combination of features you want on your stations
# once this combination is configured, build the stations with the build(resource, radio, number) call
# build() calls will fail if the station already exists. Please survey and clean your resource
# before calling build()
# survey = Realm.findStations(resource=1)
# Realm.removeStations(survey)
# profile = Realm.newStationProfile()
# profile.set...
# profile.build(resource, radio, 64)
#
# realm = importlib.import_module("py-json.realm")
# Realm = realm.Realm
# survey = Realm.findStations(resource=1)
# Realm.removeStations(survey)
# profile = Realm.newStationProfile()
# profile.set...
# profile.build(resource, radio, 64)
class StationProfile:
def __init__(self, lfclient_url, local_realm,
ssid="NA",
@@ -60,6 +73,7 @@ class StationProfile:
"mode": 0,
"mac": "xx:xx:xx:xx:*:xx",
"flags": 0, # (0x400 + 0x20000 + 0x1000000000) # create admin down
"flags_mask": 0
}
self.desired_set_port_cmd_flags = []
self.desired_set_port_current_flags = ["if_down"]
@@ -88,6 +102,20 @@ class StationProfile:
"realm": None,
"domain": None
}
self.wifi_txo_data_modified = False
self.wifi_txo_data = {
"shelf": 1,
"resource": 1,
"port": None,
"txo_enable": None,
"txo_txpower": None,
"txo_pream": None,
"txo_mcs": None,
"txo_nss": None,
"txo_bw": None,
"txo_retries": None,
"txo_sgi": None
}
self.reset_port_extra_data = {
"shelf": 1,
@@ -102,6 +130,24 @@ class StationProfile:
"seconds_till_reset": 0
}
def set_wifi_txo(self, txo_ena=1,
tx_power=255,
pream=0,
mcs=0,
nss=0,
bw=0,
retries=1,
sgi=0):
self.wifi_txo_data_modified = True
self.wifi_txo_data["txo_enable"] = txo_ena
self.wifi_txo_data["txo_txpower"] = tx_power
self.wifi_txo_data["txo_pream"] = pream
self.wifi_txo_data["txo_mcs"] = mcs
self.wifi_txo_data["txo_nss"] = nss
self.wifi_txo_data["txo_bw"] = bw
self.wifi_txo_data["txo_retries"] = retries
self.wifi_txo_data["txo_sgi"] = sgi
def set_wifi_extra(self, key_mgmt="WPA-EAP",
pairwise="CCMP TKIP",
group="CCMP TKIP",
@@ -193,7 +239,7 @@ class StationProfile:
self.set_command_param("add_sta", "ieee80211w", 2)
# self.add_sta_data["key"] = passwd
def station_mode_to_number(self,mode):
def station_mode_to_number(self, mode):
modes = ['a', 'b', 'g', 'abg', 'an', 'abgn', 'bgn', 'bg', 'abgn-AC', 'bgn-AC', 'an-AC']
return modes.index(mode) + 1
@@ -383,6 +429,8 @@ class StationProfile:
set_port.set_port_interest_flags)
self.wifi_extra_data["resource"] = radio_resource
self.wifi_extra_data["shelf"] = radio_shelf
self.wifi_txo_data["resource"] = radio_resource
self.wifi_txo_data["shelf"] = radio_shelf
self.reset_port_extra_data["resource"] = radio_resource
self.reset_port_extra_data["shelf"] = radio_shelf
@@ -391,6 +439,7 @@ class StationProfile:
add_sta_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/add_sta", debug_=debug)
set_port_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/set_port", debug_=debug)
wifi_extra_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/set_wifi_extra", debug_=debug)
wifi_txo_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/set_wifi_txo", debug_=debug)
my_sta_names = []
# add radio here
if (num_stations > 0) and (len(sta_names_) < 1):
@@ -466,9 +515,14 @@ class StationProfile:
self.wifi_extra_data["resource"] = radio_resource
self.wifi_extra_data["port"] = name
self.wifi_txo_data["resource"] = radio_resource
self.wifi_txo_data["port"] = name
if self.wifi_extra_data_modified:
wifi_extra_r.addPostData(self.wifi_extra_data)
json_response = wifi_extra_r.jsonPost(debug)
if self.wifi_txo_data_modified:
wifi_txo_r.addPostData(self.wifi_txo_data)
json_response = wifi_txo_r.jsonPost(debug)
# append created stations to self.station_names
self.station_names.append("%s.%s.%s" % (radio_shelf, radio_resource, name))
@@ -491,4 +545,24 @@ class StationProfile:
if self.debug:
print("created %s stations" % num)
#
def modify(self, radio):
for station in self.station_names:
print(station)
self.add_sta_data["flags"] = self.add_named_flags(self.desired_add_sta_flags, add_sta.add_sta_flags)
self.add_sta_data["flags_mask"] = self.add_named_flags(self.desired_add_sta_flags_mask,
add_sta.add_sta_flags)
self.add_sta_data["radio"] = radio
self.add_sta_data["sta_name"] = station
self.add_sta_data["ssid"] = 'NA'
self.add_sta_data["key"] = 'NA'
self.add_sta_data['mac'] = 'NA'
self.add_sta_data['mode'] = 'NA'
self.add_sta_data['suppress_preexec_cli'] = 'NA'
self.add_sta_data['suppress_preexec_method'] = 'NA'
add_sta_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/add_sta")
if self.debug:
print(self.lfclient_url + "/cli_json/add_sta")
print(self.add_sta_data)
add_sta_r.addPostData(self.add_sta_data)
json_response = add_sta_r.jsonPost(self.debug)

View File

@@ -1,6 +1,4 @@
#!/usr/bin/env python3
from lfdata import LFDataCollection
#import lfreporting
class TestBase:
def __init__(self):

View File

@@ -1,8 +1,13 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
import pprint
from pprint import pprint
import time
import sys
import os
import importlib
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
class TestGroupProfile(LFCliBase):

View File

@@ -2,9 +2,15 @@
""" ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
internal test driving LFUtils.expand_endp_histogram
----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- """
import LANforge
from LANforge import LFUtils
import pprint
import sys
import os
import importlib
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
distrib_load = {
"histo_category_width" : 3,

View File

@@ -1,33 +1,28 @@
"""
Candela Technologies Inc.
Info : Standard Script for Webconsole Test Utility
Date :
"""
import sys
from pprint import pprint
from uuid import uuid1
if 'py-json' not in sys.path:
sys.path.append('../py-json')
from LANforge import LFUtils
from LANforge import lfcli_base
from LANforge.lfcli_base import LFCliBase
from LANforge.LFUtils import *
import realm
from realm import PortUtils
import argparse
import datetime
import os
import importlib
import time
import matplotlib.pyplot as plt
import threading
import re
import json
import os
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
PortUtils = realm.PortUtils
webconsole_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd())))
updates_path = webconsole_dir + "/web_json/updates.js"

View File

@@ -1,14 +1,20 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
from LANforge import LFRequest
from LANforge import add_vap
from LANforge import set_port
from LANforge import LFUtils
import pprint
import sys
import os
import importlib
from pprint import pprint
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
add_vap = importlib.import_module("py-json.LANforge.add_vap")
set_port = importlib.import_module("py-json.LANforge.set_port")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
class VAPProfile(LFCliBase):
def __init__(self, lfclient_host, lfclient_port, local_realm,

View File

@@ -1,11 +1,18 @@
#!/usr/bin/env python3
import sys
import os
import importlib
import time
from pprint import pprint
from random import randint
from geometry import Rect, Group
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
from LANforge import LFUtils
from base_profile import BaseProfile
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
base_profile = importlib.import_module("py-json.base_profile")
BaseProfile = base_profile.BaseProfile
class VRProfile(BaseProfile):
@@ -64,31 +71,6 @@ class VRProfile(BaseProfile):
occupied_area = self.get_occupied_area(resource=resource, debug=debug)
return Rect(x=0, y=0, height=occupied_area.height, width=occupied_area.width)
def get_all_vrcx_bounds(self, resource=None, debug=False):
"""
Computes bounds of all free vrcx ports but omits Virtual Routers
:param resource:
:param debug:
:return: rectangle encompasing all free vrcx ports or None
"""
if (resource is None) or (resource < 1):
raise ValueError("get_netsmith_bounds wants resource id")
vrcx_map = self.vrcx_list(resource=resource, debug=debug)
rect_list = []
for eid,item in vrcx_map.items():
rect_list.append(self.vr_to_rect(item))
if len(rect_list) < 1:
return None
bounding_group = Group()
for item in rect_list:
bounding_group.append(item)
bounding_group.update()
return Rect(x=bounding_group.x,
y=bounding_group.y,
width=bounding_group.width,
height=bounding_group.height)
def vr_eid_to_url(self, eid_str=None, debug=False):
debug |= self.debug
@@ -787,4 +769,4 @@ class VRProfile(BaseProfile):
return (new_x, new_y)
###
###
###
###

View File

@@ -1,14 +1,19 @@
#!/usr/bin/env python3
from LANforge.lfcli_base import LFCliBase
from LANforge import add_monitor
from LANforge.add_monitor import *
from LANforge import LFUtils
import pprint
from pprint import pprint
import sys
import os
import importlib
import time
from LANforge.set_wifi_radio import set_radio_mode
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
add_monitor = importlib.import_module("py-json.LANforge.add_monitor")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
set_wifi_radio = importlib.import_module("py-json.LANforge.set_wifi_radio")
set_radio_mode = set_wifi_radio.set_radio_mode
class WifiMonitor:

View File

@@ -1,3 +1,5 @@
#!/usr/bin/env python3
'''
Candela Technologies Inc.

View File

@@ -1,5 +1,5 @@
#!/usr/bin/python3
'''
"""
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# -
# Example of how to filter messages from the :8081 websocket -
@@ -7,18 +7,14 @@
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
You will need websocket-client:
apt install python3-websocket
'''
"""
import sys
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
import os
import importlib
import argparse
import json
import logging
import pprint
import traceback
import time
from time import sleep
import websocket
import re
@@ -26,7 +22,16 @@ try:
import thread
except ImportError:
import _thread as thread
from LANforge import LFUtils
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
cre={
"phy": re.compile(r'^(1\.\d+):\s+(\S+)\s+\(phy', re.I),
@@ -322,7 +327,7 @@ def m_error(wsock, err):
# ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
def m_open(wsock):
def run(*args):
time.sleep(0.1)
sleep(0.1)
# ping = json.loads();
wsock.send('{"text":"ping"}')
@@ -353,4 +358,4 @@ if __name__ == '__main__':
####
####
####
####

View File

@@ -36,12 +36,17 @@ table {
table, td, th {
border: 1px solid gray;
padding 4px;
background: white;
}
table.noborder, table.noborder td, table.noborder th {
border: 0 none;
}
tr {
background: white;
}
td {
background: white;
padding: 6px;
}
td.ar {
text-align: right;
@@ -50,6 +55,9 @@ th {
color: rgb(42,91,41);
text-align: center;
}
td a, td a:visited {
color: #005500;
}
#lf_title {
text-align: center;
background-image: url(candela_swirl_small-72h.png);
@@ -192,7 +200,7 @@ li {
background-image:url("banner.png");
background-repeat:no-repeat;
padding: 0;
margin: 1em;
margin: 0 1em;
min-width: 1000px;
min-height: 205px;
width: 1000px;
@@ -270,7 +278,7 @@ div.Section img {
top: 50%;
transform: translateY(-50%);
}
div.FooterStyle {
footer.FooterStyle, div.FooterStyle {
width: 100%;
vertical-align: middle;
border: 0 none;
@@ -279,12 +287,12 @@ div.FooterStyle {
font-size: 12px;
margin-top: 2em;
}
div.FooterStyle img {
footer.FooterStyle img, div.FooterStyle img {
width: auto;
height: auto;
text-align: right;
}
div.FooterStyle span.Gradient {
footer.FooterStyle span.Gradient, div.FooterStyle span.Gradient {
background: white;
color: #2A5B29;
display: inline-block;
@@ -307,7 +315,7 @@ div.FooterStyle span.Gradient {
padding: 0;
vertical-align: middle;
}
div.FooterStyle a.LogoImgLink {
footer.FooterStyle a.LogoImgLink, div.FooterStyle a.LogoImgLink {
display: inline-block;
text-align: right;
float: right;
@@ -317,10 +325,52 @@ a .LogoImgLink {
a.LogoImgLink img {
}
.DateFont {
white-space: pre;
font-size: smaller;
}
.TimeFont {
white-space: pre;
}
table.dataframe {
margin: 1em;
padding: 0;
}
table.dataframe tr th {
padding: 0.5em;
}
.scriptdetails tt {
font-size: 10px;
overflow: auto;
font-family: Consolas,monaco,"Lucida Sans Typewriter","Lucida Typewriter","Courier New",monospace;
color: #777;
padding: 2px;
line-spacing: 1.05;
display: block;
margin: 0;
padding: 0;
width: inherit;
height: inherit;
background: inherit;
white-space: break-spaces;
}
.scriptdetails:hover tt {
background: #dfd;
cursor: pointer;
}
td.scriptdetails {
padding: 2px !important;
}
td.scriptdetails span.copybtn {
display: none;
}
td.scriptdetails:hover span.copybtn {
display: inline-block;
padding: 5px;
font-size: 12px;
float: left;
color: #050;
background: white;
}

View File

@@ -0,0 +1,57 @@
#!/usr/bin/env python3
import os
import pandas as pd
import argparse
def get_tag(x, tag):
try:
return x[tag]
except:
return False
def main():
parser = argparse.ArgumentParser(
prog="check_argparse.py",
formatter_class=argparse.RawTextHelpFormatter,
description='''
Check each file in py-scripts, or user defined '''
)
parser.add_argument("--path", default='.')
parser.add_argument("--output", default='argparse_results')
args = parser.parse_args()
files = [f for f in os.listdir(args.path) if '.py' in f]
results = dict()
for file in files:
text = open(os.path.join(args.path, file)).read()
results_file = dict()
results_file['argparse'] = 'argparse.' in text
if results_file['argparse'] is True:
results_file['create_basic'] = 'create_basic_argparse' in text
results_file['create_bare'] = 'create_bare_argparse' in text
results_file['prog'] = 'prog=' in text
results_file['formatter_class'] = 'formatter_class=' in text
results_file['description'] = 'description=' in text
results_file['epilog'] = 'epilog=' in text
results_file['usage'] = 'usage=' in text
results[file] = results_file
df = pd.DataFrame(results.items())
df.columns = ['File', 'results']
df['argparse'] = [x['argparse'] for x in df['results']]
for tag in ['create_basic',
'create_bare',
'prog',
'formatter_class',
'description',
'epilog',
'usage']:
df[tag] = [get_tag(x, tag) for x in df['results']]
df['details'] = df['description'] + df['epilog'] + df['usage']
df.to_csv(args.output + '.csv', index=False)
if __name__ == "__main__":
main()

View File

@@ -1,18 +1,17 @@
#
# import os
# import sys
# import base64
# import urllib.request
# from bs4 import BeautifulSoup
# import ssl
# import subprocess, os
# import subprocess
# from artifactory import ArtifactoryPath
# import tarfile
# import paramiko
# from paramiko import SSHClient
# from scp import SCPClient
# import os
# import pexpect
# from pexpect import pxssh
# import sys
# import paramiko
# from scp import SCPClient
# import pprint
@@ -23,31 +22,29 @@
# # For finding files
# # https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
# import glob
# external_results_dir=/var/tmp/lanforge
#
# local_dir=os.getenv('LOG_DIR')
# external_results_dir = / var / tmp / lanforge
#
# local_dir = os.getenv('LOG_DIR')
# print("Local Directory where all files will be copied and logged", local_dir)
# cicd_user=os.getenv('CICD_USER')
# cicd_user = os.getenv('CICD_USER')
# print("cicd_user = ", cicd_user)
# cicd_pw=os.getenv('CICD_PW')
# print("cicd pw =",cicd_pw)
# ap_pw=os.getenv('AP_PW')
# ap_user=os.getenv('AP_USER')
# tr_user=os.getenv('TR_USER')
# cicd_pw = os.getenv('CICD_PW')
# print("cicd pw =", cicd_pw)
# ap_pw = os.getenv('AP_PW')
# ap_user = os.getenv('AP_USER')
# tr_user = os.getenv('TR_USER')
# print("Testrail user id = ", tr_user)
# tr_pw=os.getenv('TR_PW')
# print ("Testrail password =", tr_pw)
# aws_host='3.96.56.0'
# aws_user='ubuntu'
#
#
#
# tr_pw = os.getenv('TR_PW')
# print("Testrail password =", tr_pw)
# aws_host = '3.96.56.0'
# aws_user = 'ubuntu'
#
# if sys.version_info[0] != 3:
# print("This script requires Python 3")
# exit(1)
# print("This script requires Python 3")
# exit(1)
# if 'py-json' not in sys.path:
# sys.path.append('../py-json')
# sys.path.append('../py-json')
#
# from LANforge.LFUtils import *
# # if you lack __init__.py in this directory you will not find sta_connect module#
@@ -60,20 +57,20 @@
# client.user = tr_user
# client.password = tr_pw
#
#
# print('Beginning file download with requests')
#
#
# class GetBuild:
# def __init__(self):
# self.user = cicd_user
# self.password = cicd_pw
# ssl._create_default_https_context = ssl._create_unverified_context
#
# def get_latest_image(self,url):
# def get_latest_image(self, url):
#
# auth = str(
# base64.b64encode(
# bytes('%s:%s' % (cicd_user,cicd_pw ), 'utf-8')
# bytes('%s:%s' % (cicd_user, cicd_pw), 'utf-8')
# ),
# 'ascii'
# ).strip()
@@ -86,21 +83,21 @@
# html = response.read()
# soup = BeautifulSoup(html, features="html.parser")
# last_link = soup.find_all('a', href=True)[-1]
# latest_file=last_link['href']
# latest_file = last_link['href']
#
# filepath = local_dir
# os.chdir(filepath)
# #file_url = url + latest_file
# # file_url = url + latest_file
#
# ''' Download the binary file from Jfrog'''
# path = ArtifactoryPath(url,auth=(cicd_user, cicd_pw))
# path = ArtifactoryPath(url, auth=(cicd_user, cicd_pw))
# path.touch()
# for file in path:
# print('File =', file)
#
# path = ArtifactoryPath(file, auth=(cicd_user, cicd_pw))
# print("file to be downloaded :" ,latest_file)
# print("File Path:",file)
# print("file to be downloaded :", latest_file)
# print("File Path:", file)
# with path.open() as des:
# with open(latest_file, "wb") as out:
# out.write(des.read())
@@ -111,9 +108,9 @@
# housing_tgz.close()
# return "pass"
# print("Extract the tar file, and copying the file to Linksys AP directory")
# #with open("/Users/syamadevi/Desktop/syama/ea8300/ap_sysupgrade_output.log", "a") as output:
# # subprocess.call("scp /Users/syamadevi/Desktop/syama/ea8300/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin root@192.100.1.1:/tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin",shell=True, stdout=output,
# # stderr=output)
# # with open("/Users/syamadevi/Desktop/syama/ea8300/ap_sysupgrade_output.log", "a") as output:
# # subprocess.call("scp /Users/syamadevi/Desktop/syama/ea8300/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin root@192.100.1.1:/tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin",shell=True, stdout=output,
# # stderr=output)
#
# print('SSH to Linksys and upgrade the file')
#
@@ -149,18 +146,22 @@
# '''
#
# def run_opensyncgw_in_docker(self):
# #user_password = 'fepv6nj9guCPeEHC'
# #my_env = os.environ.copy()
# #my_env["userpass"] = user_password
# #my_command = 'python --version'
# #subprocess.Popen('echo', env=my_env)
# with open(local_dir +"docker_jfrog_login.log", "a") as output:
# subprocess.call("docker login --username" + cicd_user + "--password" + cicd_pw + " https://tip-tip-wlan-cloud-docker-repo.jfrog.io", shell=True, stdout=output,
# stderr=output)
# with open(local_dir +"opensyncgw_upgrade.log", "a") as output:
# subprocess.call("docker pull tip-tip-wlan-cloud-docker-repo.jfrog.io/opensync-gateway-and-mqtt:0.0.1-SNAPSHOT", shell=True, stdout=output,
# stderr=output)
# with open(local_dir+"opensyncgw.log", "a") as output:
# # user_password = 'fepv6nj9guCPeEHC'
# # my_env = os.environ.copy()
# # my_env["userpass"] = user_password
# # my_command = 'python --version'
# # subprocess.Popen('echo', env=my_env)
# with open(local_dir + "docker_jfrog_login.log", "a") as output:
# subprocess.call(
# "docker login --username" + cicd_user + "--password" + cicd_pw + " https://tip-tip-wlan-cloud-docker-repo.jfrog.io",
# shell=True, stdout=output,
# stderr=output)
# with open(local_dir + "opensyncgw_upgrade.log", "a") as output:
# subprocess.call(
# "docker pull tip-tip-wlan-cloud-docker-repo.jfrog.io/opensync-gateway-and-mqtt:0.0.1-SNAPSHOT",
# shell=True, stdout=output,
# stderr=output)
# with open(local_dir + "opensyncgw.log", "a") as output:
# subprocess.call("docker run --rm -i -p 1883:1883 -p 6640:6640 -p 6643:6643 -p 4043:4043 \
# -v ~/mosquitto/data:/mosquitto/data \
# -v ~/mosquitto/log:/mosquitto/log \
@@ -168,7 +169,7 @@
# -v ~/app/log:/app/logs \
# -v ~//app/config:/app/config \
# -e OVSDB_CONFIG_FILE='/app/config/config_2_ssids.json' \
# tip-tip-wlan-cloud-docker-repo.jfrog.io/opensync-gateway-and-mqtt:0.0.1-SNAPSHOT",shell=True, stdout=output,
# tip-tip-wlan-cloud-docker-repo.jfrog.io/opensync-gateway-and-mqtt:0.0.1-SNAPSHOT", shell=True, stdout=output,
# stderr=output)
# print("opensync Gateway is running")
# return "pass"
@@ -207,7 +208,7 @@
#
# class openwrt_ap:
#
# def ap_upgrade(src,user2,host2,tgt,pwd,opts='', timeout=60):
# def ap_upgrade(src, user2, host2, tgt, pwd, opts='', timeout=60):
# ''' Performs the scp command. Transfers file(s) from local host to remote host '''
# print("AP Model getting upgarded is :", apModel)
# if apModel == "ecw5410":
@@ -218,18 +219,18 @@
# ap_firmware = 'openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin'
# AP_IP = '10.10.10.208'
# host2 = AP_IP
# src = src+ ap_firmware
# src = src + ap_firmware
# print("src =", src)
# print("AP IP ", AP_IP)
# print("AP USER =", ap_user)
# print("AP PASSWORD =", ap_pw)
# cmd = f'''/bin/bash -c "scp {opts} {src} {user2}@{AP_IP}:{tgt}"'''
# print("Executing the following cmd:",cmd,sep='\n')
# print("Executing the following cmd:", cmd, sep='\n')
#
# tmpFl = '/tmp/scp.log'
# fp = open(tmpFl,'wb')
# fp = open(tmpFl, 'wb')
# print(tmpFl)
# childP = pexpect.spawn(cmd,timeout=timeout)
# childP = pexpect.spawn(cmd, timeout=timeout)
# try:
# childP.sendline(cmd)
# childP.expect([f"{user2}@{host2}'s password:"])
@@ -238,7 +239,7 @@
# childP.expect(pexpect.EOF)
# childP.close()
# fp.close()
# fp = open(tmpFl,'r')
# fp = open(tmpFl, 'r')
# stdout = fp.read()
# fp.close()
#
@@ -253,26 +254,27 @@
# try:
# s = pxssh.pxssh()
# s.login(host2, user2, pwd)
# #s.sendline('sysupgrade /tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin&')
# # s.sendline('sysupgrade /tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin&')
# s.sendline('sysupgrade /tmp/openwrt-ipq806x-generic-edgecore_ecw5410-squashfs-nand-sysupgrade.bin&')
# #s.logout()
# #s.prompt() # match the prompt
# # s.logout()
# # s.prompt() # match the prompt
# print(s.before) # print everything before the prompt.
# sleep(100)
# #s.login(host2, user2, pwd)
# # s.login(host2, user2, pwd)
# s.prompt()
# #os.system(f"scp {local_dir}/cacert.pem root@10.10.10.207:/usr/plume/certs/ca.pem")
# #os.system(f"scp {local_dir}/clientcert.pem root@10.10.10.207:/usr/plume/certs/client.pem")
# #os.system(f"scp {local_dir}/clientkey_dec.pem root@10.10.10.207:/usr/plume/certs/client_dec.key")
# #s.sendline('service opensync restart')
# #s.prompt() # match the prompt
# #print(s.before) # print everything before the prompt.
# # os.system(f"scp {local_dir}/cacert.pem root@10.10.10.207:/usr/plume/certs/ca.pem")
# # os.system(f"scp {local_dir}/clientcert.pem root@10.10.10.207:/usr/plume/certs/client.pem")
# # os.system(f"scp {local_dir}/clientkey_dec.pem root@10.10.10.207:/usr/plume/certs/client_dec.key")
# # s.sendline('service opensync restart')
# # s.prompt() # match the prompt
# # print(s.before) # print everything before the prompt.
# s.logout()
# return "pass"
# except pxssh.ExceptionPxssh as e:
# print("ALERT !!!!!! pxssh failed on login.")
# print(e)
# def apCopyCert(src,user2,host2,tgt,pwd,opts='', timeout=60):
#
# def apCopyCert(src, user2, host2, tgt, pwd, opts='', timeout=60):
#
# print("Copying the AP Certs")
# '''
@@ -291,19 +293,19 @@
# print(s.before) # print everything before the prompt.
# s.logout()
# '''
# cacert=src+"ca.pem"
# clientcert = src+"client.pem"
# clientkey=src+"client_dec.key"
# tgt ="/usr/plume/certs"
# cacert = src + "ca.pem"
# clientcert = src + "client.pem"
# clientkey = src + "client_dec.key"
# tgt = "/usr/plume/certs"
# ap_pw
#
# print("src =", src)
# print("AP IP ", host2)
# print("AP USER =", ap_user)
# print("AP PASSWORD =", ap_pw)
# #cmd = f'''/bin/bash -c "scp {opts} {src} {user2}@{AP_IP}:{tgt}"'''
# #cmd = f'''/bin/bash -c "scp {opts} {cacert} {user2}@{AP_IP}:{tgt}"'''
# #cmd = f'''/bin/bash -c "scp {opts} {clientcert} {user2}@{AP_IP}:{tgt}"'''
# # cmd = f'''/bin/bash -c "scp {opts} {src} {user2}@{AP_IP}:{tgt}"'''
# # cmd = f'''/bin/bash -c "scp {opts} {cacert} {user2}@{AP_IP}:{tgt}"'''
# # cmd = f'''/bin/bash -c "scp {opts} {clientcert} {user2}@{AP_IP}:{tgt}"'''
# cmd = f'''/bin/bash -c "scp {opts} {cacert} {clientcert} {clientkey} {user2}@{host2}:{tgt}"'''
# print("Executing the following cmd:", cmd, sep='\n')
# tmpFl = '/tmp/cert.log'
@@ -317,19 +319,20 @@
# childP.logfile = fp
# childP.expect(pexpect.EOF)
# fp.close()
# fp = open(tmpFl,'r')
# fp = open(tmpFl, 'r')
# stdout = fp.read()
# fp.close()
#
# if childP.exitstatus != 0:
# #raise Exception(stdout)
# # raise Exception(stdout)
# print("there is an excess status non 0")
# except KeyboardInterrupt:
# childP.close()
# fp.close()
# return
# print(stdout)
# def restartGw(src,user2,host2,tgt,pwd,opts='', timeout=60):
#
# def restartGw(src, user2, host2, tgt, pwd, opts='', timeout=60):
# print("Restarting opensync GW")
# s = pxssh.pxssh()
# s.login(host2, user2, pwd)
@@ -352,7 +355,7 @@
# staConnect.radio = "wiphy1"
# staConnect.resource = 1
# staConnect.dut_ssid = "autoProvisionedSsid-5u"
# #staConnect.dut_passwd = "4C0nnectUS!"
# # staConnect.dut_passwd = "4C0nnectUS!"
# staConnect.dut_passwd = "12345678"
# staConnect.dut_security = "wpa2"
# staConnect.station_names = ["sta01010"]
@@ -362,15 +365,17 @@
# run_results = staConnect.get_result_list()
# for result in run_results:
# print("test result: " + result)
# #result = 'pass'
# print("Single Client Connectivity :",staConnect.passes)
# # result = 'pass'
# print("Single Client Connectivity :", staConnect.passes)
# if staConnect.passes() == True:
# client.update_testrail(case_id=938, run_id=rid, status_id=1, msg='client Connectivity to 5GHZ Open SSID is Passed ')
# client.update_testrail(case_id=938, run_id=rid, status_id=1,
# msg='client Connectivity to 5GHZ Open SSID is Passed ')
# else:
# client.update_testrail(case_id=938, run_id=rid, status_id=5, msg='client connectivity to 5GHZ OPEN SSID is Failed')
# client.update_testrail(case_id=938, run_id=rid, status_id=5,
# msg='client connectivity to 5GHZ OPEN SSID is Failed')
#
# def TestCase_941(self, rid):
# #MULTI CLIENT CONNECTIVITY
# # MULTI CLIENT CONNECTIVITY
# staConnect = StaConnect("10.10.10.201", 8080, _debugOn=False)
# staConnect.sta_mode = 0
# staConnect.upstream_resource = 1
@@ -397,19 +402,19 @@
#
# # Check for externally run test case results.
# def TestCase_LF_External(self, rid):
# #https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
# results = glob.glob("%s/*_CICD_RESULTS.txt"%external_results_dir)
# # https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
# results = glob.glob("%s/*_CICD_RESULTS.txt" % external_results_dir)
# for r in results:
# rfile = open(r, 'r')
# lines = rfile.readlines()
#
# # File contents looks something like:
# #CASE_ID 9999
# #RUN_ID 15
# #STATUS 1
# #MSG Test passed nicely
# #MSG Build ID: deadbeef
# #MSG Results: http://cicd.telecominfraproject.com
# # CASE_ID 9999
# # RUN_ID 15
# # STATUS 1
# # MSG Test passed nicely
# # MSG Build ID: deadbeef
# # MSG Results: http://cicd.telecominfraproject.com
#
# _case_id = -1
# _status_id = 1 # Default to pass
@@ -434,7 +439,7 @@
# _msg += "\n"
# _msg += v
# if _case_id != -1:
# client.update_testrail(case_id=_case_id, run_id=_rid, status_id=_status_id, msg=_msg)
# client.update_testrail(case_id=_case_id, run_id=_rid, status_id=_status_id, msg=_msg)
# os.unlink(r)
#
# def TestCase_939(self, rid):
@@ -442,18 +447,18 @@
# try:
# print("Counting clients in MQTT")
# s = pxssh.pxssh()
# #aws_host = os.getenv(AWS_HOST)
# #aws_user=os.getenv(AWS_USER)
# # aws_host = os.getenv(AWS_HOST)
# # aws_user=os.getenv(AWS_USER)
# os.chdir(local_dir)
# # makesure the client key file is in the fame directory to login to AWS VM
# s.login(aws_host,aws_user,ssh_key='id_key.pem')
# s.login(aws_host, aws_user, ssh_key='id_key.pem')
# s.sendline('kubectl cp tip-wlan-opensync-gw-static-f795d45-ctb5z:/app/logs/mqttData.log mqttData.log')
# # run a command
# s.prompt() # match the prompt
# print(s.before) # print everything before the prompt.
# s.sendline()
# s.logout()
# #return "pass"
# # return "pass"
# print(aws_host, aws_user)
# ssh = paramiko.SSHClient()
# ssh.load_system_host_keys()
@@ -487,15 +492,14 @@
# 'pwd': ap_pw,
# 'opts': ''
# }
# apModel= "ecw5410"
#
# apModel = "ecw5410"
#
# url = 'https://tip.jfrog.io/artifactory/tip-wlan-ap-firmware/'
# url = url + apModel
# projId = client.get_project_id(project_name= 'WLAN')
# projId = client.get_project_id(project_name='WLAN')
# print("TIP WLAN Project ID Is :", projId)
#
# rid = client.get_run_id(test_run_name= 'TIP-DEMO4')
# rid = client.get_run_id(test_run_name='TIP-DEMO4')
# print(rid)
# Test: RunTest = RunTest()
# Build: GetBuild = GetBuild()

View File

@@ -1,3 +1,5 @@
#!/usr/bin/env python3
"""TestRail API binding for Python 3.x.
"""

View File

@@ -1,14 +1,12 @@
#!/usr/bin/env python3
import base64
import urllib.request
from bs4 import BeautifulSoup
import ssl
import subprocess, os
import subprocess
from artifactory import ArtifactoryPath
import tarfile
import paramiko
from paramiko import SSHClient
from scp import SCPClient
import os
import pexpect
from pexpect import pxssh
@@ -34,7 +32,7 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append('../py-json')
sys.path.append('../py-json')
from LANforge.LFUtils import *
# if you lack __init__.py in this directory you will not find sta_connect module#

View File

@@ -5,33 +5,31 @@ Candela Technologies Inc.
Info : Standard Script for Connection Testing - Creates HTML and pdf report as a result (Used for web-console)
"""
import sys
import os
import importlib
import argparse
import datetime
import time
from test_utility import CreateHTML, StatusMsg
import pdfkit
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append('../py-json')
from LANforge import LFUtils
from LANforge import lfcli_base
from LANforge.lfcli_base import LFCliBase
from LANforge.LFUtils import *
import realm
from realm import Realm
import argparse
import datetime
import time
import os
from test_utility import CreateHTML
# from test_utility import RuntimeUpdates
from test_utility import StatusMsg
import pdfkit
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
webconsole_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd())))
class ConnectionTest(LFCliBase):
def __init__(self, lfclient_host="localhost", lfclient_port=8080, radio="wiphy1", sta_prefix="sta", start_id=0,

View File

@@ -9,23 +9,24 @@ Use './create_bond.py --help' to see command line usage and options
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
"""
import sys
import os
import importlib
import argparse
import time
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
import LANforge
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
from realm import Realm
import time
import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class CreateBond(LFCliBase):

View File

@@ -3,23 +3,24 @@
"""
Script for creating a variable number of bridges.
"""
import sys
import os
import importlib
import pprint
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
import LANforge
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
from realm import Realm
import time
import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class CreateBridge(Realm):

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python3
"""
Note: To Run this script gui should be opened with
@@ -30,11 +29,10 @@ Output:
You should see build scenario with the given arguments at the end of this script.
To verify this:
open Chamber View -> Manage scenario
"""
import sys
import os
import importlib
import argparse
import time
import re
@@ -43,10 +41,12 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
cv = cv_test_manager.cv_test
from cv_test_manager import cv_test as cv
class CreateChamberview(cv):
def __init__(self,
@@ -143,7 +143,6 @@ class CreateChamberview(cv):
); # To manage scenario
if not line and not raw_line:
raise Exception("scenario creation failed")
return False
return True
@@ -170,10 +169,10 @@ class CreateChamberview(cv):
print("completed building %s scenario" %scenario_name)
def main():
parser = argparse.ArgumentParser(
prog='create_chamberview.py',
formatter_class=argparse.RawTextHelpFormatter,
description="""
For Two line scenario use --line twice as shown in example, for multi line scenario
use --line argument to create multiple lines
@@ -203,7 +202,6 @@ def main():
help="delete scenario (by default: False)")
args = parser.parse_args()
Create_Chamberview = CreateChamberview(lfmgr=args.lfmgr,
port=args.port,
)
@@ -216,6 +214,5 @@ def main():
Create_Chamberview.build(args.create_scenario)
if __name__ == "__main__":
main()

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python3
"""
Note: To Run this script gui should be opened with
@@ -45,9 +44,9 @@ How to Run this:
Output : DUT will be created in Chamber View
"""
import sys
import os
import importlib
import argparse
import time
@@ -55,11 +54,15 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
from cv_dut_profile import cv_dut as dut
from cv_test_manager import cv_test as cvtest
# from cv_dut_profile import cv_dut as dut
cv_dut_profile = importlib.import_module("py-json.cv_dut_profile")
dut = cv_dut_profile.cv_dut
# from cv_test_manager import cv_test as cvtest
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
cvtest = cv_test_manager.cv_test
class DUT(dut):
@@ -72,6 +75,7 @@ class DUT(dut):
hw_version="NA",
serial_num="NA",
model_num="NA",
dut_flags=None,
):
super().__init__(
lfclient_host=lfmgr,
@@ -80,12 +84,15 @@ class DUT(dut):
hw_version=hw_version,
serial_num=serial_num,
model_num=model_num,
desired_dut_flags=dut_flags,
desired_dut_flags_mask=dut_flags
)
self.cv_dut_name = dut_name
self.cv_test = cvtest(lfmgr, port)
self.dut_name = dut_name
self.ssid = ssid
def setup(self):
self.create_dut()
@@ -138,6 +145,8 @@ class DUT(dut):
def main():
parser = argparse.ArgumentParser(
prog='create_chamberview_dut.py',
formatter_class=argparse.RawTextHelpFormatter,
description="""
./create_chamberview_dut -m "localhost" -o "8080" -d "dut_name"
--ssid "ssid_idx=0 ssid=NET1 security=WPA|WEP|11r|EAP-PEAP bssid=78:d2:94:bf:16:41"
@@ -156,6 +165,7 @@ def main():
parser.add_argument("--hw_version", default="NA", help="DUT Hardware version.")
parser.add_argument("--serial_num", default="NA", help="DUT Serial number.")
parser.add_argument("--model_num", default="NA", help="DUT Model Number.")
parser.add_argument('--dut_flag', help='station flags to add', default=None, action='append')
args = parser.parse_args()
new_dut = DUT(lfmgr=args.lfmgr,
@@ -166,6 +176,7 @@ def main():
hw_version = args.hw_version,
serial_num = args.serial_num,
model_num = args.model_num,
dut_flags=args.dut_flag
)
new_dut.setup()

View File

@@ -1,103 +1,72 @@
#!/usr/bin/env python3
"""
This script will create a variable number of layer3 stations each with their own set of cross-connects and endpoints.
If you want to
Use './create_l3.py --help' to see command line usage and options
"""
Create Layer-3 Cross Connection Using LANforge JSON AP : https://www.candelatech.com/cookbook.php?vol=fire&book=scripted+layer-3+test
Written by Candela Technologies Inc.
Updated by: Erin Grimes
Example Command:
./create_l3.py --endp_a 'eth1' --endp_b 'eth2' --min_rate_a '56000' --min_rate_b '40000'
"""
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
import argparse
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
from realm import Realm
import time
import datetime
from realm import TestGroupProfile
LANforge = importlib.import_module("py-json.LANforge")
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
TestGroupProfile = realm.TestGroupProfile
class CreateL3(Realm):
def __init__(self,
ssid, security, password, sta_list, name_prefix, upstream, radio,
host="localhost", port=8080, mode=0, ap=None,
side_a_min_rate=56, side_a_max_rate=0,
side_b_min_rate=56, side_b_max_rate=0,
number_template="00000", use_ht160=False,
name_prefix,
endp_b,
endp_a,
host="localhost", port=8080, mode=0,
min_rate_a=56, max_rate_a=0,
min_rate_b=56, max_rate_b=0,
_debug_on=False,
_exit_on_error=False,
_exit_on_fail=False):
super().__init__(host, port)
self.upstream = upstream
self.host = host
self.port = port
self.ssid = ssid
self.sta_list = sta_list
self.security = security
self.password = password
self.radio = radio
self.endp_b = endp_b
self.endp_a = endp_a
self.mode = mode
self.ap = ap
self.number_template = number_template
self.debug = _debug_on
self.name_prefix = name_prefix
self.station_profile = self.new_station_profile()
self.cx_profile = self.new_l3_cx_profile()
self.station_profile.lfclient_url = self.lfclient_url
self.station_profile.ssid = self.ssid
self.station_profile.ssid_pass = self.password
self.station_profile.security = self.security
self.station_profile.number_template_ = self.number_template
self.station_profile.debug = self.debug
self.station_profile.use_ht160 = use_ht160
if self.station_profile.use_ht160:
self.station_profile.mode = 9
self.station_profile.mode = mode
if self.ap is not None:
self.station_profile.set_command_param("add_sta", "ap", self.ap)
# self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
self.cx_profile.host = self.host
self.cx_profile.port = self.port
self.cx_profile.name_prefix = self.name_prefix
self.cx_profile.side_a_min_bps = side_a_min_rate
self.cx_profile.side_a_max_bps = side_a_max_rate
self.cx_profile.side_b_min_bps = side_b_min_rate
self.cx_profile.side_b_max_bps = side_b_max_rate
self.cx_profile.side_a_min_bps = min_rate_a
self.cx_profile.side_a_max_bps = max_rate_a
self.cx_profile.side_b_min_bps = min_rate_b
self.cx_profile.side_b_max_bps = max_rate_b
def pre_cleanup(self):
self.cx_profile.cleanup_prefix()
for sta in self.sta_list:
self.rm_port(sta, check_exists=True)
def build(self):
self.station_profile.use_security(self.security,
self.ssid,
self.password)
self.station_profile.set_number_template(self.number_template)
print("Creating stations")
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
self.station_profile.create(radio=self.radio,
sta_names_=self.sta_list,
debug=self.debug)
self.cx_profile.create(endp_type="lf_udp",
side_a=self.station_profile.station_names,
side_b=self.upstream,
side_a=self.endp_a,
side_b=self.endp_b,
sleep_time=0)
self._pass("PASS: Station build finished")
self._pass("PASS: Cross-connect build finished")
def main():
@@ -105,51 +74,21 @@ def main():
prog='create_l3.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
Create stations to test connection and traffic on VAPs of varying security types (WEP, WPA, WPA2, WPA3, Open)
Generate traffic between ports
''',
description='''\
test_ipv4_variable_time.py:
--------------------
Generic command layout:
python3 ./test_ipv4_variable_time.py
--upstream_port eth1
--radio wiphy0
--num_stations 32
--security {open|wep|wpa|wpa2|wpa3} \\
--mode 1
{"auto" : "0",
"a" : "1",
"b" : "2",
"g" : "3",
"abg" : "4",
"abgn" : "5",
"bgn" : "6",
"bg" : "7",
"abgnAC" : "8",
"anAC" : "9",
"an" : "10",
"bgnAC" : "11",
"abgnAX" : "12",
"bgnAX" : "13",
--ssid netgear
--password admin123
--a_min 1000
--b_min 1000
--ap "00:0e:8e:78:e1:76"
--number_template 0000
--debug
''')
''')
required_args = None
for group in parser._action_groups:
if group.title == "required arguments":
required_args = group
break;
break
if required_args is not None:
required_args.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
required_args.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
required_args.add_argument('--min_rate_a', help='--min_rate_a bps rate minimum for side_a', default=56000)
required_args.add_argument('--min_rate_b', help='--min_rate_b bps rate minimum for side_b', default=56000)
required_args.add_argument('--endp_a', help='--endp_a station list', default=["eth1"], action="append")
required_args.add_argument('--endp_b', help='--upstream port', default="eth2")
optional_args = None
for group in parser._action_groups:
@@ -157,7 +96,7 @@ python3 ./test_ipv4_variable_time.py
optional_args = group
break;
if optional_args is not None:
optional_args.add_argument('--mode', help='Used to force mode of stations')
optional_args.add_argument('--mode', help='Used to force mode of stations', default=0)
optional_args.add_argument('--ap', help='Used to force a connection to a particular AP')
optional_args.add_argument('--number_template', help='Start the station numbering with a particular number. Default is 0000', default=0000)
args = parser.parse_args()
@@ -166,23 +105,16 @@ python3 ./test_ipv4_variable_time.py
if (args.num_stations is not None) and (int(args.num_stations) > 0):
num_sta = int(args.num_stations)
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=int(args.number_template), end_id_=num_sta+int(args.number_template) - 1, padding_number_=10000,
radio=args.radio)
# station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=int(args.number_template), end_id_=num_sta+int(args.number_template) - 1, padding_number_=10000,
# radio=args.radio)
ip_var_test = CreateL3(host=args.mgr,
port=args.mgr_port,
number_template=str(args.number_template),
sta_list=station_list,
name_prefix="VT",
upstream=args.upstream_port,
ssid=args.ssid,
password=args.passwd,
radio=args.radio,
security=args.security,
use_ht160=False,
side_a_min_rate=args.a_min,
side_b_min_rate=args.b_min,
endp_a=args.endp_a,
endp_b=args.endp_b,
min_rate_a=args.min_rate_a,
min_rate_b=args.min_rate_b,
mode=args.mode,
ap=args.ap,
_debug_on=args.debug)
ip_var_test.pre_cleanup()
@@ -190,7 +122,7 @@ python3 ./test_ipv4_variable_time.py
if not ip_var_test.passes():
print(ip_var_test.get_fail_message())
ip_var_test.exit_fail()
print('Creates %s stations and connections' % num_sta)
print('Created %s stations and connections' % num_sta)
if __name__ == "__main__":

198
py-scripts/create_l3_stations.py Executable file
View File

@@ -0,0 +1,198 @@
#!/usr/bin/env python3
"""
This script will create a variable number of layer3 stations each with their own set of cross-connects and endpoints.
Example script:
'./create_l3_stations.py --radio wiphy0 --ssid lanforge --password password --security wpa2'
"""
import sys
import os
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
import argparse
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
from realm import Realm
class CreateL3(Realm):
    """Create stations on one radio and build a layer-3 UDP cross-connect
    from each station (side A) to the upstream port (side B).

    Typical use: instantiate, call pre_cleanup() to remove stale ports and
    cross-connects, then build() to create stations and connections.
    """

    def __init__(self,
                 ssid, security, password, sta_list, name_prefix, upstream, radio,
                 host="localhost", port=8080, mode=0, ap=None,
                 side_a_min_rate=56, side_a_max_rate=0,
                 side_b_min_rate=56, side_b_max_rate=0,
                 number_template="00000", use_ht160=False,
                 _debug_on=False,
                 _exit_on_error=False,
                 _exit_on_fail=False):
        super().__init__(host, port)
        self.upstream = upstream
        self.host = host
        self.port = port
        self.ssid = ssid
        self.sta_list = sta_list
        self.security = security
        self.password = password
        self.radio = radio
        self.mode = mode
        self.ap = ap
        self.number_template = number_template
        self.debug = _debug_on
        self.name_prefix = name_prefix
        self.station_profile = self.new_station_profile()
        self.cx_profile = self.new_l3_cx_profile()
        self.station_profile.lfclient_url = self.lfclient_url
        self.station_profile.ssid = self.ssid
        self.station_profile.ssid_pass = self.password
        self.station_profile.security = self.security
        self.station_profile.number_template_ = self.number_template
        self.station_profile.debug = self.debug
        self.station_profile.use_ht160 = use_ht160
        # FIX: assign the caller's mode first, then apply the HT160 override.
        # The original assigned `mode` AFTER the HT160 branch, which silently
        # clobbered the mode-9 override and made that branch dead code.
        self.station_profile.mode = mode
        if self.station_profile.use_ht160:
            # HT160 stations require mode 9 (anAC).
            self.station_profile.mode = 9
        if self.ap is not None:
            # Force association to a specific AP BSSID when requested.
            self.station_profile.set_command_param("add_sta", "ap", self.ap)
        self.cx_profile.host = self.host
        self.cx_profile.port = self.port
        self.cx_profile.name_prefix = self.name_prefix
        self.cx_profile.side_a_min_bps = side_a_min_rate
        self.cx_profile.side_a_max_bps = side_a_max_rate
        self.cx_profile.side_b_min_bps = side_b_min_rate
        self.cx_profile.side_b_max_bps = side_b_max_rate

    def pre_cleanup(self):
        """Remove existing cross-connects with this name prefix and delete
        every station in self.sta_list so build() starts from a clean slate."""
        self.cx_profile.cleanup_prefix()
        for sta in self.sta_list:
            self.rm_port(sta, check_exists=True)

    def build(self):
        """Create the stations (admin-down) and one lf_udp cross-connect per
        station to the upstream port, then record a PASS result."""
        self.station_profile.use_security(self.security,
                                          self.ssid,
                                          self.password)
        self.station_profile.set_number_template(self.number_template)
        print("Creating stations")
        # Stations are created admin-down; report_timer controls how often
        # the port reports status back to the manager (ms).
        self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
        self.station_profile.set_command_param("set_port", "report_timer", 1500)
        self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
        self.station_profile.create(radio=self.radio,
                                    sta_names_=self.sta_list,
                                    debug=self.debug)
        self.cx_profile.create(endp_type="lf_udp",
                               side_a=self.station_profile.station_names,
                               side_b=self.upstream,
                               sleep_time=0)
        self._pass("PASS: Station build finished")
def main():
    """Parse command-line options, derive the station list, and run the
    pre-cleanup/build workflow via CreateL3.

    Exits with a failure status (via exit_fail) if the build does not pass.
    """
    parser = LFCliBase.create_basic_argparse(
        prog='create_l3_stations.py',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog='''\
Create stations to test connection and traffic on VAPs of varying security types (WEP, WPA, WPA2, WPA3, Open)
''',
        description='''\
create_l3_stations.py:
--------------------
Generic command layout:
python3 ./create_l3_stations.py
    --upstream_port eth1
    --radio wiphy0
    --num_stations 32
    --security {open|wep|wpa|wpa2|wpa3} \\
    --mode   1
        {"auto"   : "0",
        "a"      : "1",
        "b"      : "2",
        "g"      : "3",
        "abg"    : "4",
        "abgn"   : "5",
        "bgn"    : "6",
        "bg"     : "7",
        "abgnAC" : "8",
        "anAC"   : "9",
        "an"     : "10",
        "bgnAC"  : "11",
        "abgnAX" : "12",
        "bgnAX"  : "13",
    --ssid netgear
    --password admin123
    --a_min 1000
    --b_min 1000
    --ap "00:0e:8e:78:e1:76"
    --number_template 0000
    --debug
''')
    # create_basic_argparse pre-defines the argument groups; locate them so
    # the script-specific options land in the correct group.
    required_args = None
    for group in parser._action_groups:
        if group.title == "required arguments":
            required_args = group
            break
    if required_args is not None:
        required_args.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
        required_args.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
    optional_args = None
    for group in parser._action_groups:
        if group.title == "optional arguments":
            optional_args = group
            break
    if optional_args is not None:
        # FIX: default=0 so an unset --mode is passed as numeric "auto" (0)
        # rather than None, matching the sibling create_l3.py / create_l4.py.
        optional_args.add_argument('--mode', help='Used to force mode of stations', default=0)
        optional_args.add_argument('--ap', help='Used to force a connection to a particular AP')
        optional_args.add_argument('--number_template',
                                   help='Start the station numbering with a particular number. Default is 0000',
                                   default=0000)
        optional_args.add_argument('--station_list', help='Optional: User defined station names',
                                   action='append', default=None)
    args = parser.parse_args()

    num_sta = 2
    if (args.num_stations is not None) and (int(args.num_stations) > 0):
        num_sta = int(args.num_stations)

    # Either honor an explicit user-supplied station list, or generate
    # sequential names starting at the number template.
    if args.station_list is None:
        station_list = LFUtils.portNameSeries(prefix_="sta",
                                              start_id_=int(args.number_template),
                                              end_id_=num_sta + int(args.number_template) - 1,
                                              padding_number_=10000,
                                              radio=args.radio)
    else:
        station_list = args.station_list
    ip_var_test = CreateL3(host=args.mgr,
                           port=args.mgr_port,
                           number_template=str(args.number_template),
                           sta_list=station_list,
                           name_prefix="VT",
                           upstream=args.upstream_port,
                           ssid=args.ssid,
                           password=args.passwd,
                           radio=args.radio,
                           security=args.security,
                           use_ht160=False,
                           side_a_min_rate=args.a_min,
                           side_b_min_rate=args.b_min,
                           mode=args.mode,
                           ap=args.ap,
                           _debug_on=args.debug)
    ip_var_test.pre_cleanup()
    ip_var_test.build()
    if not ip_var_test.passes():
        print(ip_var_test.get_fail_message())
        ip_var_test.exit_fail()
    # FIX: past tense -- stations have been created by this point (the same
    # wording fix the repo applied to create_l3.py in this commit).
    print('Created %s stations and connections' % num_sta)
if __name__ == "__main__":
main()

View File

@@ -1,6 +1,4 @@
#!/usr/bin/env python3
"""
This script will create a variable number of layer4 stations each with their own set of cross-connects and endpoints.
@@ -8,21 +6,23 @@
"""
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
TestGroupProfile = realm.TestGroupProfile
import argparse
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
from realm import Realm
import time
import datetime
from realm import TestGroupProfile
class CreateL4(Realm):
def __init__(self,
@@ -149,7 +149,7 @@ python3 ./layer4.py
optional_args=group
break;
if optional_args is not None:
optional_args.add_argument('--mode',help='Used to force mode of stations')
optional_args.add_argument('--mode',help='Used to force mode of stations', default=0)
optional_args.add_argument('--ap',help='Used to force a connection to a particular AP')
args = parser.parse_args()

View File

@@ -1,19 +1,22 @@
#!/usr/bin/env python3
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append('../py-json')
from LANforge.lfcli_base import LFCliBase
from LANforge.LFUtils import *
from LANforge import LFUtils
from LANforge.add_file_endp import *
import argparse
from realm import Realm
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
add_file_endp = importlib.import_module("py-json.LANforge.add_file_endp")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class CreateMacVlan(Realm):
@@ -186,4 +189,4 @@ Generic command layout:
if __name__ == "__main__":
main()
main()

View File

@@ -1,22 +1,22 @@
#!/usr/bin/env python3
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
import argparse
from LANforge.lfcli_base import LFCliBase
from LANforge.LFUtils import *
from LANforge.add_file_endp import *
from LANforge import LFUtils
import argparse
from realm import Realm
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
add_file_endp = importlib.import_module("py-json.LANforge.add_file_endp")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class CreateQVlan(Realm):

View File

@@ -1,23 +1,25 @@
#!/usr/bin/env python3
"""
Script for creating a variable number of stations.
"""
import sys
import os
import importlib
import argparse
import pprint
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
from realm import Realm
import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class CreateStation(Realm):
@@ -29,6 +31,7 @@ class CreateStation(Realm):
_port=None,
_mode=0,
_sta_list=None,
_sta_flags=None,
_number_template="00000",
_radio="wiphy0",
_proxy_str=None,
@@ -46,6 +49,7 @@ class CreateStation(Realm):
self.password = _password
self.mode = _mode
self.sta_list = _sta_list
self.sta_flags = _sta_flags
self.radio = _radio
self.timeout = 120
self.number_template = _number_template
@@ -59,6 +63,10 @@ class CreateStation(Realm):
self.station_profile.security = self.security
self.station_profile.number_template_ = self.number_template
self.station_profile.mode = self.mode
if self.sta_flags is not None:
self.station_profile.desired_add_sta_flags = self.sta_flags
self.station_profile.desired_add_sta_mask = self.sta_flags
if self.debug:
print("----- Station List ----- ----- ----- ----- ----- ----- \n")
pprint.pprint(self.sta_list)
@@ -99,22 +107,23 @@ def main():
description='''\
create_station.py
--------------------
Command example:
./create_station.py
--radio wiphy0
--start_id 2
--num_stations 3
--security open
--ssid netgear
--passwd BLANK
--debug
--------------------
Command example:
./create_station.py
--radio wiphy0
--start_id 2
--num_stations 3
--security open
--ssid netgear
--passwd BLANK
--debug
''')
required = parser.add_argument_group('required arguments')
required.add_argument('--start_id', help='--start_id <value> default 0', default=0)
optional = parser.add_argument_group('Optional arguments')
optional.add_argument('--mode', help='Mode for your station (as a number)',default=0)
optional.add_argument('--station_flag', help='station flags to add', required=False, default=None, action='append')
args = parser.parse_args()
# if args.debug:
@@ -156,6 +165,7 @@ Command example:
_password=args.passwd,
_security=args.security,
_sta_list=station_list,
_sta_flags=args.station_flag,
_mode=args.mode,
_radio=args.radio,
_set_txo_data=None,

View File

@@ -1,23 +1,25 @@
#!/usr/bin/env python3
"""
Script for creating a variable number of stations.
"""
import sys
import os
import importlib
import argparse
import pandas as pd
import pprint
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
from LANforge.lfcli_base import LFCliBase
from realm import Realm
import pandas as pd
import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class CreateStation(Realm):

View File

@@ -1,26 +1,25 @@
#!/usr/bin/env python3
"""
Script for creating a variable number of VAPs.
"""
import sys
import os
import importlib
import argparse
import pprint
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
import LANforge
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
import realm
from realm import Realm
import time
import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class CreateVAP(Realm):
@@ -28,11 +27,19 @@ class CreateVAP(Realm):
_ssid=None,
_security=None,
_password=None,
_mac=None,
_host=None,
_port=None,
_vap_list=None,
_resource=None,
_vap_flags=None,
_mode=None,
_number_template="00000",
_radio="wiphy0",
_radio=None,
_channel=36,
_country_code=0,
_nss=False,
_bridge=False,
_proxy_str=None,
_debug_on=False,
_exit_on_error=False,
@@ -46,41 +53,55 @@ class CreateVAP(Realm):
self.security = _security
self.password = _password
self.vap_list = _vap_list
self.resource = _resource
if _vap_flags is None:
self.vap_flags = ["wpa2_enable", "80211u_enable", "create_admin_down"]
else:
self.vap_flags = _vap_flags
self.mode = _mode
self.radio = _radio
self.channel = _channel
self.country_code = _country_code
self.timeout = 120
self.number_template = _number_template
self.debug = _debug_on
self.dhcp = _dhcp
self.bridge = _bridge
self.vap_profile = self.new_vap_profile()
self.vap_profile.vap_name = self.vap_list
self.vap_profile.ssid = self.ssid
self.vap_profile.security = self.security
self.vap_profile.ssid_pass = self.password
self.vap_profile.dhcp = self.dhcp
self.vap_profile.mode = self.mode
self.vap_profile.desired_add_vap_flags = self.vap_flags + ["wpa2_enable", "80211u_enable", "create_admin_down"]
self.vap_profile.desired_add_vap_flags_mask = self.vap_flags + ["wpa2_enable", "80211u_enable", "create_admin_down"]
if self.debug:
print("----- VAP List ----- ----- ----- ----- ----- ----- \n")
pprint.pprint(self.vap_list)
print("---- ~VAP List ----- ----- ----- ----- ----- ----- \n")
def build(self):
# Build VAPs
self.vap_profile.use_security(self.security, self.ssid, passwd=self.password)
print("Creating VAPs")
self.vap_profile.create(resource = 1,
self.vap_profile.create(resource = self.resource,
radio = self.radio,
channel = 36,
channel = self.channel,
country=self.country_code,
up_ = True,
debug = False,
use_ht40=True,
use_ht80=True,
use_ht160=False,
suppress_related_commands_ = True,
use_radius = True,
hs20_enable = False)
use_radius=False,
hs20_enable=False,
bridge=self.bridge)
self._pass("PASS: VAP build finished")
def main():
parser = LFCliBase.create_basic_argparse(
prog='create_vap.py',
@@ -102,11 +123,19 @@ Command example:
--passwd BLANK
--debug
''')
required = parser.add_argument_group('required arguments')
#required.add_argument('--security', help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >', required=True)
optional = parser.add_argument_group('optional arguments')
optional.add_argument('--num_vaps', help='Number of VAPs to Create', required=False)
optional.add_argument('--num_vaps', help='Number of VAPs to Create', required=False, default=1)
optional.add_argument('--vap_flag', help='VAP flags to add', required=False, default=None, action='append')
optional.add_argument('--bridge', help='Create a bridge connecting the VAP to a port', required=False, default=False)
optional.add_argument('--mac', help='Custom mac address', default="xx:xx:xx:xx:*:xx")
optional.add_argument('--mode', default='AUTO')
optional.add_argument('--channel', default=36)
optional.add_argument('--country_code', default=0)
optional.add_argument('--nss', default=False)
optional.add_argument('--resource', default=1)
optional.add_argument('--start_id', default=0)
optional.add_argument('--vap_name',default=None)
args = parser.parse_args()
#if args.debug:
# pprint.pprint(args)
@@ -114,29 +143,56 @@ Command example:
if (args.radio is None):
raise ValueError("--radio required")
num_vap = 2
if (args.num_vaps is not None) and (int(args.num_vaps) > 0):
num_vaps_converted = int(args.num_vaps)
num_vap = num_vaps_converted
num_vap = int(args.num_vaps)
vap_list = LFUtils.port_name_series(prefix="vap",
start_id=0,
start_id=int(args.start_id),
end_id=num_vap-1,
padding_number=10000,
radio=args.radio)
print(args.passwd)
print(args.ssid)
#print(args.passwd)
#print(args.ssid)
for vap in vap_list:
if args.vap_name is None:
for vap in vap_list:
create_vap = CreateVAP(_host=args.mgr,
_port=args.mgr_port,
_ssid=args.ssid,
_password=args.passwd,
_security=args.security,
_mode=args.mode,
_vap_list=vap,
_resource=args.resource,
_vap_flags=args.vap_flag,
_radio=args.radio,
_channel=args.channel,
_country_code=args.country_code,
_nss=args.nss,
_proxy_str=args.proxy,
_bridge=args.bridge,
_debug_on=args.debug)
print('Creating VAP')
create_vap.build()
else:
vap_name = "vap"+args.vap_name
create_vap = CreateVAP(_host=args.mgr,
_port=args.mgr_port,
_ssid=args.ssid,
_password=args.passwd,
_security=args.security,
_vap_list=vap,
_radio=args.radio,
_proxy_str=args.proxy,
_debug_on=args.debug)
_port=args.mgr_port,
_ssid=args.ssid,
_password=args.passwd,
_security=args.security,
_mode=args.mode,
_vap_list=vap_name,
_resource=args.resource,
_vap_flags=args.vap_flag,
_radio=args.radio,
_channel=args.channel,
_country_code=args.country_code,
_nss=args.nss,
_proxy_str=args.proxy,
_bridge=args.bridge,
_debug_on=args.debug)
print('Creating VAP')
create_vap.build()

View File

@@ -1,23 +1,26 @@
#!/usr/bin/env python3
"""
Script for creating a variable number of bridges.
Script for creating a variable number of virtual routers.
"""
import os
import sys
import os
import importlib
import time
from pprint import pprint
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
from realm import Realm
import time
from pprint import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class CreateVR(Realm):
def __init__(self,
@@ -171,9 +174,9 @@ Command example:
create_vr.clean()
create_vr.build()
create_vr.start()
# create_vr.monitor()
create_vr.stop()
create_vr.clean()
create_vr.monitor()
#create_vr.stop()
#create_vr.clean()
print('Created Virtual Router')
if __name__ == "__main__":

314
py-scripts/create_wanlink.py Executable file
View File

@@ -0,0 +1,314 @@
#!/usr/bin/python3
"""
Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python
Written by Candela Technologies Inc.
Updated by: Erin Grimes
"""
import sys
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
from time import sleep
import urllib
import pprint
sys.path.append("../py-json")
from LANforge import LFRequest
from LANforge import LFUtils
from LANforge.lfcli_base import LFCliBase
j_printer = pprint.PrettyPrinter(indent=2)
# todo: this needs to change
resource_id = 1
def main():
parser = LFCliBase.create_basic_argparse()
args = parser.parse_args()
base_url = 'http://%s:%s' % (args.mgr, args.mgr_port)
print(base_url)
json_post = ""
json_response = ""
num_wanlinks = -1
# force a refresh on the ports and wanlinks
lf_r = LFRequest.LFRequest(base_url+"/cli-json/nc_show_ports", debug_=True)
lf_r.addPostData({
"shelf": 1,
"resource": 1,
"port": "all",
})
json_response = lf_r.jsonPost(debug=True)
lf_r = LFRequest.LFRequest(base_url+"/cli-json/nc_show_endpoints", debug_=True)
lf_r.addPostData({
"endpoint": "all"
})
json_response = lf_r.jsonPost(debug=True)
sleep(1)
# see if there are old wanlinks to remove
lf_r = LFRequest.LFRequest(base_url+"/wl_ep/list", debug_=True)
json_reponse = lf_r.get_as_json()
endpA = args['name']+"-A"
endpB = args['name']+"-B"
# count the number of wanlink endpoints
if "endpoint" in json_response:
endpoint_map = LFUtils.list_to_alias_map(json_list=json_reponse, from_element="endpoint")
if endpA in endpoint_map:
num_wanlinks += 1
if endpB in endpoint_map:
num_wanlinks += 1
# remove old wanlinks
if (num_wanlinks > 0):
print("Removing old wanlinks...")
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_cx", debug_=True)
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name']
})
lf_r.jsonPost()
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp", debug_=True)
lf_r.addPostData({
'endp_name': endpA
})
lf_r.jsonPost()
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp", debug_=True)
lf_r.addPostData({
'endp_name': endpB
})
lf_r.jsonPost()
sleep(1)
# check to see if we have ports
lf_r = LFRequest.LFRequest(base_url+"/ports/1/1/list", debug_=True)
port_response = lf_r.getAsJson()
if "interfaces" not in port_response:
print("No interfaces in port_response!")
pprint.pprint(port_response)
exit(1)
if "interfaces" in port_response:
port_map = LFUtils.list_to_alias_map(json_list=port_response, from_element="interfaces")
ports_created = 0
if args["port_A"] not in port_map:
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_rdd", debug_=True)
lf_r.addPostData({
"shelf": 1,
"resource": 1,
"port": args["port_A"],
"peer_ifname": args["port_A"]+"b",
})
json_reponse = lf_r.jsonPost(debug=True)
if not json_response:
print("could not create port "+args["port_A"])
exit(1)
sleep(0.1)
ports_created += 1
if args["port_B"] not in port_map:
lf_r.addPostData({
"shelf": 1,
"resource": 1,
"port": args["port_B"],
"peer_ifname": args["port_B"]+"b",
})
json_reponse = lf_r.jsonPost(debug=True)
if not json_response:
print("could not create port " + args["port_B"])
exit(1)
ports_created += 1
sleep(0.1)
if ports_created > 0:
LFUtils.wait_until_ports_appear(base_url=base_url,
port_list=(args["port_A"], args["port_B"]),
debug=True)
print("Created {} ports".format(ports_created))
# create wanlink endpoint A
print("Adding WL Endpoints...", end='')
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp", debug_=True)
lf_r.addPostData({
'alias': endpA,
'shelf': 1,
'resource': '1',
'port': args['port_A'],
'latency': args['latency_A'],
'max_rate': args['rate_A'],
})
json_response = lf_r.jsonPost(debug=True)
if not json_response:
print("Unable to create "+endpA)
else:
print("A, ", end='')
# create wanlink endpoint B
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp", debug_=True)
lf_r.addPostData({
'alias': endpB,
'shelf': 1,
'resource': '1',
'port': args['port_B'],
'latency': args['latency_B'],
'max_rate': args['rate_B'],
})
json_response = lf_r.jsonPost()
if not json_response:
print("Unable to create "+endpB)
else:
print("B")
sleep(1)
# create cx
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_cx", debug_=True)
lf_r.addPostData({
'alias': args['name'],
'test_mgr': 'default_tm',
'tx_endp': endpA,
'rx_endp': endpB
})
lf_r.jsonPost(debug=True)
sleep(0.5)
# modify wanlink endpoint A
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info", debug_=True)
lf_r.addPostData({
'name': endpA,
'max_jitter': args['jitter_A'],
'jitter_freq': args['jitter_freq_A'],
'drop_freq': args['drop_A']
})
lf_r.jsonPost(debug=True)
# modify wanlink endpoint B
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info", debug_=True)
lf_r.addPostData({
'name': endpB,
'max_jitter': args['jitter_B'],
'jitter_freq': args['jitter_freq_B'],
'drop_freq': args['drop_B']
})
lf_r.jsonPost()
# start wanlink once we see it
seen = 0
print("Looking for {} and {}: ".format(endpA, endpB), end='')
while (seen < 2):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl_ep/list?fields=name,eid")
try:
json_response = lf_r.getAsJson()
if json_response is None:
print(".", end="")
continue
LFUtils.debug_printer.pprint(json_response)
if "endpoint" not in json_response:
print("-", end="")
continue
endpoint_map = LFUtils.list_to_alias_map(json_list=json_response["endpoint"],
from_element="endpoint")
if endpA in endpoint_map:
seen += 1
print("+", end="")
if endpB in endpoint_map:
seen += 1
print("+", end="")
except urllib.error.HTTPError as error:
print("Error code {}".format(error.code))
continue
print("")
print("Starting wanlink:")
# print("the latency is {laten}".format(laten=latency))
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name'],
'cx_state': 'RUNNING'
})
lf_r.jsonPost()
running = 0
while (running < 1):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
try:
json_response = lf_r.getAsJson()
if (json_response is None):
continue
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
if (value["state"].startswith("Run")):
LFUtils.debug_printer.pprint(json_response)
running = 1
except urllib.error.HTTPError as error:
print("Error code {}".format(error.code))
continue
print("Wanlink is running")
# stop wanlink
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name'],
'cx_state': 'STOPPED'
})
lf_r.jsonPost()
running = 1
while (running > 0):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,eid,state,_links")
LFUtils.debug_printer.pprint(json_response)
try:
json_response = lf_r.getAsJson()
if (json_response is None):
continue
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
if (value["state"].startswith("Stop")):
LFUtils.debug_printer.pprint(json_response)
running = 0
except urllib.error.HTTPError as error:
print("Error code {}".format(error.code))
continue
print("Wanlink is stopped.")
# print("Wanlink info:")
# lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-A")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-B")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__':
main()

View File

@@ -1,30 +1,31 @@
#!/usr/bin/env python3
"""
# This program is used to read in a LANforge Dataplane CSV file and output
# a csv file that works with a customer's RvRvO visualization tool.
#
# Example use case:
#
# Read in ~/text-csv-0-candela.csv, output is stored at outfile.csv
# ./py-scripts/csv_convert.py -i ~/text-csv-0-candela.csv
#
# Output is csv file with mixxed columns, top part:
# Test Run,Position [Deg],Attenuation 1 [dB], Pal Stats Endpoint 1 Control Rssi [dBm], Pal Stats Endpoint 1 Data Rssi [dBm]
This program is used to read in a LANforge Dataplane CSV file and output
a csv file that works with a customer's RvRvO visualization tool.
# Second part:
# Step Index,Position [Deg],Attenuation [dB],Traffic Pair 1 Throughput [Mbps]
Example use case:
Read in ~/text-csv-0-candela.csv, output is stored at outfile.csv
./py-scripts/csv_convert.py -i ~/text-csv-0-candela.csv
Output is csv file with mixxed columns, top part:
Test Run,Position [Deg],Attenuation 1 [dB], Pal Stats Endpoint 1 Control Rssi [dBm], Pal Stats Endpoint 1 Data Rssi [dBm]
Second part:
Step Index,Position [Deg],Attenuation [dB],Traffic Pair 1 Throughput [Mbps]
"""
import sys
import os
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
class CSVParcer():
def __init__(self,csv_infile=None,csv_outfile=None):

View File

@@ -1,19 +1,8 @@
#!/usr/bin/env python3
import sys
import os
import argparse
#import time
#import datetime
#import subprocess
#import re
#import csv
#import time
#import operator
import pandas as pd
#import matplotlib.pyplot as plt
#import numpy as np
#https://pandas.pydata.org/pandas-docs/stable/user_guide/visualization.html
#https://queirozf.com/entries/pandas-dataframe-plot-examples-with-matplotlib-pyplot
@@ -23,8 +12,8 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
class L3CSVParcer():
def __init__(self,csv_file):
@@ -135,7 +124,7 @@ def main():
#debug_on = False
parser = argparse.ArgumentParser(
prog='quick_test.py',
prog='csv_processor.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
Useful Information:

View File

@@ -1,24 +1,31 @@
#!/usr/bin/env python3
import sys
import os
import importlib
import argparse
import json
import random
import string
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath('..'), 'py-dashboard'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
csv_to_influx = importlib.import_module("py-scripts.csv_to_influx")
CSVtoInflux = csv_to_influx.CSVtoInflux
influx_add_parser_args = csv_to_influx.influx_add_parser_args
grafana_profile = importlib.import_module("py-scripts.grafana_profile")
UseGrafana = grafana_profile.UseGrafana
influx = importlib.import_module("py-scripts.influx")
RecordInflux = influx.RecordInflux
InfluxRequest = importlib.import_module("py-dashboard.InfluxRequest")
influx_add_parser_args = InfluxRequest.influx_add_parser_args
from LANforge.lfcli_base import LFCliBase
import json
from influx2 import RecordInflux
from csv_to_influx import CSVtoInflux, influx_add_parser_args
from grafana_profile import UseGrafana
import random
import string
class data_to_grafana(LFCliBase):
def __init__(self,

View File

@@ -1,167 +1,62 @@
#!/usr/bin/env python3
# Copies the data from a CSV file from the KPI file generated from a Wifi Capacity test to an Influx database
# The CSV requires three columns in order to work: Date, test details, and numeric-score.
# Date is a unix timestamp, test details is the variable each datapoint is measuring, and numeric-score is the value for that timepoint and variable.
import sys
import os
from pprint import pprint
from influx2 import RecordInflux
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
import importlib
from pathlib import Path
import argparse
import datetime
def influx_add_parser_args(parser):
parser.add_argument('--influx_host', help='Hostname for the Influx database', default=None)
parser.add_argument('--influx_port', help='IP Port for the Influx database', default=8086)
parser.add_argument('--influx_org', help='Organization for the Influx database', default=None)
parser.add_argument('--influx_token', help='Token for the Influx database', default=None)
parser.add_argument('--influx_bucket', help='Name of the Influx bucket', default=None)
parser.add_argument('--influx_tag', action='append', nargs=2,
help='--influx_tag <key> <val> Can add more than one of these.', default=[])
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
cv_add_base_parser = cv_test_manager.cv_add_base_parser
cv_base_adjust_parser = cv_test_manager.cv_base_adjust_parser
InfluxRequest = importlib.import_module("py-dashboard.InfluxRequest")
RecordInflux = InfluxRequest.RecordInflux
influx_add_parser_args = InfluxRequest.influx_add_parser_args
class CSVtoInflux():
class CSVtoInflux:
def __init__(self,
_exit_on_error=False,
_exit_on_fail=False,
_proxy_str=None,
_capture_signal_list=[],
influxdb=None,
_influx_tag=[],
target_csv=None,
sep='\t'):
self.influxdb = influxdb
self.target_csv = target_csv
self.influx_tag = _influx_tag
self.sep = sep
def read_csv(self, file):
csv = open(file).read().split('\n')
rows = list()
for x in csv:
if len(x) > 0:
rows.append(x.split(self.sep))
return rows
# Submit data to the influx db if configured to do so.
def post_to_influx(self):
df = self.read_csv(self.target_csv)
length = list(range(0, len(df[0])))
columns = dict(zip(df[0], length))
print('columns: %s' % columns)
influx_variables = ['script', 'short-description', 'test_details', 'Graph-Group',
'DUT-HW-version', 'DUT-SW-version', 'DUT-Serial-Num', 'testbed', 'Test Tag', 'Units']
csv_variables = ['test-id', 'short-description', 'test details', 'Graph-Group',
'dut-hw-version', 'dut-sw-version', 'dut-serial-num', 'test-rig', 'test-tag', 'Units']
csv_vs_influx = dict(zip(csv_variables, influx_variables))
for row in df[1:]:
row = [sub.replace('NaN', '0') for sub in row]
tags = dict()
print("row: %s" % row)
short_description = row[columns['short-description']]
if row[columns['numeric-score']] == 'NaN':
numeric_score = '0x0'
else:
numeric_score = float(row[columns['numeric-score']])
date = row[columns['Date']]
date = datetime.datetime.utcfromtimestamp(int(date) / 1000).isoformat() #convert to datetime so influx can read it, this is required
for variable in csv_variables:
if variable in columns.keys():
index = columns[variable]
influx_variable = csv_vs_influx[variable]
tags[influx_variable] = row[index]
self.influxdb.post_to_influx(short_description, numeric_score, tags, date)
def script_name(self):
with open(self.target_csv) as fp:
line = fp.readline()
line = line.split('\t')
test_id_index = line.index('test-id')
line = fp.readline()
line.split('\t')
return line[test_id_index]
def create_dashboard(self,
dashboard_name=None):
#Create a dashboard in Grafana to look at the data you just posted to Influx
dashboard_name
influx_host,
influx_port,
influx_org,
influx_token,
influx_bucket,
path):
self.path = path
self.influxdb = RecordInflux(_influx_host=influx_host,
_influx_port=influx_port,
_influx_org=influx_org,
_influx_token=influx_token,
_influx_bucket=influx_bucket)
def glob(self):
path = Path(self.path)
self.kpi_list = list(path.glob('**/kpi.csv'))
for kpi in self.kpi_list:
self.influxdb.RecordInflux.csv_to_influx(kpi)
def main():
lfjson_host = "localhost"
lfjson_port = 8080
endp_types = "lf_udp"
debug = False
parser = argparse.ArgumentParser(
prog='csv_to_influx.py',
# formatter_class=argparse.RawDescriptionHelpFormatter,
formatter_class=argparse.RawTextHelpFormatter,
epilog='''
''',
prog='csv_to_influx.py'
)
cv_add_base_parser(parser)
description='''\
csv_to_influx.py:
--------------------
Summary :
----------
Copies the data from a CSV file generated by a wifi capacity test to an influx database.
Column names are designed for the KPI file generated by our Wifi Capacity Test.
A user can of course change the column names to match these in order to input any csv file.
The CSV file needs to have the following columns:
--date - which is a UNIX timestamp
--test details - which is the variable being measured by the test
--numeric-score - which is the value for that variable at that point in time.
Generic command layout:
-----------------------
python .\\csv_to_influx.py
Command:
python3 csv_to_influx.py --influx_host localhost --influx_org Candela --influx_token random_token --influx_bucket lanforge
--target_csv kpi.csv
''')
influx_add_parser_args(parser)
# This argument is specific to this script, so not part of the generic influxdb parser args
# method above.
parser.add_argument('--target_csv', help='CSV file to record to influx database', default="")
parser.add_argument('--sep', help='character to split CSV by', default='\t')
parser.add_argument('--path', action='append')
args = parser.parse_args()
influxdb = RecordInflux(_influx_host=args.influx_host,
_influx_port=args.influx_port,
_influx_org=args.influx_org,
_influx_token=args.influx_token,
_influx_bucket=args.influx_bucket)
cv_base_adjust_parser(args)
csvtoinflux = CSVtoInflux(influxdb=influxdb,
target_csv=args.target_csv,
_influx_tag=args.influx_tag,
sep=args.sep)
csvtoinflux.post_to_influx()
csvtoinflux = CSVtoInflux(args.influx_host,
args.influx_port,
args.influx_org,
args.influx_token,
args.influx_bucket,
args.path)
csvtoinflux.glob()
if __name__ == "__main__":

View File

@@ -0,0 +1,38 @@
{
"ct_igg":{
"Notes":[
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file. so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
"The replacement is loosely coupled so the upper and lower case convention is used",
"to identify replaced strings in the lf_check.py code.",
"this file contains the Influx, Grafana and Ghost configuration",
"Influx, Ghost, and Grafana are up and running on v-centos8s 192.168.100.153"
]
},
"test_database":{
"database_config_influx": "True",
"database_host_influx": "192.168.100.153",
"database_port_influx": "8086",
"database_token_influx": "PwYwrDjUSpLyUa8-0QeJGuf9p6KgPgmTVs0Zz0BZiyele74pNasBMJR-dKiF3LE8Qft5tADHtPSIS0WcVXHc_g==",
"database_org_influx": "Candela",
"database_bucket_influx": "candela",
"database_tag_influx": "testbed CT-US-002",
"test_rig_influx": "CT-US-002"
},
"test_dashboard":{
"dashboard_config_grafana": "True",
"dashboard_host_grafana": "192.168.100.153",
"dashboard_token_grafana": "eyJrIjoid1hpM0pwZFRSc3c0bGU2TEpITEVscHh4T0pPMVdZRzEiLCJuIjoiY2h1Y2siLCJpZCI6MX0="
},
"test_blog":{
"blog_config_ghost": "True",
"blog_host_ghost": "192.168.100.153",
"blog_token_ghost": "60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742",
"blog_authors_ghost": "Matthew",
"blog_customer_ghost": "candela",
"blog_user_push_ghost": "lanforge",
"blog_password_push_ghost": "lanforge",
"blog_flag_ghost": "--kpi_to_ghost"
}
}

View File

@@ -0,0 +1,39 @@
{
"ct_igg":{
"Notes":[
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file. so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
"The replacement is loosely coupled so the upper and lower case convention is used",
"to identify replaced strings in the lf_check.py code.",
"this file contains the Influx, Grafana and Ghost configuration",
"Influx, Ghost, and Grafana are up and running on v-centos8s 192.168.100.153"
]
},
"test_database":{
"database_config_influx": "True",
"database_host_influx": "192.168.100.153",
"database_port_influx": "8086",
"database_token_influx": "PwYwrDjUSpLyUa8-0QeJGuf9p6KgPgmTVs0Zz0BZiyele74pNasBMJR-dKiF3LE8Qft5tADHtPSIS0WcVXHc_g==",
"database_org_influx": "Candela",
"database_bucket_influx": "candela",
"database_tag_influx": "testbed CT-US-004",
"test_rig_influx": "CT-US-004"
},
"test_dashboard":{
"dashboard_config_grafana": "True",
"dashboard_host_grafana": "192.168.100.153",
"dashboard_token_grafana": "eyJrIjoid1hpM0pwZFRSc3c0bGU2TEpITEVscHh4T0pPMVdZRzEiLCJuIjoiY2h1Y2siLCJpZCI6MX0="
},
"test_blog":{
"blog_config_ghost": "True",
"blog_host_ghost": "192.168.100.153",
"blog_token_ghost": "60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742",
"blog_authors_ghost": "Matthew",
"blog_customer_ghost": "candela",
"blog_user_push_ghost": "lanforge",
"blog_password_push_ghost": "lanforge",
"blog_flag_ghost": "--kpi_to_ghost"
}
}

39
py-scripts/ct_igg.json Normal file
View File

@@ -0,0 +1,39 @@
{
"ct_igg":{
"Notes":[
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file. so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
"The replacement is loosely coupled so the upper and lower case convention is used",
"to identify replaced strings in the lf_check.py code.",
"this file contains the Influx, Grafana and Ghost configuration",
"Influx, Ghost, and Grafana are up and running on v-centos8s 192.168.100.153"
]
},
"test_database":{
"database_config_influx": "True",
"database_host_influx": "192.168.100.153",
"database_port_influx": "8086",
"database_token_influx": "PwYwrDjUSpLyUa8-0QeJGuf9p6KgPgmTVs0Zz0BZiyele74pNasBMJR-dKiF3LE8Qft5tADHtPSIS0WcVXHc_g==",
"database_org_influx": "Candela",
"database_bucket_influx": "candela",
"database_tag_influx": "testbed CT-US-001",
"test_rig_influx": "CT-US-001"
},
"test_dashboard":{
"dashboard_config_grafana": "True",
"dashboard_host_grafana": "192.168.100.153",
"dashboard_token_grafana": "eyJrIjoid1hpM0pwZFRSc3c0bGU2TEpITEVscHh4T0pPMVdZRzEiLCJuIjoiY2h1Y2siLCJpZCI6MX0="
},
"test_blog":{
"blog_config_ghost": "True",
"blog_host_ghost": "192.168.100.153",
"blog_token_ghost": "60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742",
"blog_authors_ghost": "Matthew",
"blog_customer_ghost": "candela",
"blog_user_push_ghost": "lanforge",
"blog_password_push_ghost": "lanforge",
"blog_flag_ghost": "--kpi_to_ghost"
}
}

View File

@@ -0,0 +1,587 @@
{
"ct_us_001":{
"Notes":[
"The json is used to orchestrate the tests to be run on testbed ct_us_001",
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file, so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
"The replacement is loosely coupled so the upper and lower case convention is used",
"to identify replaced strings in the lf_check.py code."
]
},
"test_parameters":{
"test_bed": "CT-US-001",
"lf_mgr_ip": "192.168.100.116",
"lf_mgr_port": "8080",
"dut_set_name": "DUT_NAME ASUSRT-AX88U",
"dut_name": "ASUSRT-AX88U",
"dut_bssid_2g": "3c:7c:3f:55:4d:60",
"dut_bssid_5g": "3c:7c:3f:55:4d:64",
"dut_sw": "3.0.0.4.386_44266",
"test_timeout": 300,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.100.201",
"email_list_test": "chuck.rekiere@candelatech.com",
"host_ip_test": "192.168.100.201",
"email_title_txt": "Lanforge QA Testing CT-US-001",
"email_txt": "Lanforge QA Testing CT-US-001 "
},
"test_network":{
"http_test_ip": "10.40.0.10",
"ftp_test_ip": "10.40.0.10",
"test_ip": "192.168.0.104"
},
"test_generic":{
"radio_used": "wiphy1",
"ssid_used": "asus11ax-5",
"ssid_pw_used": "hello123",
"security_used": "wpa2",
"num_sta": 1,
"col_names": "name,tx_bytes,rx_bytes,dropped",
"upstream_port": "eth2"
},
"test_database":{
"database_config": "True",
"database_host": "192.168.100.201",
"database_port": "8086",
"database_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
"database_org": "Candela",
"database_bucket": "lanforge_qa_testing",
"database_tag": "testbed CT-US-001",
"test_rig": "CT-US-001"
},
"test_dashboard":{
"dashboard_config": "True",
"dashboard_host": "192.168.100.201",
"dashboard_token": "eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ=="
},
"test_blog":{
"blog_config": "True",
"blog_host": "192.168.100.153",
"blog_token": "60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742",
"blog_authors": "Matthew",
"blog_customer": "candela",
"blog_user_push": "lanforge",
"blog_password_push": "lanforge",
"blog_flag": "--kpi_to_ghost"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"1","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"1","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"}
},
"test_suites":{
"suite_l3":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"skip","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"auto_suite":{
"CT-US-001_create_chamberview_dut_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-001_create_chamberview_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-001_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 4 --max_stations_5 32 --max_stations_dual 4 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_lf_ap_auto_test1": {
"enabled": "FALSE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_2G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_lf_ap_auto_test_2": {
"enabled": "FALSE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{
"enabled":"TRUE",
"load_db":"skip",
"command":"ghost_profile.py",
"args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
},
"suite_wc_dp_mt":{
"CT-US-001_create_chamberview_dut_0":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_mt7915e_sta19":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 19 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_mt7915e":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_dataplane_ATH10K_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan7",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e' ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
},
"suite_wc_dp_short":{
"CT-US-001_create_chamberview_dut_for_ATH10K":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_ATH10K(9984)_sta50":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 50 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_ATH10K(9984)":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
" --test_rig TEST_RIG "
]
},
"CT-US-001_QA":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"./tools/lf_qa.py",
"args":"",
"args_list":[
" --path REPORT_PATH --store --png --database ./tools/qa_test_db"
]
}
},
"suite_wc_dp_short_igg":{
"CT-US-001_create_chamberview_dut_for_ATH10K":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_ATH10K(9984)_sta50":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 50 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_ATH10K(9984)":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
},
"suite_wc_dp":{
"CT-US-001_create_chamberview_dut_for_ATH10K":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_ATH10K(9984)_sta50":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 50 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_ATH10K(9984)":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_dataplane_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan1",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)' ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_dut_for_AX210":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
"--lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_wiphy3_AX210_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\" "
]
},
"CT-US-001_wifi_capacity_wiphy3_AX210_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan3 --test_tag 'AX210'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_dataplane_wiphy3_AX210_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan3",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'AX210'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_dut_for_mt7915e":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_mt7915e_sta19":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 19 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_mt7915e":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_dataplane_ATH10K_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan7",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e' ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_dut_2":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-001_create_chamberview_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-001_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
}
}
}

View File

@@ -0,0 +1,455 @@
{
"ct_tests_001":{
"Notes":[
"The json is used to orchestrate the tests to be run on testbed ct_us_001",
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file, so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
"The replacement is loosely coupled so the upper and lower case convention is used",
"to identify replaced strings in the lf_check.py code."
]
},
"test_suites":{
"suite_l3":{
"test_l3_longevity":{
"enabled":"TRUE",
"load_db":"skip",
"command":"test_l3_longevity.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --test_duration 15s --polling_interval 5s --upstream_port eth2 ",
" --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' ",
" --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"
]
}
},
"suite_l3_ap":{
"test_l3_longevity":{
"enabled":"TRUE",
"load_db":"skip",
"command":"test_l3_longevity.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --test_duration 15s --polling_interval 5s --upstream_port eth2 ",
" --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' ",
" --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000",
" --ap_read --ap_test_mode"
]
}
},
"suite_wc_dp_shorter":{
"CT-US-001_create_chamberview_dut_0":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_mt7915e_sta19":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 19 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_mt7915e":{
"enabled":"TRUE",
"timeout":"600",
"iterations":"1",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,10,19 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e'",
" --test_rig TEST_RIG",
" --set DUT_SET_NAME"
]
},
"CT-US-001_QA":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"./tools/lf_qa.py",
"args":"",
"args_list":[
" --path REPORT_PATH --store --png --database ./tools/qa_001_test_db"
]
}
},
"suite_wc_dp_short":{
"CT-US-001_create_chamberview_dut_for_ATH10K":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_ATH10K(9984)_sta50":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 50 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_ATH10K(9984)":{
"enabled":"TRUE",
"timeout":"600",
"iterations":"1",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
" --test_rig TEST_RIG ",
" --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_dut_0":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_mt7915e_sta19":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 19 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_mt7915e":{
"enabled":"TRUE",
"timeout":"600",
"iterations":"1",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,10,15,19 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e'",
" --test_rig TEST_RIG",
" --set DUT_SET_NAME"
]
},
"CT-US-001_QA":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"./tools/lf_qa.py",
"args":"",
"args_list":[
" --path REPORT_PATH --store --png --database ./tools/qa_001_test_db"
]
}
},
"suite_wc_dp":{
"CT-US-001_create_chamberview_dut_for_ATH10K":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_ATH10K(9984)_sta50":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 50 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_ATH10K(9984)":{
"enabled":"TRUE",
"timeout":"600",
"iterations":"1",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
" --test_rig TEST_RIG ",
" --set DUT_SET_NAME",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG"
]
},
"CT-US-001_create_chamberview_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-ATH10K-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_dataplane_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"timeout":"600",
"iterations":"1",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan1",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;Custom' ",
" --raw_line 'cust_pkt_sz: 88;256;512;768;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: AUTO'",
" --raw_line 'spatial_streams: AUTO' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)' ",
" --test_rig TEST_RIG",
" --set DUT_SET_NAME",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG"
]
},
"CT-US-001_create_chamberview_dut_for_AX210":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
"--lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_wiphy3_AX210_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-AX210-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\" "
]
},
"CT-US-001_wifi_capacity_wiphy3_AX210_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"iterations":"1",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan3 --test_tag 'AX210'",
" --test_rig TEST_RIG",
" --set DUT_SET_NAME",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG"
]
},
"CT-US-001_dataplane_wiphy3_AX210_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"iterations":"1",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan3",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: AUTO'",
" --raw_line 'spatial_streams: AUTO' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'AX210'",
" --test_rig TEST_RIG",
" --set DUT_SET_NAME",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG"
]
},
"CT-US-001_create_chamberview_dut_for_mt7915e":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_mt7915e_sta19":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-mt7915e-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 19 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_mt7915e":{
"enabled":"TRUE",
"timeout":"600",
"iterations":"1",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,10,15,19 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e'",
" --test_rig TEST_RIG",
" --set DUT_SET_NAME",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG"
]
},
"CT-US-001_create_chamberview_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-mt7915e-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_dataplane_ATH10K_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"iterations":"1",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan7",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: AUTO'",
" --raw_line 'spatial_streams: AUTO' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e' ",
" --test_rig TEST_RIG",
" --set DUT_SET_NAME",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG"
]
},
"CT-US-001_create_chamberview_dut_2":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-001_create_chamberview_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ap-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-001_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"iterations":"1",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
" --test_rig TEST_RIG",
" --set DUT_SET_NAME",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG"
]
},
"CT-US-001_QA":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"./tools/lf_qa.py",
"args":"",
"args_list":[
" --path REPORT_PATH --store --png --database ./tools/qa_001_test_db"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
}
}
}

View File

@@ -1,7 +1,7 @@
{
"ct_us_004":{
"ct_us_002":{
"Notes":[
"The json is used to orchestrate the tests to be run on testbed ct_us_004",
"The json is used to orchestrate the tests to be run on testbed ct_us_001",
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file. so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
@@ -10,12 +10,13 @@
]
},
"test_parameters":{
"test_bed": "CT-US-004",
"lf_mgr_ip": "192.168.100.194",
"test_bed": "CT-US-002",
"lf_mgr_ip": "192.168.100.200",
"lf_mgr_port": "8080",
"dut_name": "NetgearAX12",
"dut_set_name": "DUT_NAME NETGEAR59-5G",
"dut_name": "NETGEAR-AX12",
"dut_bssid_5g": "94:a6:7e:54:d4:33",
"test_timeout": 300,
"test_timeout": 1200,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
@@ -24,8 +25,8 @@
"host_ip_production": "192.168.100.201",
"email_list_test": "chuck.rekiere@candelatech.com",
"host_ip_test": "192.168.100.201",
"email_title_txt": "Lanforge QA Testing CT-US-004",
"email_txt": "Lanforge QA Testing CT-US-004 "
"email_title_txt": "Lanforge QA Testing CT-US-002",
"email_txt": "Lanforge QA Testing CT-US-002"
},
"test_network":{
"http_test_ip": "10.40.0.10",
@@ -37,9 +38,9 @@
"ssid_used": "NETGEAR59-5G",
"ssid_pw_used": "crispynest798",
"security_used": "wpa2",
"num_sta": 1,
"num_sta": 4,
"col_names": "name,tx_byptes,rx_bytes,dropped",
"upstream_port": "eth2"
"upstream_port": "1.1.eth2"
},
"test_database":{
"database_config": "True",
@@ -48,9 +49,8 @@
"database_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
"database_org": "Candela",
"database_bucket": "lanforge_qa_testing",
"dut_set_name": "DUT_NAME NetgearAX12",
"database_tag": "testbed CT-US-004",
"test_rig": "CT-US-004"
"database_tag": "testbed CT-US-002",
"test_rig": "CT-US-002"
},
"test_dashboard":{
"dashboard_config": "True",
@@ -68,15 +68,15 @@
"blog_flag": "--kpi_to_ghost"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"1","SSID":"NETGEAR59-5G","PASSWD":"crispynest798","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"1","SSID":"NETGEAR59-5G","PASSWD":"crispynest798","SECURITY":"wpa2"}
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"4","SSID":"NETGEAR59-5G","PASSWD":"crispynest798","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"4","SSID":"NETGEAR59-5G","PASSWD":"crispynest798","SECURITY":"wpa2"}
},
"test_suites":{
"suite_l3":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"skip","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==NETGEAR59-5G,ssid_pw==crispynest798,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
"suite_two":{
"test_l3_longevity":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port UPSTREAM_PORT --radio 'radio==wiphy1,stations==4,ssid==ct523c-vap,ssid_pw==ct523c-vap,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"auto_suite":{
"CT-US-004_create_chamberview_dut_ap":{
"CT-US-002_create_chamberview_dut_1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
@@ -87,7 +87,7 @@
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-004_create_chamberview_ap":{
"CT-US-002_create_chamberview_1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
@@ -95,33 +95,15 @@
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-004_lf_ap_auto_test": {
"CT-US-002_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_lf_ap_auto_test_2": {
"enabled": "FALSE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"timeout":"4800",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
@@ -151,124 +133,7 @@
}
},
"suite_wc_dp":{
"CT-US-004_create_chamberview_dut_NetgearAX12":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-004_create_chamberview_wiphy1_sta64":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 32 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-004_wifi_capacity_wiphy1_ATH10K(9984)_sta64":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_create_chamberview_wiphy1_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-004_dataplane_wiphy1_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan1",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_create_chamberview_dut_NetgearAX12_5_2":{
"enabled":"FALSE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
"--lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-004_create_chamberview_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\" "
]
},
"CT-US-004_wifi_capacity_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan3",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_dataplane_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan3",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_create_chamberview_dut_ap":{
"CT-US-002_create_chamberview_dut_1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
@@ -279,7 +144,7 @@
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-004_create_chamberview_ap":{
"CT-US-002_create_chamberview_ATH10k_sta64":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
@@ -287,24 +152,173 @@
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 32 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_wifi_capacity_ATH10k(9984)":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream UPSTREAM_PORT --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-002_create_chamberview_dut_ATH10K_wan1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_ATH10k_wan1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_dataplane_ATH10k(9984) CT-US-002":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream UPSTREAM_PORT --dut DUT_NAME --duration 30s --station 1.1.wlan1",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' ",
" --raw_line 'spatial_streams: 4' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)' ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-002_create_chamberview_dut_2":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_AX200_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_wifi_capacity_AX200 CT-US-002":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream UPSTREAM_PORT --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan4 --test_tag 'ATH10K(9984)' ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-002_create_chamberview_dut_AX200_wan1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_AX200_wan1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_dataplane_AX200":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream UPSTREAM_PORT --dut DUT_NAME --duration 30s --station 1.1.wlan4",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 4' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'AX200'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-002_create_chamberview_dut_auto":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=NETGEAR59-5G security=WPA2 password=crispynest798 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_auto":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-004_lf_ap_auto_test": {
"enabled": "TRUE",
"CT-US-002_lf_ap_auto_test": {
"enabled": "FALSE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"timeout": "1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy1 --set 'Basic Client Connectivity' 1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'AX200'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
@@ -319,7 +333,7 @@
]
}
}
}
}
}

View File

@@ -7,7 +7,7 @@
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,chuck.rekiere@candelatech.com,",
"host_ip_production": "192.168.95.6",
"email_list_test": "chuck.rekiere@candelatech.com,chuck.rekiere@gmail.com",
"email_list_test": "chuck.rekiere@candelatech.com",
"host_ip_test": "192.168.95.6",
"lf_mgr": "192.168.100.116",
"email_title_txt": "Lanforge QA Testing CT-US-001",

View File

@@ -1,7 +1,7 @@
{
"ct_us_001":{
"ct_us_004":{
"Notes":[
"The json is used to orchestrate the tests to be run on testbed ct_us_001",
"The json is used to orchestrate the tests to be run on testbed ct_us_004",
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file. so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
@@ -10,13 +10,14 @@
]
},
"test_parameters":{
"test_bed": "CT-US-001",
"lf_mgr_ip": "192.168.100.116",
"test_bed": "CT-US-004",
"lf_mgr_ip": "192.168.100.194",
"lf_mgr_port": "8080",
"dut_name": "ASUSRT-AX88U",
"dut_bssid_2g": "3c:7c:3f:55:4d:60",
"dut_bssid_5g": "3c:7c:3f:55:4d:64",
"dut_sw": "3.0.0.4.386_42820",
"dut_set_name": "DUT_NAME Asus-RT-AX88U",
"dut_name": "Asus-RT-AX88U",
"dut_bssid_2g": "d4:5d:64:a0:7f:78",
"dut_bssid_5g": "d4:5d:64:a0:7f:7c",
"dut_sw": "3.0.0.4.386_44266",
"test_timeout": 300,
"load_blank_db": false,
"load_factory_default_db": true,
@@ -24,10 +25,10 @@
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.100.201",
"email_list_test": "chuck.rekiere@candelatech.com,logan.lipke@candelatech.com",
"email_list_test": "chuck.rekiere@candelatech.com",
"host_ip_test": "192.168.100.201",
"email_title_txt": "Lanforge QA Testing CT-US-001",
"email_txt": "Lanforge QA Testing CT-US-001 "
"email_title_txt": "Lanforge QA Testing CT-US-004",
"email_txt": "Lanforge QA Testing CT-US-004 "
},
"test_network":{
"http_test_ip": "10.40.0.10",
@@ -50,9 +51,8 @@
"database_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
"database_org": "Candela",
"database_bucket": "lanforge_qa_testing",
"dut_set_name": "DUT_NAME ASUSRT-AX88U",
"database_tag": "testbed CT-US-001",
"test_rig": "CT-US-001"
"database_tag": "testbed CT-US-004",
"test_rig": "CT-US-004"
},
"test_dashboard":{
"dashboard_config": "True",
@@ -78,7 +78,7 @@
"test_l3_longevity":{"enabled":"TRUE","load_db":"skip","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"auto_suite":{
"CT-US-001_create_chamberview_dut_ap":{
"CT-US-004_create_chamberview_dut_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
@@ -89,7 +89,7 @@
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-001_create_chamberview_ap":{
"CT-US-004_create_chamberview_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
@@ -102,7 +102,7 @@
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-001_lf_ap_auto_test": {
"CT-US-004_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
@@ -111,24 +111,6 @@
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
" --max_stations_2 4 --max_stations_5 32 --max_stations_dual 4 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_lf_ap_auto_test1": {
"enabled": "FALSE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_2G (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
@@ -138,7 +120,7 @@
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_lf_ap_auto_test_2": {
"CT-US-004_lf_ap_auto_test_2": {
"enabled": "FALSE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
@@ -170,8 +152,8 @@
]
}
},
"suite_wc_dp":{
"CT-US-001_create_chamberview_dut_asus11ax_5":{
"suite_wc_dp_mt":{
"CT-US-004_create_chamberview_dut_0":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
@@ -183,19 +165,95 @@
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_wiphy0_wiphy1_sta128":{
"CT-US-004_create_chamberview_mt7915e_sta19":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 19 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_wiphy0_wiphy1_ATH10K(9984)_sta128":{
"CT-US-004_wifi_capacity_mt7915e":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_create_chamberview_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-004_dataplane_ATH10K_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan7",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e' ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
},
"suite_wc_dp":{
"CT-US-004_create_chamberview_dut_0":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-004_create_chamberview_ATH10K(9984)_sta50":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 50 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-004_wifi_capacity_ATH10K(9984)":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
@@ -209,19 +267,19 @@
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_wiphy1_ATH10K(9984)_sta1":{
"CT-US-004_create_chamberview_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_dataplane_wiphy1_ATH10K(9984)_sta1":{
"CT-US-004_dataplane_ATH10K(9984)_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
@@ -231,12 +289,12 @@
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan1",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH ",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_dut_asus11ax_5_2":{
"CT-US-004_create_chamberview_dut_1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
@@ -248,19 +306,19 @@
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_wiphy3_AX210_sta1":{
"CT-US-004_create_chamberview_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\" "
]
},
"CT-US-001_wifi_capacity_wiphy3_AX210_sta1":{
"CT-US-004_wifi_capacity_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
@@ -273,7 +331,7 @@
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_dataplane_wiphy3_AX210_sta1":{
"CT-US-004_dataplane_wiphy3_AX210_sta1":{
"enabled":"FALSE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
@@ -288,7 +346,7 @@
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_dut_ap":{
"CT-US-004_create_chamberview_dut_for_mt7915e":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
@@ -298,8 +356,74 @@
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]},
"CT-US-001_create_chamberview_ap":{
]
},
"CT-US-004_create_chamberview_mt7915e_sta19":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 19 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-004_wifi_capacity_mt7915e":{
"enabled":"TRUE",
"timeout":"600",
"load_db":"skip",
"command":"lf_wifi_capacity_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e'",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_create_chamberview_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario CT-US-004-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-004_dataplane_ATH10K_mt7915e_sta1":{
"enabled":"TRUE",
"load_db":"skip",
"command":"lf_dataplane_test.py",
"args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan7",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e' ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-004_create_chamberview_dut_2":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview_dut.py",
"args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
" --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
]
},
"CT-US-004_create_chamberview_ap":{
"enabled":"TRUE",
"load_db":"skip",
"command":"create_chamberview.py",
@@ -307,12 +431,12 @@
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 32 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-001_lf_ap_auto_test": {
"CT-US-004_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"timeout":"1200",
@@ -325,7 +449,7 @@
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --pull_report --local_lf_report_dir REPORT_PATH --test_tag ATH10K(9984)",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]

View File

@@ -1,19 +1,18 @@
#!/usr/bin/env python3
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath('..'), 'py-dashboard'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
from cv_test_manager import cv_test
from cv_test_manager import *
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
cv_test = cv_test_manager.cv_test
class CVManager(cv_test):
@@ -31,7 +30,10 @@ class CVManager(cv_test):
self.build_cv_scenario()
def main():
parser = argparse.ArgumentParser(description='''This is a simple driver script to load a CV Scenario''')
parser = argparse.ArgumentParser(
prog='cv_manager.py',
formatter_class=argparse.RawTextHelpFormatter,
description='''This is a simple driver script to load a CV Scenario''')
parser.add_argument('--scenario', help='Scenario you wish to build')
parser.add_argument('--debug', help='Enable debugging', default=False, action="store_true")
parser.add_argument('--mgr', default='localhost')
@@ -44,4 +46,4 @@ def main():
manager.apply_and_build_scenario()
if __name__ =="__main__":
main()
main()

View File

@@ -62,6 +62,7 @@ DUT syntax is somewhat tricky: DUT-name SSID BSID (bssid-idx), example: linksys
'''
import sys
import os
import importlib
import argparse
import time
@@ -69,17 +70,20 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath('..'), 'py-dashboard'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
from lf_wifi_capacity_test import WiFiCapacityTest
from cv_test_manager import *
from create_chamberview_dut import DUT
from create_chamberview import CreateChamberview
from lf_dataplane_test import DataplaneTest
from grafana_profile import UseGrafana
from lf_ap_auto_test import ApAutoTest
lf_wifi_capacity_test = importlib.import_module("py-scripts.lf_wifi_capacity_test")
WiFiCapacityTest = lf_wifi_capacity_test.WiFiCapacityTest
cv_test_manager = importlib.import_module("py-scripts.cv_test_manager")
create_chamberview = importlib.import_module("py-scripts.create_chamberview")
CreateChamberview = create_chamberview.CreateChamberview
DUT = create_chamberview.DUT
lf_dataplane_test = importlib.import_module("py-scripts.lf_dataplane_test")
DataplaneTest = lf_dataplane_test.DataplaneTest
grafana_profile = importlib.import_module("py-scripts.grafana_profile")
UseGrafana = grafana_profile.UseGrafana
lf_ap_auto_test = importlib.import_module("py-scripts.lf_ap_auto_test")
def main():

View File

@@ -6,7 +6,6 @@
import ast
import os
import pprint
class DocstringCollector:

View File

@@ -2,19 +2,23 @@
"""download_test.py will do lf_report::add_kpi(tags, 'throughput-download-bps', $my_value);"""
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
influx = importlib.import_module("py-scripts.influx")
RecordInflux = influx.RecordInflux
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
from LANforge.lfcli_base import LFCliBase
from influx import RecordInflux
from realm import Realm
import argparse
class DownloadTest(Realm):
def __init__(self,
@@ -78,4 +82,4 @@ def main():
)
if __name__ == "__main__":
main()
main()

View File

@@ -5,22 +5,24 @@ problems in the /events/ URL handler by querying events rapidly.
Please use concurrently with event_flood.py.
"""
import sys
import os
import importlib
from datetime import datetime
import pprint
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append('../py-json')
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
import argparse
from LANforge.lfcli_base import LFCliBase
from realm import Realm
import datetime
from datetime import datetime
import time
from time import sleep
import pprint
class EventBreaker(Realm):
def __init__(self, host, port,

View File

@@ -5,22 +5,24 @@ problems in the /events/ URL handler by inserting events rapidly.
Please concurrently use with event_breaker.py.
"""
import sys
import os
import importlib
import argparse
from datetime import datetime
from time import sleep
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append('../py-json')
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
import argparse
from LANforge.lfcli_base import LFCliBase
from realm import Realm
import datetime
from datetime import datetime
import time
from time import sleep
import pprint
class EventBreaker(Realm):
def __init__(self, host, port,

View File

@@ -0,0 +1,193 @@
show_events: 1
show_log: 1
port_sorting: 2
kpi_id: TR_398v2
bg: 0xE0ECF8
dut_info_override: Anonymous Enterprise AX AP
test_rig:
test_tag:
show_scan: 1
auto_helper: 1
skip_ac: 0
skip_ax: 0
skip_2: 0
skip_5: 0
skip_5b: 1
skip_dual: 0
skip_tri: 1
selected_dut5: TR398-DUT-r750 ruckus-r750-5g 4c:b1:cd:18:e8:ec (1)
selected_dut2: TR398-DUT-r750 ruckus-r750-2g 4c:b1:cd:18:e8:e8 (2)
upstream_port: 1.2.2 eth2
operator:
mconn: 5
txpower: 20
band2_freq: 2437
band5_freq: 5180
tos: 0
speed: 65%
ospeed: 20000
max_cx_random: 0
speed_max_cx_adjust: 1000000
speed_max_cx_2: 2000000
speed_max_cx_ax_2: 3000000
speed_max_cx_5: 8000000
speed_max_cx_ax_5: 10000000
max_tput_speed_2: 100000000
max_tput_speed_5: 560000000
max_tput_speed_ax_2: 200000000
max_tput_speed_ax_5: 720000000
max_peak_tput_speed_ax_2: 300000000
max_peak_tput_speed_ax_5: 1100000000
max_peak_tput_speed_ax_5_4: 1100000000
atf_max_nss: 2
atf_extra_2m_atten: 0
rxsens_deg_rot: 180
rxsens_pre_steps: 4
stability_udp_dur: 900
stability_iter: 16
calibrate_mode: 4
calibrate_nss: 1
dur120: 30
dur180: 180
i_5g_80: 195000000
i_5g_40: 90000000
i_2g_20: 32000000
i_5g_80_ax: 195000000
i_5g_40_ax: 90000000
i_2g_20_ax: 32000000
spatial_deg_rot: 30
spatial_retry: 0
reset_pp: 99
bidir_dp_prcnt: 0.05
rxsens_stop_at_pass: 0
spatial_pause_on_zero_tput: 0
auto_coex: 0
use_virtual_ax_sta: 0
rvr_adj: 0
rssi_2m_2: -26
rssi_2m_5: -30
extra_dl_path_loss: 0
dur60: 20
turn_table: TR-398
radio-0: 1.1.2 wiphy0
radio-1: 1.1.3 wiphy1
radio-2: 1.1.4 wiphy2
radio-3: 1.1.5 wiphy3
radio-4: 1.1.6 wiphy4
radio-5: 1.1.7 wiphy5
ax_radio-0: 1.2.wiphy0
ax_radio-1: 1.2.wiphy1
ax_radio-2: 1.2.wiphy2
ax_radio-3: 1.2.wiphy3
ax_radio-4: 1.2.wiphy4
ax_radio-5: 1.2.wiphy5
ax_radio-6: 1.2.wiphy6
ax_radio-7: 1.2.wiphy7
ax_radio-8: 1.2.wiphy8
ax_radio-9: 1.2.wiphy9
ax_radio-10: 1.2.wiphy10
ax_radio-11: 1.2.wiphy11
ax_radio-12: 1.3.wiphy0
ax_radio-13: 1.3.wiphy5
ax_radio-14: 1.3.wiphy10
ax_radio-15: 1.3.wiphy15
ax_radio-16: 1.3.wiphy1
ax_radio-17: 1.3.wiphy6
ax_radio-18: 1.3.wiphy11
ax_radio-19: 1.3.wiphy16
ax_radio-20: 1.3.wiphy2
ax_radio-21: 1.3.wiphy7
ax_radio-22: 1.3.wiphy12
ax_radio-23: 1.3.wiphy17
ax_radio-24: 1.3.wiphy3
ax_radio-25: 1.3.wiphy8
ax_radio-26: 1.3.wiphy13
ax_radio-27: 1.3.wiphy18
ax_radio-28: 1.3.wiphy4
ax_radio-29: 1.3.wiphy9
ax_radio-30: 1.3.wiphy14
ax_radio-31: 1.3.wiphy19
rssi_0_2-0: -28
rssi_0_2-1: -28
rssi_0_2-2: -28
rssi_0_2-3: -28
rssi_0_2-4: -22
rssi_0_2-5: -22
rssi_0_2-6: -22
rssi_0_2-7: -22
rssi_0_2-8: -25
rssi_0_2-9: -25
rssi_0_2-10: -25
rssi_0_2-11: -25
ax_rssi_0_2-0: -29
ax_rssi_0_2-1: -29
ax_rssi_0_2-2: -29
ax_rssi_0_2-3: -29
ax_rssi_0_2-4: -23
ax_rssi_0_2-5: -23
ax_rssi_0_2-6: -23
ax_rssi_0_2-7: -23
ax_rssi_0_2-8: -26
ax_rssi_0_2-9: -26
ax_rssi_0_2-10: -26
ax_rssi_0_2-11: -26
rssi_0_5-0: -35
rssi_0_5-1: -35
rssi_0_5-2: -35
rssi_0_5-3: -35
rssi_0_5-4: -33
rssi_0_5-5: -33
rssi_0_5-6: -33
rssi_0_5-7: -33
rssi_0_5-8: -39
rssi_0_5-9: -39
rssi_0_5-10: -39
rssi_0_5-11: -39
ax_rssi_0_5-0: -35
ax_rssi_0_5-1: -35
ax_rssi_0_5-2: -35
ax_rssi_0_5-3: -35
ax_rssi_0_5-4: -32
ax_rssi_0_5-5: -32
ax_rssi_0_5-6: -32
ax_rssi_0_5-7: -32
ax_rssi_0_5-8: -39
ax_rssi_0_5-9: -39
ax_rssi_0_5-10: -39
ax_rssi_0_5-11: -39
atten-0: 1.1.3094.0
atten-1: 1.1.3094.1
atten-2: 1.1.3094.2
atten-3: 1.1.3094.3
atten-4: 1.1.3102.0
atten-5: 1.1.3102.1
atten-6: 1.1.3099.0
atten-7: 1.1.3099.1
atten-8: 1.1.3102.2
atten-9: 1.1.3102.3
ax_atten-0: 1.1.3100.3
ax_atten-1: 1.1.3100.2
ax_atten-2: NA
ax_atten-3: NA
ax_atten-4: 1.1.3100.1
ax_atten-5: 1.1.3100.0
ax_atten-8: 1.1.3099.3
ax_atten-9: 1.1.3099.2
atten_cal_ac: 0
atten_cal_ax: 0
rxsens: 0
max_cx: 0
max_tput: 1
peak_perf: 0
max_tput_bi: 0
dual_band_tput: 0
atf: 0
atf3: 0
qos3: 0
rvr: 0
spatial: 0
multi_sta: 0
reset: 0
mu_mimo: 0
stability: 0
ap_coex: 0

View File

@@ -1,20 +1,21 @@
#!/usr/bin/env python3
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
import LANforge
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
import realm
import argparse
import time
import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class IPv4Test(LFCliBase):

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env python3
from matplotlib import pyplot as plt
from datetime import datetime
import numpy as np
import os.path
from os import path

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python3
"""
NAME: ghost_profile.py
PURPOSE: modify ghost database from the command line.
@@ -27,17 +26,18 @@ this script uses pyjwt. If you get the issue module 'jwt' has no attribute 'enco
"""
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath('..'), 'py-dashboard'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
from GhostRequest import GhostRequest
# from GhostRequest import GhostRequest
GhostRequest = importlib.import_module("py-dashboard.GhostRequest")
class UseGhost(GhostRequest):

View File

@@ -7,165 +7,23 @@ It gets the columns of the files and from that it automatically determines the n
"""
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath('..'), 'py-dashboard'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
from GrafanaRequest import GrafanaRequest
from LANforge.lfcli_base import LFCliBase
import string
GrafanaRequest = importlib.import_module("py-dashboard.GrafanaRequest")
GrafanaRequest = GrafanaRequest.GrafanaRequest
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase


class UseGrafana(GrafanaRequest):
def read_csv(self, file):
csv = open(file).read().split('\n')
@@ -182,7 +40,6 @@ class UseGrafana(GrafanaRequest):
results.append(row[value])
return results
def get_units(self, target_csv):
csv = self.read_csv(target_csv)
graph_group = self.get_values(csv, 'Graph-Group')
@@ -190,7 +47,6 @@ class UseGrafana(GrafanaRequest):
return dict(zip(graph_group, units))
def main():
parser = LFCliBase.create_basic_argparse(
prog='grafana_profile.py',
@@ -204,7 +60,7 @@ def main():
--grafana_token
--dashbaord_name
--scripts "Wifi Capacity"
Create a custom dashboard with the following command:
./grafana_profile.py --create_custom yes
--title Dataplane
@@ -213,7 +69,7 @@ def main():
--graph_groups 'Per Stations Rate DL'
--graph_groups 'Per Stations Rate UL'
--graph_groups 'Per Stations Rate UL+DL'
Create a snapshot of a dashboard:
./grafana_profile.py --grafana_token TOKEN
--grafana_host HOST
@@ -240,7 +96,8 @@ def main():
optional.add_argument('--influx_bucket', help='Name of your Influx Bucket', default=None)
optional.add_argument('--graph_groups', help='How you want to filter your graphs on your dashboard',
action='append', default=[None])
optional.add_argument('--graph_groups_file', help='File which determines how you want to filter your graphs on your dashboard',
optional.add_argument('--graph_groups_file',
help='File which determines how you want to filter your graphs on your dashboard',
default=None)
optional.add_argument('--testbed', help='Which testbed you want to query', default=None)
optional.add_argument('--kpi', help='KPI file(s) which you want to graph form', action='append', default=None)
@@ -248,11 +105,13 @@ def main():
optional.add_argument('--from_date', help='Date you want to start your Grafana dashboard from', default='now-1y')
optional.add_argument('--graph_height', help='Custom height for the graph on grafana dashboard', default=8)
optional.add_argument('--graph_width', help='Custom width for the graph on grafana dashboard', default=12)
optional.add_argument('--create_snapshot', action='store_true')
optional.add_argument('--list_snapshots', action='store_true')
args = parser.parse_args()
Grafana = UseGrafana(args.grafana_token,
args.grafana_port,
args.grafana_host
args.grafana_host,
grafanajson_port=args.grafana_port
)
if args.dashboard_name is not None:
Grafana.create_dashboard(args.dashboard_name)
@@ -267,7 +126,7 @@ def main():
Grafana.create_dashboard_from_data(args.dashboard_json)
if args.kpi is not None:
args.graph_groups = args.graph_groups+Grafana.get_graph_groups(args.graph_groups)
args.graph_groups = args.graph_groups + Grafana.get_graph_groups(args.graph_groups)
if args.create_custom:
Grafana.create_custom_dashboard(scripts=args.scripts,
@@ -281,6 +140,12 @@ def main():
graph_height=args.graph_height,
graph__width=args.graph_width)
if args.create_snapshot:
Grafana.create_snapshot(args.title)
if args.list_snapshots:
Grafana.list_snapshots()
if __name__ == "__main__":
main()

View File

@@ -3,12 +3,7 @@
date - 11- feb - 2021
-Nikita Yadav
"""
from matplotlib import pyplot as plt
from datetime import datetime
import numpy as np
import os.path
from os import path
import sys
print(sys.path)

View File

@@ -1,19 +1,23 @@
#!/usr/bin/env python3
# pip3 install influxdb
import sys
import os
import importlib
import requests
import json
from influxdb import InfluxDBClient
import datetime
import time
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
import requests
import json
from influxdb import InfluxDBClient
import datetime
from LANforge.lfcli_base import LFCliBase
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
class RecordInflux(LFCliBase):

View File

@@ -1,22 +1,24 @@
#!/usr/bin/env python3
import sys
import os
import importlib
import argparse
import datetime
import time
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append('../py-json')
from LANforge import LFUtils
from LANforge import lfcli_base
from LANforge.lfcli_base import LFCliBase
from LANforge.LFUtils import *
import realm
from realm import Realm
import argparse
import datetime
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
class Layer3Test(LFCliBase):
@@ -135,13 +137,12 @@ class Layer3Test(LFCliBase):
def main():
# This has --mgr, --mgr_port and --debug
parser = LFCliBase.create_bare_argparse(prog="layer3_test.py", formatter_class=argparse.RawTextHelpFormatter, epilog="About This Script")
parser = LFCliBase.create_basic_argparse(
prog="layer3_test.py",
formatter_class=argparse.RawTextHelpFormatter,
epilog="About This Script")
# Adding More Arguments for custom use
parser.add_argument('--ssid', help='--ssid of DUT', default="lexusdut")
parser.add_argument('--passwd', help='--passwd of dut', default="[BLANK]")
parser.add_argument('--radio', help='--radio to use on LANforge', default="wiphy1")
parser.add_argument('--security', help='--security of dut', default="open")
parser.add_argument('--test_duration', help='--test_duration sets the duration of the test', default="1m")
parser.add_argument('--session_id', help='--session_id is for websocket', default="local")
parser.add_argument('--num_client', type=int, help='--num_sta is number of stations you want to create', default=2)

View File

@@ -1,40 +1,41 @@
#!/usr/bin/env python3
"""
Candela Technologies Inc.
Info : Standard Script for Layer 4 Testing
Date :
Author : Shivam Thakur
"""
import sys
import os
import importlib
import argparse
import datetime
import time
import json
import re
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append('../py-json')
from LANforge import LFUtils
from LANforge import lfcli_base
from LANforge.lfcli_base import LFCliBase
from LANforge.LFUtils import *
import realm
from realm import PortUtils
import argparse
import datetime
import time
from test_utility import CreateHTML
from test_utility import RuntimeUpdates
import pdfkit
import json
import re
import os
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
PortUtils = realm.PortUtils
test_utility = importlib.import_module("py-json.test_utility")
CreateHTML = test_utility.CreateHTML
RuntimeUpdates = test_utility.RuntimeUpdates
webconsole_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd())))
print(webconsole_dir)
class HTTPTest(LFCliBase):
def __init__(self, lfclient_host="localhost", lfclient_port=8080, radio="wiphy1", sta_prefix="sta", start_id=0, num_sta=2,
@@ -163,13 +164,13 @@ class HTTPTest(LFCliBase):
def main():
# This has --mgr, --mgr_port and --debug
parser = LFCliBase.create_bare_argparse(prog="layer3_test.py", formatter_class=argparse.RawTextHelpFormatter, epilog="About This Script")
parser = LFCliBase.create_basic_argparse(prog="layer4_test.py", formatter_class=argparse.RawTextHelpFormatter, epilog="About This Script")
# Adding More Arguments for custom use
parser.add_argument('--ssid', help='--ssid of DUT', default="WebAP")
parser.add_argument('--passwd', help='--passwd of dut', default="[BLANK]")
parser.add_argument('--radio', help='--radio to use on LANforge', default="wiphy1")
parser.add_argument('--security', help='--security of dut', default="open")
#parser.add_argument('--ssid', help='--ssid of DUT', default="WebAP")
#parser.add_argument('--passwd', help='--passwd of dut', default="[BLANK]")
#parser.add_argument('--radio', help='--radio to use on LANforge', default="wiphy1")
#parser.add_argument('--security', help='--security of dut', default="open")
parser.add_argument('--test_duration', help='--test_duration sets the duration of the test', default="1m")
parser.add_argument('--session_id', help='--session_id is for websocket', default="local")
parser.add_argument('--num_client', type=int, help='--num_sta is number of stations you want to create', default=2)

View File

@@ -162,23 +162,24 @@ reset_duration_max: 60000
bandsteer_always_5g: 0
"""
import sys
import os
import importlib
import argparse
import time
import json
from os import path
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
cvtest = cv_test_manager.cv_test
cv_add_base_parser = cv_test_manager.cv_add_base_parser
cv_base_adjust_parser = cv_test_manager.cv_base_adjust_parser
from cv_test_manager import cv_test as cvtest
from cv_test_manager import *
class ApAutoTest(cvtest):
def __init__(self,
@@ -186,6 +187,7 @@ class ApAutoTest(cvtest):
lf_port=8080,
lf_user="lanforge",
lf_password="lanforge",
ssh_port=22,
local_lf_report_dir="",
instance_name="ap_auto_instance",
config_name="ap_auto_config",
@@ -230,6 +232,7 @@ class ApAutoTest(cvtest):
self.raw_lines = raw_lines
self.raw_lines_file = raw_lines_file
self.sets = sets
self.ssh_port = ssh_port
self.graph_groups = graph_groups
self.local_lf_report_dir = local_lf_report_dir
@@ -284,14 +287,18 @@ class ApAutoTest(cvtest):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets,
self.pull_report, self.lf_host, self.lf_user, self.lf_password,
cv_cmds, graph_groups_file=self.graph_groups, local_lf_report_dir=self.local_lf_report_dir)
cv_cmds, ssh_port=self.ssh_port, local_lf_report_dir=self.local_lf_report_dir,
graph_groups_file=self.graph_groups)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
def main():
parser = argparse.ArgumentParser("""
parser = argparse.ArgumentParser(
prog="lf_ap_auto_test.py",
formatter_class=argparse.RawTextHelpFormatter,
description="""
Open this file in an editor and read the top notes for more details.
Example:
@@ -307,7 +314,7 @@ def main():
--set 'Skip 2.4Ghz Tests' 1 --set 'Skip 5Ghz Tests' 1 \
--set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Stability' 0 --set 'Band-Steering' 0 \
--set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Long-Term' 0 \
--test_rig Testbed-01 --pull_report \
--test_rig Testbed-01 --test_tag ATH10K --pull_report \
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
--influx_bucket ben \

View File

@@ -1,5 +1,4 @@
#!/usr/bin/env python3
"""
NAME: lf_atten_mod_test.py
@@ -19,22 +18,24 @@ Use './lf_atten_mod_test.py --help' to see command line usage and options
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
"""
import sys
import os
import importlib
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append('../py-json')
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
import argparse
from LANforge.lfcli_base import LFCliBase
from LANforge.LFUtils import *
from LANforge import LFUtils
import realm
import time
class CreateAttenuator(LFCliBase):
def __init__(self, host, port, serno, idx, val,
@@ -80,4 +81,4 @@ def main():
if __name__ == "__main__":
main()
main()

View File

@@ -0,0 +1,60 @@
{
"test_parameters":{
"test_timeout": 200,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.100.201",
"email_list_test": "chuck.rekiere@candelatech.com,logan.lipke@candelatech.com",
"host_ip_test": "192.168.100.201",
"lf_mgr": "192.168.100.116",
"email_title_txt": "Lanforge QA Testing CT-US-001",
"email_txt": "Lanforge QA Testing CT-US-001 "
},
"test_network":{
"http_test_ip": "10.40.0.10",
"ftp_test_ip": "10.40.0.10",
"test_ip": "192.168.0.104"
},
"test_generic":{
"radio_used": "wiphy1",
"ssid_used": "asus11ax-5",
"ssid_pw_used": "hello123",
"security_used": "wpa2",
"num_sta": 4,
"col_names": "name,tx_byptes,rx_bytes,dropped",
"upstream_port": "eth2"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"4","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"4","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"}
},
"test_suites":{
"suite_one":{
"create_l3":{"enabled":"TRUE","command":"create_l4.py","args":"--radio RADIO_USED --ssid SSID_USED --passwd SSID_PW_USED --security SECURITY_USED --debug"},
"create_l4":{"enabled":"TRUE","command":"create_l4.py","args":"RADIO_1_CFG --debug"},
"create_l4_2":{"enabled":"TRUE","command":"create_l4.py","args":"--radio wiphy1 --ssid ct523c-vap --passwd ct523c-vap --security wpa2 --debug"}
},
"suite_two":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"skip","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"suite_wc_dp_ig_dut":{
"CT-US-001_wifi_capacity_ATH10K(9984)":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name cicd-wct --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000 --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.sta0000,1.1.sta0001 --create_stations --radio wiphy1 --ssid asus11ax-5 --security wpa2 --paswd hello123 --test_rig CT-US-001 --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 --raw_line 'dut-model-num: ASUS RT-AX88U'"},
"CT-US-001_dataplane_ATH10K(9984)":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"--ghost_token 60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742 --ghost_host 192.168.100.153 --authors LANForge --grafana_host 192.168.100.201 --grafana_token eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ== --parent_folder REPORT_PATH --user_push lanforge --password_push lanforge --customer candela --grafana_bucket lanforge_qa_testing --kpi_to_ghost"}
},
"suite_wc_dp":{
"CT-US-001_wifi_capacity_ATH10K(9984)":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name cicd-wct --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000 --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.sta0000,1.1.sta0001 --create_stations --radio wiphy1 --ssid asus11ax-5 --security wpa2 --paswd hello123 --test_rig CT-US-001 --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"CT-US-001_dataplane_ATH10K(9984)":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"CT-US-001_wifi_capacity_AX210":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name cicd-wct --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000 --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.sta2000 --create_stations --radio wiphy2 --ssid asus11ax-5 --security wpa2 --paswd hello123 --test_rig CT-US-001 --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"CT-US-001_dataplane_AX210":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta2000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"--ghost_token 60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742 --ghost_host 192.168.100.153 --authors LANForge --grafana_host 192.168.100.201 --grafana_token eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ== --parent_folder REPORT_PATH --user_push lanforge --password_push lanforge --customer candela --grafana_bucket lanforge_qa_testing --kpi_to_ghost"}
}
}
}

1219
py-scripts/lf_check_orig.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -10,7 +10,7 @@ SETUP:
/lanforge/html-reports directory needs to be present or output generated in local file
EXAMPLE:
see: /py-scritps/lf_report_test.py for example
see: /py-scripts/lf_report_test.py for example
COPYWRITE
Copyright 2021 Candela Technologies Inc

Some files were not shown because too many files have changed in this diff Show More