lf_autogen rm

Signed-off-by: shivamcandela <shivam.thakur@candelatech.com>
This commit is contained in:
shivamcandela
2021-07-15 13:14:01 +05:30
53 changed files with 6146 additions and 1645 deletions

View File

@@ -79,7 +79,7 @@ def usage():
print("-s|--scheme (serial|telnet|ssh): connect to controller via serial, ssh or telnet") print("-s|--scheme (serial|telnet|ssh): connect to controller via serial, ssh or telnet")
print("--tty Serial port for accessing AP") print("--tty Serial port for accessing AP")
print("-l|--log file: log messages here") print("-l|--log file: log messages here")
print("-b|--band: a (5Ghz) or b (2.4Ghz) or abgn for dual-band 2.4Ghz AP") print("-b|--baud: serial baud rate")
print("-z|--action: action") print("-z|--action: action")
print("-h|--help") print("-h|--help")

View File

@@ -0,0 +1,46 @@
#!/bin/bash
# This bash script installs Influx, Grafana, and Ghost on Fedora or CentOS.
# Run this script as a normal user with sudo access.
# You need to provide your username at the beginning of the script.
# There are a few fields you will need to enter when this installs Ghost, and you will be prompted by the script.
# Many scripts in this library are built around Influx, Grafana, and Ghost. Influx is a time series database,
# Grafana has dashboards which display the data stored in Influx,
# and Ghost is a blogging platform which creates an easy way for a user to view automated reports which are built using LANforge scripts
# Once a user uses this script, the user can use those features with the credentials for the system this script sets up.
# After running this script, Grafana is at port 3000, Influx is at port 8086, and Ghost is at port 2368
# The user will need to login to those through a web browser to create login credentials, and find API tokens.
# These API tokens are needed to run many scripts in LANforge scripts with these three programs.
echo "Type in your username here"
read -r USER
# Prerequisites Ghost needs: the nginx web server and a Node.js runtime.
sudo yum install -y nginx nodejs
#Influx installation
wget https://dl.influxdata.com/influxdb/releases/influxdb2-2.0.4.x86_64.rpm
sudo yum localinstall -y influxdb2-2.0.4.x86_64.rpm
sudo systemctl start influxdb
# 'service' has no "enable" subcommand; use systemctl to enable at boot.
sudo systemctl enable influxdb
#Grafana installation
wget https://dl.grafana.com/oss/release/grafana-7.5.3-1.x86_64.rpm
sudo yum localinstall -y grafana-7.5.3-1.x86_64.rpm
sudo systemctl start grafana-server
sudo systemctl enable grafana-server
#Ghost installation
sudo adduser ghost
# On Fedora/CentOS the sudo-capable group is 'wheel', not 'sudo'.
sudo usermod -aG wheel ghost
# Fedora/CentOS ship firewalld rather than ufw: open HTTP/HTTPS for nginx.
sudo firewall-cmd --permanent --add-service=http
sudo firewall-cmd --permanent --add-service=https
sudo firewall-cmd --reload
# NodeSource publishes RPM setup scripts at rpm.nodesource.com (the deb URL
# used previously targets Debian/Ubuntu systems).
curl -sL https://rpm.nodesource.com/setup_14.x | sudo -E bash
# Ensure that NPM is up to date
npm cache verify
sudo npm install -g n
sudo n stable
sudo npm install ghost-cli@latest -g
sudo mkdir -p /var/www/ghostsite
sudo chown ${USER}:${USER} /var/www/ghostsite
sudo chmod 775 /var/www/ghostsite
# Abort rather than run 'ghost install' in the wrong directory if cd fails.
cd /var/www/ghostsite || exit 1
ghost install local

View File

@@ -0,0 +1,52 @@
#!/bin/bash
#This script installs Influx, Grafana, and Ghost on Ubuntu.
#Run this script as a normal user with sudo access.
#You need to provide your username at the beginning of the script.
#There are a few fields you will need to enter when it is installing Ghost, and you will be prompted by the script.
#Lanforge scripts is built around Influx, Grafana, and Ghost. Influx is a time series database,
#Grafana has dashboards which display the data stored in Influx,
#and Ghost is a blogging platform which creates an easy way for a user to view automated reports which are built using LANforge scripts
#Once a user uses this script, the user can use those features with the credentials for the system this script sets up.
#After running this script, Grafana is accessible through port 3000, Influx is at port 8086, and Ghost is accessible at 2368
#The user will need to login to those through a web browser to create login credentials, and find API tokens.
#These API tokens are needed to run many scripts in LANforge scripts with the functionality these three programs provide.
#Update necessary parts of system
echo "Type in your username here"
read -r USER
sudo apt-get update && sudo apt-get upgrade -y
sudo apt-get install nginx mysql-server nodejs npm -y
#Influx installation
wget https://dl.influxdata.com/influxdb/releases/influxdb2-2.0.7-amd64.deb
sudo dpkg -i influxdb2-2.0.7-amd64.deb
sudo systemctl unmask influxdb
sudo systemctl start influxdb
sudo systemctl enable influxdb
#Grafana installation
sudo apt-get install -y adduser libfontconfig1
wget https://dl.grafana.com/oss/release/grafana_8.0.5_amd64.deb
sudo dpkg -i grafana_8.0.5_amd64.deb
sudo systemctl start grafana-server
sudo systemctl enable grafana-server
#Ghost installation
sudo adduser ghost
sudo usermod -aG sudo ghost
sudo ufw allow 'Nginx Full'
# Configure the NodeSource repository once (this was previously run twice).
curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash
# Ensure that NPM is up to date
npm cache verify
sudo npm install -g n
sudo n stable
# Install the Ghost CLI once (this was previously installed twice).
sudo npm install ghost-cli@latest -g
sudo mkdir -p /var/www/ghostsite
sudo chown ${USER}:${USER} /var/www/ghostsite
sudo chmod 775 /var/www/ghostsite
# Abort rather than run 'ghost install' in the wrong directory if cd fails.
cd /var/www/ghostsite || exit 1
ghost install local

View File

@@ -3,7 +3,7 @@
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Class holds default settings for json requests to Ghost - # Class holds default settings for json requests to Ghost -
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import ast
import os import os
import sys import sys
@@ -14,18 +14,23 @@ if sys.version_info[0] != 3:
import requests import requests
import jwt import jwt
from datetime import datetime as date from datetime import datetime
from dateutil import tz
import json import json
import subprocess import subprocess
from scp import SCPClient from scp import SCPClient
import paramiko import paramiko
from GrafanaRequest import GrafanaRequest from GrafanaRequest import GrafanaRequest
from influx2 import RecordInflux
import time
from collections import Counter
import shutil
class CSVReader: class CSVReader:
def read_csv(self, def read_csv(self,
file, file,
sep=','): sep='\t'):
df = open(file).read().split('\n') df = open(file).read().split('\n')
rows = list() rows = list()
for x in df: for x in df:
@@ -44,6 +49,70 @@ class CSVReader:
values.append(row[index]) values.append(row[index])
return values return values
def get_columns(self, df, targets):
target_index = []
for item in targets:
target_index.append(df[0].index(item))
results = []
for row in df:
row_data = []
for x in target_index:
row_data.append(row[x])
results.append(row_data)
return results
def to_html(self, df):
html = ''
html = html + ('<table style="border:1px solid #ddd">'
'<colgroup>'
'<col style="width:25%">'
'<col style="width:25%">'
'<col style="width:50%">'
'</colgroup>'
'<tbody>'
'<tr>')
for row in df:
for item in row:
html = html + ('<td style="border:1px solid #ddd">%s</td>' % item)
html = html + ('</tr>\n<tr>')
html = html + ('</tbody>'
'</table>')
return html
def filter_df(self, df, column, expression, target):
target_index = df[0].index(column)
counter = 0
targets = [0]
for row in df:
try:
if expression == 'less than':
if float(row[target_index]) < target:
targets.append(counter)
counter += 1
else:
counter += 1
if expression == 'greater than':
if float(row[target_index]) > target:
targets.append(counter)
counter += 1
else:
counter += 1
if expression == 'greater than or equal to':
if float(row[target_index]) >= target:
targets.append(counter)
counter += 1
else:
counter += 1
except:
counter += 1
return list(map(df.__getitem__, targets))
def concat(self, dfs):
final_df = dfs[0]
for df in dfs[1:]:
final_df = final_df + df[1:]
return final_df
class GhostRequest: class GhostRequest:
def __init__(self, def __init__(self,
@@ -52,7 +121,12 @@ class GhostRequest:
_api_token=None, _api_token=None,
_overwrite='false', _overwrite='false',
debug_=False, debug_=False,
die_on_error_=False): die_on_error_=False,
influx_host=None,
influx_port=8086,
influx_org=None,
influx_token=None,
influx_bucket=None):
self.debug = debug_ self.debug = debug_
self.die_on_error = die_on_error_ self.die_on_error = die_on_error_
self.ghost_json_host = _ghost_json_host self.ghost_json_host = _ghost_json_host
@@ -64,6 +138,11 @@ class GhostRequest:
self.api_token = _api_token self.api_token = _api_token
self.images = list() self.images = list()
self.pdfs = list() self.pdfs = list()
self.influx_host = influx_host
self.influx_port = influx_port
self.influx_org = influx_org
self.influx_token = influx_token
self.influx_bucket = influx_bucket
def encode_token(self): def encode_token(self):
@@ -71,7 +150,7 @@ class GhostRequest:
key_id, secret = self.api_token.split(':') key_id, secret = self.api_token.split(':')
# Prepare header and payload # Prepare header and payload
iat = int(date.now().timestamp()) iat = int(datetime.now().timestamp())
header = {'alg': 'HS256', 'typ': 'JWT', 'kid': key_id} header = {'alg': 'HS256', 'typ': 'JWT', 'kid': key_id}
payload = { payload = {
@@ -147,111 +226,265 @@ class GhostRequest:
tags='custom', tags='custom',
authors=authors) authors=authors)
def wifi_capacity_to_ghost(self, def list_append(self, list_1, value):
authors, list_1.append(value)
folders,
title=None, def kpi_to_ghost(self,
server_pull=None, authors,
ghost_host=None, folders,
port='22', parent_folder=None,
user_pull='lanforge', title=None,
password_pull='lanforge', server_pull=None,
user_push=None, ghost_host=None,
password_push=None, port=22,
customer=None, user_push=None,
testbed='Unknown Testbed', password_push=None,
test_run=None, customer=None,
target_folders=list(), testbed=None,
grafana_dashboard=None, test_run=None,
grafana_token=None, target_folders=list(),
grafana_host=None, grafana_token=None,
grafana_port=3000): grafana_host=None,
grafana_port=3000,
grafana_datasource='InfluxDB',
grafana_bucket=None):
global dut_hw, dut_sw, dut_model, dut_serial
now = datetime.now()
text = '' text = ''
csvreader = CSVReader() csvreader = CSVReader()
if test_run is None: if grafana_token is not None:
test_run = sorted(folders)[0].split('/')[-1].strip('/') grafana = GrafanaRequest(grafana_token,
for folder in folders: grafana_host,
print(folder) grafanajson_port=grafana_port
ssh_pull = paramiko.SSHClient() )
ssh_pull.set_missing_host_key_policy(paramiko.client.AutoAddPolicy) if self.debug:
ssh_pull.connect(server_pull, print('Folders: %s' % folders)
port,
username=user_pull, ssh_push = paramiko.SSHClient()
password=password_pull, ssh_push.set_missing_host_key_policy(paramiko.client.AutoAddPolicy)
allow_agent=False, ssh_push.connect(ghost_host,
look_for_keys=False) port,
scp_pull = SCPClient(ssh_pull.get_transport()) username=user_push,
scp_pull.get(folder, recursive=True) password=password_push,
target_folder = str(folder).rstrip('/').split('/')[-1] allow_agent=False,
target_folders.append(target_folder) look_for_keys=False)
print(target_folder) scp_push = SCPClient(ssh_push.get_transport())
if parent_folder is not None:
print("parent_folder %s" % parent_folder)
files = os.listdir(parent_folder)
print(files)
for file in files:
if os.path.isdir(parent_folder + '/' + file) is True:
if os.path.exists(file):
shutil.rmtree(file)
shutil.copytree(parent_folder + '/' + file, file)
target_folders.append(file)
print('Target folders: %s' % target_folders)
else:
for folder in folders:
if self.debug:
print(folder)
target_folders.append(folder)
testbeds = list()
pdfs = list()
high_priority_list = list()
low_priority_list = list()
images = list()
times = list()
test_pass_fail = list()
for target_folder in target_folders:
try: try:
target_file = '%s/kpi.csv' % target_folder target_file = '%s/kpi.csv' % target_folder
print('target file %s' % target_file)
df = csvreader.read_csv(file=target_file, sep='\t') df = csvreader.read_csv(file=target_file, sep='\t')
csv_testbed = csvreader.get_column(df, 'test-rig')[0] test_rig = csvreader.get_column(df, 'test-rig')[0]
print(csv_testbed) pass_fail = Counter(csvreader.get_column(df, 'pass/fail'))
except: test_pass_fail.append(pass_fail)
pass dut_hw = csvreader.get_column(df, 'dut-hw-version')[0]
if len(csv_testbed) > 2: dut_sw = csvreader.get_column(df, 'dut-sw-version')[0]
testbed = csv_testbed dut_model = csvreader.get_column(df, 'dut-model-num')[0]
text = text + 'Testbed: %s<br />' % testbed dut_serial = csvreader.get_column(df, 'dut-serial-num')[0]
if testbed == 'Unknown Testbed': duts = [dut_serial, dut_hw, dut_sw, dut_model, test_rig]
raise UserWarning('Please define your testbed') times_append = csvreader.get_column(df, 'Date')
print('testbed %s' % testbed) for target_time in times_append:
times.append(float(target_time) / 1000)
if pass_fail['PASS'] + pass_fail['FAIL'] > 0:
text = text + 'Tests passed: %s<br />' % pass_fail['PASS']
text = text + 'Tests failed: %s<br />' % pass_fail['FAIL']
text = text + 'Percentage of tests passed: %s<br />' % (
pass_fail['PASS'] / (pass_fail['PASS'] + pass_fail['FAIL']))
else:
text = text + 'Tests passed: 0<br />' \
'Tests failed : 0<br />' \
'Percentage of tests passed: Not Applicable<br />'
except:
print("Failure")
target_folders.remove(target_folder)
break
testbeds.append(test_rig)
if testbed is None:
testbed = test_rig
if test_run is None:
test_run = now.strftime('%B-%d-%Y-%I-%M-%p-report')
ssh_push = paramiko.SSHClient()
ssh_push.set_missing_host_key_policy(paramiko.client.AutoAddPolicy)
ssh_push.connect(ghost_host,
port,
username=user_push,
password=password_push,
allow_agent=False,
look_for_keys=False)
scp_push = SCPClient(ssh_push.get_transport())
local_path = '/home/%s/%s/%s/%s' % (user_push, customer, testbed, test_run) local_path = '/home/%s/%s/%s/%s' % (user_push, customer, testbed, test_run)
transport = paramiko.Transport((ghost_host, port))
transport = paramiko.Transport(ghost_host, port)
transport.connect(None, user_push, password_push) transport.connect(None, user_push, password_push)
sftp = paramiko.sftp_client.SFTPClient.from_transport(transport) sftp = paramiko.sftp_client.SFTPClient.from_transport(transport)
print(local_path)
if self.debug:
print(local_path)
print(target_folder)
try:
sftp.mkdir('/home/%s/%s/%s' % (user_push, customer, testbed))
except:
pass
try: try:
sftp.mkdir(local_path) sftp.mkdir(local_path)
except: except:
print('folder %s already exists' % local_path) pass
scp_push.put(target_folder, recursive=True, remote_path=local_path) scp_push.put(target_folder, local_path, recursive=True)
files = sftp.listdir(local_path + '/' + target_folder) files = sftp.listdir(local_path + '/' + target_folder)
# print('Files: %s' % files)
for file in files: for file in files:
if 'pdf' in file: if 'pdf' in file:
url = 'http://%s/%s/%s/%s/%s/%s' % ( url = 'http://%s/%s/%s/%s/%s/%s' % (
ghost_host, customer.strip('/'), testbed, test_run, target_folder, file) ghost_host, customer.strip('/'), testbed, test_run, target_folder, file)
text = text + 'PDF of results: <a href="%s">%s</a><br />' % (url, file) pdfs.append('PDF of results: <a href="%s">%s</a><br />' % (url, file))
print(url)
scp_pull.close()
scp_push.close() scp_push.close()
self.upload_images(target_folder) self.upload_images(target_folder)
for image in self.images: for image in self.images:
if 'kpi-' in image: if 'kpi-' in image:
if '-print' not in image: if '-print' not in image:
text = text + '<img src="%s"></img>' % image images.append('<img src="%s"></img>' % image)
self.images = [] self.images = []
if grafana_token is not None: results = csvreader.get_columns(df, ['short-description', 'numeric-score', 'test details', 'pass/fail',
GR = GrafanaRequest(grafana_token, 'test-priority'])
grafana_host,
grafanajson_port=grafana_port
)
GR.create_snapshot(title=grafana_dashboard)
snapshot = GR.list_snapshots()[-1]
text = text + '<iframe src="%s" width="100%s" height=500></iframe>' % (snapshot['externalUrl'], '%')
now = date.now() results[0] = ['Short Description', 'Score', 'Test Details', 'Pass or Fail', 'test-priority']
low_priority = csvreader.filter_df(results, 'test-priority', 'less than', 94)
high_priority = csvreader.filter_df(results, 'test-priority', 'greater than or equal to', 95)
high_priority_list.append(high_priority)
low_priority_list.append(low_priority)
test_pass_fail_results = sum((Counter(test) for test in test_pass_fail), Counter())
end_time = max(times)
start_time = '2021-07-01'
end_time = datetime.utcfromtimestamp(end_time)#.strftime('%Y-%m-%d %H:%M:%S')
now = time.time()
offset = datetime.fromtimestamp(now) - datetime.utcfromtimestamp(now)
end_time = end_time + offset
high_priority = csvreader.concat(high_priority_list)
low_priority = csvreader.concat(low_priority_list)
high_priority = csvreader.get_columns(high_priority,
['Short Description', 'Score', 'Test Details'])
low_priority = csvreader.get_columns(low_priority,
['Short Description', 'Score', 'Test Details'])
high_priority.append(['Total Passed', test_pass_fail_results['PASS'], 'Total subtests passed during this run'])
high_priority.append(['Total Failed', test_pass_fail_results['FAIL'], 'Total subtests failed during this run'])
if title is None: if title is None:
title = "%s %s %s %s:%s report" % (now.day, now.month, now.year, now.hour, now.minute) title = end_time.strftime('%B %d, %Y %I:%M %p report')
if grafana_dashboard is not None: # create Grafana Dashboard
pass target_files = []
for folder in target_folders:
target_files.append(folder.split('/')[-1] + '/kpi.csv')
if self.debug:
print('Target files: %s' % target_files)
grafana.create_custom_dashboard(target_csvs=target_files,
title=title,
datasource=grafana_datasource,
bucket=grafana_bucket,
from_date=start_time,
to_date=end_time.strftime('%Y-%m-%d %H:%M:%S'),
pass_fail='GhostRequest',
testbed=testbeds[0])
if self.influx_token is not None:
influxdb = RecordInflux(_influx_host=self.influx_host,
_influx_port=self.influx_port,
_influx_org=self.influx_org,
_influx_token=self.influx_token,
_influx_bucket=self.influx_bucket)
short_description = 'Ghost Post Tests passed' # variable name
numeric_score = test_pass_fail_results['PASS'] # value
tags = dict()
print(datetime.utcfromtimestamp(max(times)))
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'PASS'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
short_description = 'Ghost Post Tests failed' # variable name
numeric_score = test_pass_fail_results['FAIL'] # value
tags = dict()
tags['testbed'] = testbeds[0]
tags['script'] = 'GhostRequest'
tags['Graph-Group'] = 'FAIL'
date = datetime.utcfromtimestamp(max(times)).isoformat()
influxdb.post_to_influx(short_description, numeric_score, tags, date)
text = 'Testbed: %s<br />' % testbeds[0]
dut_table = '<table width="700px" border="1" cellpadding="2" cellspacing="0" ' \
'style="border-color: gray; border-style: solid; border-width: 1px; "><tbody>' \
'<tr><th colspan="2">Test Information</th></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Testbed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT_HW</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT_SW</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT model</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">DUT Serial</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Tests passed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' \
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">Tests failed</td>' \
'<td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' % (
duts[4], duts[1], duts[2], duts[3], duts[0], test_pass_fail_results['PASS'],
test_pass_fail_results['FAIL'])
dut_table = dut_table + '</tbody></table>'
text = text + dut_table
for pdf in pdfs:
print(pdf)
text = text + pdf
for image in images:
text = text + image
text = text + 'High priority results: %s' % csvreader.to_html(high_priority)
if grafana_token is not None:
# get the details of the dashboard through the API, and set the end date to the youngest KPI
grafana.list_dashboards()
grafana.create_snapshot(title='Testbed: ' + title)
time.sleep(3)
snapshot = grafana.list_snapshots()[-1]
text = text + '<iframe src="http://%s:3000/dashboard/snapshot/%s" width="100%s" height=1500></iframe><br />' % (
grafana_host, snapshot['key'], '%')
text = text + 'Low priority results: %s' % csvreader.to_html(low_priority)
self.create_post(title=title, self.create_post(title=title,
text=text, text=text,

View File

@@ -12,6 +12,35 @@ if sys.version_info[0] != 3:
import requests import requests
import json import json
import string
import random
class CSVReader:
def __init__(self):
self.shape = None
def read_csv(self,
file,
sep='\t'):
df = open(file).read().split('\n')
rows = list()
for x in df:
if len(x) > 0:
rows.append(x.split(sep))
length = list(range(0, len(df[0])))
columns = dict(zip(df[0], length))
self.shape = (length, columns)
return rows
def get_column(self,
df,
value):
index = df[0].index(value)
values = []
for row in df[1:]:
values.append(row[index])
return values
class GrafanaRequest: class GrafanaRequest:
@@ -35,6 +64,8 @@ class GrafanaRequest:
self.grafanajson_url = "http://%s:%s" % (_grafanajson_host, grafanajson_port) self.grafanajson_url = "http://%s:%s" % (_grafanajson_host, grafanajson_port)
self.data = dict() self.data = dict()
self.data['overwrite'] = _overwrite self.data['overwrite'] = _overwrite
self.csvreader = CSVReader()
self.units = dict()
def create_bucket(self, def create_bucket(self,
bucket_name=None): bucket_name=None):
@@ -45,7 +76,7 @@ class GrafanaRequest:
def list_dashboards(self): def list_dashboards(self):
url = self.grafanajson_url + '/api/search' url = self.grafanajson_url + '/api/search'
print(url) print(url)
return json.loads(requests.get(url,headers=self.headers).text) return json.loads(requests.get(url, headers=self.headers).text)
def create_dashboard(self, def create_dashboard(self,
dashboard_name=None, dashboard_name=None,
@@ -77,32 +108,277 @@ class GrafanaRequest:
datastore['dashboard'] = dashboard datastore['dashboard'] = dashboard
datastore['overwrite'] = False datastore['overwrite'] = False
data = json.dumps(datastore, indent=4) data = json.dumps(datastore, indent=4)
#return print(data)
return requests.post(grafanajson_url, headers=self.headers, data=data, verify=False) return requests.post(grafanajson_url, headers=self.headers, data=data, verify=False)
def create_dashboard_from_dict(self, def create_dashboard_from_dict(self,
dictionary=None): dictionary=None,
overwrite=False):
grafanajson_url = self.grafanajson_url + '/api/dashboards/db' grafanajson_url = self.grafanajson_url + '/api/dashboards/db'
datastore = dict() datastore = dict()
dashboard = dict(json.loads(dictionary)) dashboard = dict(json.loads(dictionary))
datastore['dashboard'] = dashboard datastore['dashboard'] = dashboard
datastore['overwrite'] = False datastore['overwrite'] = overwrite
data = json.dumps(datastore, indent=4) data = json.dumps(datastore, indent=4)
#return print(data)
return requests.post(grafanajson_url, headers=self.headers, data=data, verify=False) return requests.post(grafanajson_url, headers=self.headers, data=data, verify=False)
def get_graph_groups(self, target_csvs): # Get the unique values in the Graph-Group column
dictionary = dict()
for target_csv in target_csvs:
if len(target_csv) > 1:
csv = self.csvreader.read_csv(target_csv)
# Unique values in the test-id column
scripts = list(set(self.csvreader.get_column(csv, 'test-id')))
# we need to make sure we match each Graph Group to the script it occurs in
for script in scripts:
# Unique Graph Groups for each script
graph_groups = self.csvreader.get_column(csv, 'Graph-Group')
dictionary[script] = list(set(graph_groups))
units = self.csvreader.get_column(csv, 'Units')
self.units[script] = dict()
for index in range(0, len(graph_groups)):
self.units[script][graph_groups[index]] = units[index]
print(dictionary)
return dictionary
def maketargets(self,
bucket,
scriptname,
groupBy,
index,
graph_group,
testbed):
query = (
'from(bucket: "%s")\n '
'|> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n '
'|> filter(fn: (r) => r["script"] == "%s")\n '
'|> group(columns: ["_measurement"])\n '
% (bucket, scriptname))
queryend = ('|> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)\n '
'|> yield(name: "mean")\n ')
if graph_group is not None:
graphgroup = ('|> filter(fn: (r) => r["Graph-Group"] == "%s")\n' % graph_group)
query += graphgroup
if testbed is not None:
query += ('|> filter(fn: (r) => r["testbed"] == "%s")\n' % testbed)
targets = dict()
targets['delimiter'] = ','
targets['groupBy'] = groupBy
targets['header'] = True
targets['ignoreUnknown'] = False
targets['orderByTime'] = 'ASC'
targets['policy'] = 'default'
targets['query'] = query + queryend
targets['refId'] = dict(enumerate(string.ascii_uppercase, 1))[index + 1]
targets['resultFormat'] = "time_series"
targets['schema'] = list()
targets['skipRows'] = 0
targets['tags'] = list()
return targets
def groupby(self, params, grouptype):
dic = dict()
dic['params'] = list()
dic['params'].append(params)
dic['type'] = grouptype
return dic
def create_custom_dashboard(self, def create_custom_dashboard(self,
datastore=None): scripts=None,
data = json.dumps(datastore, indent=4) title=None,
return requests.post(self.grafanajson_url, headers=self.headers, data=data, verify=False) bucket=None,
graph_groups=None,
graph_groups_file=None,
target_csvs=None,
testbed=None,
datasource='InfluxDB',
from_date='now-1y',
to_date='now',
graph_height=8,
graph__width=12,
pass_fail=None):
options = string.ascii_lowercase + string.ascii_uppercase + string.digits
uid = ''.join(random.choice(options) for i in range(9))
input1 = dict()
annotations = dict()
annotations['builtIn'] = 1
annotations['datasource'] = '-- Grafana --'
annotations['enable'] = True
annotations['hide'] = True
annotations['iconColor'] = 'rgba(0, 211, 255, 1)'
annotations['name'] = 'Annotations & Alerts'
annotations['type'] = 'dashboard'
annot = dict()
annot['list'] = list()
annot['list'].append(annotations)
templating = dict()
templating['list'] = list()
timedict = dict()
timedict['from'] = from_date
timedict['to'] = to_date
panels = list()
index = 1
if graph_groups_file:
print("graph_groups_file: %s" % graph_groups_file)
target_csvs = open(graph_groups_file).read().split('\n')
graph_groups = self.get_graph_groups(
target_csvs) # Get the list of graph groups which are in the tests we ran
if target_csvs:
print('Target CSVs: %s' % target_csvs)
graph_groups = self.get_graph_groups(
target_csvs) # Get the list of graph groups which are in the tests we ran
if pass_fail is not None:
graph_groups[pass_fail] = ['PASS', 'FAIL']
for scriptname in graph_groups.keys():
print(scriptname)
for graph_group in graph_groups[scriptname]:
panel = dict()
gridpos = dict()
gridpos['h'] = graph_height
gridpos['w'] = graph__width
gridpos['x'] = 0
gridpos['y'] = 0
legend = dict()
legend['avg'] = False
legend['current'] = False
legend['max'] = False
legend['min'] = False
legend['show'] = True
legend['total'] = False
legend['values'] = False
options = dict()
options['alertThreshold'] = True
groupBy = list()
groupBy.append(self.groupby('$__interval', 'time'))
groupBy.append(self.groupby('null', 'fill'))
targets = list()
counter = 0
new_target = self.maketargets(bucket, scriptname, groupBy, counter, graph_group, testbed)
targets.append(new_target)
fieldConfig = dict()
fieldConfig['defaults'] = dict()
fieldConfig['overrides'] = list()
transformation = dict()
transformation['id'] = "renameByRegex"
transformation_options = dict()
transformation_options['regex'] = "(.*) value.*"
transformation_options['renamePattern'] = "$1"
transformation['options'] = transformation_options
xaxis = dict()
xaxis['buckets'] = None
xaxis['mode'] = "time"
xaxis['name'] = None
xaxis['show'] = True
xaxis['values'] = list()
yaxis = dict()
yaxis['format'] = 'short'
try:
yaxis['label'] = self.units[scriptname][graph_group]
except:
pass
yaxis['logBase'] = 1
yaxis['max'] = None
yaxis['min'] = None
yaxis['show'] = True
yaxis1 = dict()
yaxis1['align'] = False
yaxis1['alignLevel'] = None
panel['aliasColors'] = dict()
panel['bars'] = False
panel['dashes'] = False
panel['dashLength'] = 10
panel['datasource'] = datasource
panel['fieldConfig'] = fieldConfig
panel['fill'] = 0
panel['fillGradient'] = 0
panel['gridPos'] = gridpos
panel['hiddenSeries'] = False
panel['id'] = index
panel['legend'] = legend
panel['lines'] = True
panel['linewidth'] = 1
panel['nullPointMode'] = 'null'
panel['options'] = options
panel['percentage'] = False
panel['pluginVersion'] = '7.5.4'
panel['pointradius'] = 2
panel['points'] = True
panel['renderer'] = 'flot'
panel['seriesOverrides'] = list()
panel['spaceLength'] = 10
panel['stack'] = False
panel['steppedLine'] = False
panel['targets'] = targets
panel['thresholds'] = list()
panel['timeFrom'] = None
panel['timeRegions'] = list()
panel['timeShift'] = None
if graph_group is not None:
panel['title'] = scriptname + ' ' + graph_group
else:
panel['title'] = scriptname
if 'PASS' in panel['title']:
panel['title'] = 'Total Passed'
if 'FAIL' in panel['title']:
panel['title'] = 'Total Failed'
panel['transformations'] = list()
panel['transformations'].append(transformation)
panel['type'] = "graph"
panel['xaxis'] = xaxis
panel['yaxes'] = list()
panel['yaxes'].append(yaxis)
panel['yaxes'].append(yaxis)
panel['yaxis'] = yaxis1
panels.append(panel)
index = index + 1
input1['annotations'] = annot
input1['editable'] = True
input1['gnetId'] = None
input1['graphTooltip'] = 0
input1['links'] = list()
input1['panels'] = panels
input1['refresh'] = False
input1['schemaVersion'] = 27
input1['style'] = 'dark'
input1['tags'] = list()
input1['templating'] = templating
input1['time'] = timedict
input1['timepicker'] = dict()
input1['timezone'] = ''
input1['title'] = ("Testbed: %s" % title)
input1['uid'] = uid
input1['version'] = 11
return self.create_dashboard_from_dict(dictionary=json.dumps(input1))
# def create_custom_dashboard(self,
# datastore=None):
# data = json.dumps(datastore, indent=4)
# return requests.post(self.grafanajson_url, headers=self.headers, data=data, verify=False)
def create_snapshot(self, title): def create_snapshot(self, title):
print('create snapshot')
grafanajson_url = self.grafanajson_url + '/api/snapshots' grafanajson_url = self.grafanajson_url + '/api/snapshots'
data=self.get_dashboard(title) data = self.get_dashboard(title)
data['expires'] = 3600 data['expires'] = 360000
data['external'] = True data['external'] = False
print(data) data['timeout'] = 15
if self.debug:
print(data)
return requests.post(grafanajson_url, headers=self.headers, json=data, verify=False).text return requests.post(grafanajson_url, headers=self.headers, json=data, verify=False).text
def list_snapshots(self): def list_snapshots(self):
@@ -112,9 +388,21 @@ class GrafanaRequest:
def get_dashboard(self, target): def get_dashboard(self, target):
dashboards = self.list_dashboards() dashboards = self.list_dashboards()
print(target)
for dashboard in dashboards: for dashboard in dashboards:
if dashboard['title'] == target: if dashboard['title'] == target:
uid = dashboard['uid'] uid = dashboard['uid']
grafanajson_url = self.grafanajson_url + '/api/dashboards/uid/' + uid grafanajson_url = self.grafanajson_url + '/api/dashboards/uid/' + uid
print(grafanajson_url) print(grafanajson_url)
return json.loads(requests.get(grafanajson_url, headers=self.headers, verify=False).text) return json.loads(requests.get(grafanajson_url, headers=self.headers, verify=False).text)
def get_units(self, csv):
df = self.csvreader.read_csv(csv)
units = self.csvreader.get_column(df, 'Units')
test_id = self.csvreader.get_column(df, 'test-id')
maxunit = max(set(units), key=units.count)
maxtest = max(set(test_id), key=test_id.count)
d = dict()
d[maxunit] = maxtest
print(maxunit, maxtest)
return d

View File

@@ -746,4 +746,58 @@ def exec_wrap(cmd):
print("\nError with '" + cmd + "', bye\n") print("\nError with '" + cmd + "', bye\n")
exit(1) exit(1)
def expand_endp_histogram(distribution_payload=None):
"""
Layer 3 endpoints can contain DistributionPayloads that appear like
"rx-silence-5m" : {
# "histo_category_width" : 1,
# "histogram" : [
# 221,
# 113,
# 266,
# 615,
# 16309,
# 56853,
# 7954,
# 1894,
# 29246,
# 118,
# 12,
# 2,
# 0,
# 0,
# 0,
# 0
# ],
# "time window ms" : 300000,
# "window avg" : 210.285,
# "window max" : 228,
# "window min" : 193
These histogbrams are a set of linear categorys roughly power-of-two categories.
:param distribution_payload: dictionary requiring histo_category_width and histogram
:return: dictionary containing expanded category ranges and values for categories
"""
if distribution_payload is None:
return None
if ("histogram" not in distribution_payload) \
or ("histo_category_width" not in distribution_payload):
raise ValueError("Unexpected histogram format.")
multiplier = int(distribution_payload["histo_category_width"])
formatted_dict = {
#"00000 <= x <= 00001" : "0"
}
for bucket_index in range(len(distribution_payload["histogram"]) - 1):
pow1 = (2**bucket_index) * multiplier
pow2 = (2**(bucket_index+1)) * multiplier
if bucket_index == 0:
category_name = "00000 <= x <= {:-05.0f}".format(pow2)
else:
category_name = "{:-05.0f} < x <= {:-05.0f}".format(pow1, pow2)
formatted_dict[category_name] = distribution_payload["histogram"][bucket_index]
pprint.pprint([("historgram", distribution_payload["histogram"]),
("formatted", formatted_dict)])
return formatted_dict
### ###

View File

@@ -1,17 +1,12 @@
#!/usr/bin/python3 #!/usr/bin/python3
# Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python # Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python
# Written by Candela Technologies Inc. # Written by Candela Technologies Inc.
# Updated by: # Updated by: Erin Grimes
import sys import sys
import urllib import urllib
if sys.version_info[0] != 3: if sys.version_info[0] != 3:
print("This script requires Python 3") print("This script requires Python 3")
exit() exit()
import time import time
from time import sleep from time import sleep
from urllib import error from urllib import error
@@ -22,19 +17,22 @@ from LANforge import LFUtils
from LANforge.LFUtils import NA from LANforge.LFUtils import NA
j_printer = pprint.PrettyPrinter(indent=2) j_printer = pprint.PrettyPrinter(indent=2)
# typically you're using resource 1 in stand alone realm # todo: this needs to change
resource_id = 1 resource_id = 1
def main(base_url="http://localhost:8080"): def main(base_url="http://localhost:8080", args={}):
json_post = "" json_post = ""
json_response = "" json_response = ""
num_wanlinks = -1 num_wanlinks = -1
# see if there are old wanlinks to remove # see if there are old wanlinks to remove
lf_r = LFRequest.LFRequest(base_url+"/wl/list") lf_r = LFRequest.LFRequest(base_url+"/wl/list")
print(lf_r.get_as_json()) print(lf_r.get_as_json())
# ports to set as endpoints
port_a ="rd0a" port_a ="rd0a"
port_b ="rd1a" port_b ="rd1a"
try: try:
json_response = lf_r.getAsJson() json_response = lf_r.getAsJson()
LFUtils.debug_printer.pprint(json_response) LFUtils.debug_printer.pprint(json_response)
@@ -71,13 +69,12 @@ def main(base_url="http://localhost:8080"):
# create wanlink 1a # create wanlink 1a
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp") lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp")
lf_r.addPostData({ lf_r.addPostData({
'alias': 'wl_eg1-A', 'alias': 'wl_eg1-A',
'shelf': 1, 'shelf': 1,
'resource': '1', 'resource': '1',
'port': port_a, 'port': port_a,
'latency': '75', 'latency': args['latency_A'],
'max_rate': '128000', 'max_rate': args['rate_A']
'description': 'cookbook-example'
}) })
lf_r.jsonPost() lf_r.jsonPost()
sleep(0.05) sleep(0.05)
@@ -85,13 +82,12 @@ def main(base_url="http://localhost:8080"):
# create wanlink 1b # create wanlink 1b
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp") lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp")
lf_r.addPostData({ lf_r.addPostData({
'alias': 'wl_eg1-B', 'alias': 'wl_eg1-B',
'shelf': 1, 'shelf': 1,
'resource': '1', 'resource': '1',
'port': port_b, 'port': port_b,
'latency': '95', 'latency': args['latency_B'],
'max_rate': '256000', 'max_rate': args['rate_B']
'description': 'cookbook-example'
}) })
lf_r.jsonPost() lf_r.jsonPost()
sleep(0.05) sleep(0.05)
@@ -134,6 +130,7 @@ def main(base_url="http://localhost:8080"):
continue continue
print("starting wanlink:") print("starting wanlink:")
# print("the latency is {laten}".format(laten=latency))
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state") lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({ lf_r.addPostData({
'test_mgr': 'all', 'test_mgr': 'all',
@@ -163,25 +160,7 @@ def main(base_url="http://localhost:8080"):
print("Error code "+error.code) print("Error code "+error.code)
continue continue
print("Wanlink is running, wait one sec...") print("Wanlink is running")
sleep(1)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Now we can alter the delay and speed of the wanlink by
# updating its endpoints see https://www.candelatech.com/lfcli_ug.php#set_wanlink_info
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
print("Updating Wanlink...")
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info")
lf_r.addPostData({
'name': 'wl_eg1-A',
'speed': 265333,
'latency': 30,
'reorder_freq': 3200, # thats 3200/1000000
'drop_freq': 2000, # 2000/1000000
'dup_freq': 1325, # 1325/1000000
'jitter_freq': 25125, # 25125/1000000
})
lf_r.jsonPost()
sleep(1)
# stop wanlink # stop wanlink
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state") lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
@@ -214,22 +193,19 @@ def main(base_url="http://localhost:8080"):
print("Wanlink is stopped.") print("Wanlink is stopped.")
print("Wanlink info:") # print("Wanlink info:")
lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1") # lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
json_response = lf_r.getAsJson() # json_response = lf_r.getAsJson()
LFUtils.debug_printer.pprint(json_response) # LFUtils.debug_printer.pprint(json_response)
lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-A") # lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-A")
json_response = lf_r.getAsJson() # json_response = lf_r.getAsJson()
LFUtils.debug_printer.pprint(json_response) # LFUtils.debug_printer.pprint(json_response)
lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-B") # lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-B")
json_response = lf_r.getAsJson() # json_response = lf_r.getAsJson()
LFUtils.debug_printer.pprint(json_response) # LFUtils.debug_printer.pprint(json_response)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__': if __name__ == '__main__':
main() main()
###
###

View File

@@ -5,13 +5,12 @@ Note: This script is working as library for chamberview tests.
import time import time
from LANforge.lfcli_base import LFCliBase
from realm import Realm from realm import Realm
import json import json
from pprint import pprint from pprint import pprint
import argparse
from cv_test_reports import lanforge_reports as lf_rpt from cv_test_reports import lanforge_reports as lf_rpt
from csv_to_influx import * from csv_to_influx import *
import os.path
def cv_base_adjust_parser(args): def cv_base_adjust_parser(args):
@@ -67,12 +66,14 @@ class cv_test(Realm):
def __init__(self, def __init__(self,
lfclient_host="localhost", lfclient_host="localhost",
lfclient_port=8080, lfclient_port=8080,
report_dir="" lf_report_dir="",
debug=False
): ):
super().__init__(lfclient_host=lfclient_host, super().__init__(lfclient_host=lfclient_host,
lfclient_port=lfclient_port) lfclient_port=lfclient_port)
self.report_dir = report_dir self.lf_report_dir = lf_report_dir
self.report_name = None self.report_name = None
self.debug = debug
# Add a config line to a text blob. Will create new text blob # Add a config line to a text blob. Will create new text blob
# if none exists already. # if none exists already.
@@ -127,7 +128,7 @@ class cv_test(Realm):
"cmd": command "cmd": command
} }
debug_par = "" debug_par = ""
rsp = self.json_post("/gui-json/cmd%s" % debug_par, data, debug_=False, response_json_list_=response_json) rsp = self.json_post("/gui-json/cmd%s" % debug_par, data, debug_=self.debug, response_json_list_=response_json)
try: try:
if response_json[0]["LAST"]["warnings"].startswith("Unknown"): if response_json[0]["LAST"]["warnings"].startswith("Unknown"):
print("Unknown command?\n"); print("Unknown command?\n");
@@ -286,7 +287,7 @@ class cv_test(Realm):
# cv_cmds: Array of raw chamber-view commands, such as "cv click 'button-name'" # cv_cmds: Array of raw chamber-view commands, such as "cv click 'button-name'"
# These (and the sets) are applied after the test is created and before it is started. # These (and the sets) are applied after the test is created and before it is started.
def create_and_run_test(self, load_old_cfg, test_name, instance_name, config_name, sets, def create_and_run_test(self, load_old_cfg, test_name, instance_name, config_name, sets,
pull_report, lf_host, lf_user, lf_password, cv_cmds, local_path="", ssh_port=22, pull_report, lf_host, lf_user, lf_password, cv_cmds, local_lf_report_dir="", ssh_port=22,
graph_groups_file=None): graph_groups_file=None):
load_old = "false" load_old = "false"
if load_old_cfg: if load_old_cfg:
@@ -349,12 +350,12 @@ class cv_test(Realm):
filelocation.write(location + '/kpi.csv\n') filelocation.write(location + '/kpi.csv\n')
filelocation.close() filelocation.close()
print(location) print(location)
self.report_dir = location self.lf_report_dir = location
if pull_report: if pull_report:
try: try:
print(lf_host) print(lf_host)
report.pull_reports(hostname=lf_host, username=lf_user, password=lf_password, report.pull_reports(hostname=lf_host, username=lf_user, password=lf_password,
port=ssh_port, local_path=local_path, port=ssh_port, report_dir=local_lf_report_dir,
report_location=location) report_location=location)
except Exception as e: except Exception as e:
print("SCP failed, user %s, password %s, dest %s", (lf_user, lf_password, lf_host)) print("SCP failed, user %s, password %s, dest %s", (lf_user, lf_password, lf_host))
@@ -385,7 +386,7 @@ class cv_test(Realm):
# Takes cmd-line args struct or something that looks like it. # Takes cmd-line args struct or something that looks like it.
# See csv_to_influx.py::influx_add_parser_args for options, or --help. # See csv_to_influx.py::influx_add_parser_args for options, or --help.
def check_influx_kpi(self, args): def check_influx_kpi(self, args):
if self.report_dir == "": if self.lf_report_dir == "":
# Nothing to report on. # Nothing to report on.
print("Not submitting to influx, no report-dir.\n") print("Not submitting to influx, no report-dir.\n")
return return
@@ -399,16 +400,21 @@ class cv_test(Realm):
(args.influx_host, args.influx_port, args.influx_org, args.influx_token, args.influx_bucket)) (args.influx_host, args.influx_port, args.influx_org, args.influx_token, args.influx_bucket))
# lfjson_host would be if we are reading out of LANforge or some other REST # lfjson_host would be if we are reading out of LANforge or some other REST
# source, which we are not. So dummy those out. # source, which we are not. So dummy those out.
influxdb = RecordInflux(_lfjson_host="", influxdb = RecordInflux(_influx_host=args.influx_host,
_lfjson_port="",
_influx_host=args.influx_host,
_influx_port=args.influx_port, _influx_port=args.influx_port,
_influx_org=args.influx_org, _influx_org=args.influx_org,
_influx_token=args.influx_token, _influx_token=args.influx_token,
_influx_bucket=args.influx_bucket) _influx_bucket=args.influx_bucket)
path = "%s/kpi.csv" % (self.report_dir) # lf_wifi_capacity_test.py may be run / initiated by a remote system against a lanforge
# the local_lf_report_dir is data is stored, if there is no local_lf_report_dir then the test is run directly on lanforge
if self.local_lf_report_dir == "":
path = "%s/kpi.csv" % (self.lf_report_dir)
else:
kpi_location = self.local_lf_report_dir + "/" + os.path.basename(self.lf_report_dir)
# the local_lf_report_dir is the parent directory, need to get the directory name
path = "%s/kpi.csv" % (kpi_location)
print("Attempt to submit kpi: ", path) print("Attempt to submit kpi: ", path)
csvtoinflux = CSVtoInflux(influxdb=influxdb, csvtoinflux = CSVtoInflux(influxdb=influxdb,
target_csv=path, target_csv=path,

View File

@@ -5,13 +5,13 @@ class lanforge_reports:
def pull_reports(self, hostname="localhost", port=22, username="lanforge", password="lanforge", def pull_reports(self, hostname="localhost", port=22, username="lanforge", password="lanforge",
report_location="/home/lanforge/html-reports/", report_location="/home/lanforge/html-reports/",
local_path="../../../reports/"): report_dir="../../../reports/"):
ssh = paramiko.SSHClient() ssh = paramiko.SSHClient()
ssh.load_system_host_keys() ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname=hostname, username=username, password=password, port=port) ssh.connect(hostname=hostname, username=username, password=password, port=port, allow_agent=False, look_for_keys=False)
with SCPClient(ssh.get_transport()) as scp: with SCPClient(ssh.get_transport()) as scp:
scp.get(remote_path=report_location, local_path=local_path, recursive=True) scp.get(remote_path=report_location, local_path=report_dir, recursive=True)
scp.close() scp.close()

View File

@@ -152,8 +152,8 @@ class GenCXProfile(LFCliBase):
count = 40 count = 40
for i in range(0, count): for i in range(0, count):
port_info = self.local_realm.name_to_eid(sta_port) port_info = self.local_realm.name_to_eid(sta_port)
resource = port_info[0] resource = port_info[1]
shelf = port_info[1] shelf = port_info[0]
name = port_info[2] name = port_info[2]
gen_name_a = "%s-%s" % (self.name_prefix, name) + "_" + str(i) + add gen_name_a = "%s-%s" % (self.name_prefix, name) + "_" + str(i) + add
@@ -167,8 +167,8 @@ class GenCXProfile(LFCliBase):
for i in range(0, 5): for i in range(0, 5):
port_info = self.local_realm.name_to_eid(port_name) port_info = self.local_realm.name_to_eid(port_name)
try: try:
resource = port_info[0] resource = port_info[1]
shelf = port_info[1] shelf = port_info[0]
name = port_info[2] name = port_info[2]
except: except:
raise ValueError("Unexpected name for port_name %s" % port_name) raise ValueError("Unexpected name for port_name %s" % port_name)
@@ -279,8 +279,8 @@ class GenCXProfile(LFCliBase):
endp_tpls = [] endp_tpls = []
for port_name in ports: for port_name in ports:
port_info = self.local_realm.name_to_eid(port_name) port_info = self.local_realm.name_to_eid(port_name)
resource = port_info[0] resource = port_info[1]
shelf = port_info[1] shelf = port_info[0]
name = port_info[2] name = port_info[2]
# this naming convention follows what you see when you use # this naming convention follows what you see when you use

View File

@@ -21,6 +21,7 @@ class L4CXProfile(LFCliBase):
self.local_realm = local_realm self.local_realm = local_realm
self.created_cx = {} self.created_cx = {}
self.created_endp = [] self.created_endp = []
self.test_type = "urls"
self.lfclient_port = lfclient_port self.lfclient_port = lfclient_port
self.lfclient_host = lfclient_host self.lfclient_host = lfclient_host
@@ -69,6 +70,34 @@ class L4CXProfile(LFCliBase):
print(".", end='') print(".", end='')
print("") print("")
def compare_vals(self, old_list, new_list):
passes = 0
expected_passes = 0
if len(old_list) == len(new_list):
for item, value in old_list.items():
expected_passes += 1
if new_list[item] > old_list[item]:
passes += 1
if passes == expected_passes:
return True
else:
return False
else:
return False
def get_bytes(self):
time.sleep(1)
cx_list = self.json_get("layer4/list?fields=name,%s" % self.test_type, debug_=self.debug)
# print("==============\n", cx_list, "\n==============")
cx_map = {}
for cx_name in cx_list['endpoint']:
if cx_name != 'uri' and cx_name != 'handler':
for item, value in cx_name.items():
for value_name, value_rx in value.items():
if item in self.created_cx.keys() and value_name == self.test_type:
cx_map[item] = value_rx
return cx_map
def check_request_rate(self): def check_request_rate(self):
endp_list = self.json_get("layer4/list?fields=urls/s") endp_list = self.json_get("layer4/list?fields=urls/s")
expected_passes = 0 expected_passes = 0
@@ -83,12 +112,11 @@ class L4CXProfile(LFCliBase):
if name in self.created_cx.keys(): if name in self.created_cx.keys():
expected_passes += 1 expected_passes += 1
if info['urls/s'] * self.requests_per_ten >= self.target_requests_per_ten * .9: if info['urls/s'] * self.requests_per_ten >= self.target_requests_per_ten * .9:
print(name, info['urls/s'], info['urls/s'] * self.requests_per_ten, self.target_requests_per_ten * .9) # print(name, info['urls/s'], info['urls/s'] * self.requests_per_ten, self.target_requests_per_ten * .9)
passes += 1 passes += 1
return passes == expected_passes return passes == expected_passes
def cleanup(self): def cleanup(self):
print("Cleaning up cxs and endpoints") print("Cleaning up cxs and endpoints")
if len(self.created_cx) != 0: if len(self.created_cx) != 0:
@@ -110,7 +138,7 @@ class L4CXProfile(LFCliBase):
def create(self, ports=[], sleep_time=.5, debug_=False, suppress_related_commands_=None): def create(self, ports=[], sleep_time=.5, debug_=False, suppress_related_commands_=None):
cx_post_data = [] cx_post_data = []
for port_name in ports: for port_name in ports:
print("port_name: {} len: {} self.local_realm.name_to_eid(port_name): {}".format(port_name,len(self.local_realm.name_to_eid(port_name)),self.local_realm.name_to_eid(port_name),)) print("port_name: {} len: {} self.local_realm.name_to_eid(port_name): {}".format(port_name, len(self.local_realm.name_to_eid(port_name)), self.local_realm.name_to_eid(port_name)))
shelf = self.local_realm.name_to_eid(port_name)[0] shelf = self.local_realm.name_to_eid(port_name)[0]
resource = self.local_realm.name_to_eid(port_name)[1] resource = self.local_realm.name_to_eid(port_name)[1]
name = self.local_realm.name_to_eid(port_name)[2] name = self.local_realm.name_to_eid(port_name)[2]
@@ -186,7 +214,6 @@ class L4CXProfile(LFCliBase):
print(header_row) print(header_row)
# Step 2 - Monitor columns # Step 2 - Monitor columns
start_time = datetime.datetime.now() start_time = datetime.datetime.now()
end_time = start_time + datetime.timedelta(seconds=duration_sec) end_time = start_time + datetime.timedelta(seconds=duration_sec)
sleep_interval = round(duration_sec // 5) sleep_interval = round(duration_sec // 5)
@@ -198,6 +225,9 @@ class L4CXProfile(LFCliBase):
passes = 0 passes = 0
expected_passes = 0 expected_passes = 0
timestamps = [] timestamps = []
if self.test_type != 'urls':
old_rx_values = self.get_bytes()
for test in range(1+iterations): for test in range(1+iterations):
while datetime.datetime.now() < end_time: while datetime.datetime.now() < end_time:
if col_names is None: if col_names is None:
@@ -219,16 +249,27 @@ class L4CXProfile(LFCliBase):
timestamps.append(t) timestamps.append(t)
value_map[t] = response value_map[t] = response
expected_passes += 1 expected_passes += 1
if self.check_errors(debug): if self.test_type == 'urls':
if self.check_request_rate(): if self.check_errors(self.debug):
if self.check_request_rate():
passes += 1
else:
self._fail("FAIL: Request rate did not exceed target rate")
break
else:
self._fail("FAIL: Errors found getting to %s " % self.url)
break
else:
new_rx_values = self.get_bytes()
if self.compare_vals(old_rx_values, new_rx_values):
passes += 1 passes += 1
else: else:
self._fail("FAIL: Request rate did not exceed 90% target rate") self._fail("FAIL: Not all stations increased traffic")
self.exit_fail()
else: # self.exit_fail()
self._fail("FAIL: Errors found getting to %s " % self.url)
self.exit_fail()
time.sleep(monitor_interval) time.sleep(monitor_interval)
print(value_map) print(value_map)
#[further] post-processing data, after test completion #[further] post-processing data, after test completion

View File

@@ -57,7 +57,7 @@ class ATTENUATORProfile(LFCliBase):
def create(self, debug=False): def create(self, debug=False):
if len(self.atten_serno) == 0 or len(self.atten_idx) == 0 or len(self.atten_val) == 0: if len(self.atten_serno) == 0 or len(self.atten_idx) == 0 or len(self.atten_val) == 0:
print("ERROR: Must specify atten_serno, atten_idx, and atten_val when setting attenuator.\n") print("ERROR: Must specify atten_serno, atten_idx, and atten_val when setting attenuator.\n")
print("Creating Attenuator...") print("Setting Attenuator...")
self.set_command_param("set_attenuator", "serno", self.atten_serno) self.set_command_param("set_attenuator", "serno", self.atten_serno)
self.set_command_param("set_attenuator", "atten_idx", self.atten_idx) self.set_command_param("set_attenuator", "atten_idx", self.atten_idx)
self.set_command_param("set_attenuator", "val", self.atten_val) self.set_command_param("set_attenuator", "val", self.atten_val)

View File

@@ -193,6 +193,10 @@ class StationProfile:
self.set_command_param("add_sta", "ieee80211w", 2) self.set_command_param("add_sta", "ieee80211w", 2)
# self.add_sta_data["key"] = passwd # self.add_sta_data["key"] = passwd
def station_mode_to_number(self,mode):
modes = ['a', 'b', 'g', 'abg', 'an', 'abgn', 'bgn', 'bg', 'abgn-AC', 'bgn-AC', 'an-AC']
return modes.index(mode) + 1
def add_security_extra(self, security): def add_security_extra(self, security):
types = {"wep": "wep_enable", "wpa": "wpa_enable", "wpa2": "wpa2_enable", "wpa3": "use-wpa3", "open": "[BLANK]"} types = {"wep": "wep_enable", "wpa": "wpa_enable", "wpa2": "wpa2_enable", "wpa3": "use-wpa3", "open": "[BLANK]"}
if self.desired_add_sta_flags.__contains__(types[security]) and \ if self.desired_add_sta_flags.__contains__(types[security]) and \

38
py-json/test_histogram.py Executable file
View File

@@ -0,0 +1,38 @@
#!/usr/bin/env python3
""" ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
internal test driving LFUtils.expand_endp_histogram
----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- """
import LANforge
from LANforge import LFUtils
import pprint
distrib_load = {
"histo_category_width" : 3,
"histogram" : [
221,
113,
266,
615,
16309,
56853,
7954,
1894,
29246,
118,
12,
2,
0,
0,
0,
0
],
"time window ms" : 300000,
"window avg" : 210.285,
"window max" : 228,
"window min" : 193
}
if __name__ == '__main__':
LFUtils.expand_endp_histogram(distrib_load)

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

View File

@@ -0,0 +1,306 @@
html, body,div {
margin: 0;
padding:0;
font-size: 14px;
}
h1,h2,h3,h4 {
padding: 0em;
line-height: 1.5;
text-align: left;
color: rgb(42,91,41);
}
@font-face {
font-family: CenturyGothic;
src: url("CenturyGothic.woff"),
url("images/CenturyGothic.woff"),
url("/images/CenturyGothic.woff"),
url("http://www.candelatech.com/images/CenturyGothic.woff");
}
body,h1,h2,h3,h4 {
font-family: CenturyGothic, "Century Gothic", Arial, Helvetica, sans-serif;
}
h1 { font-size: 30px;}
h2 { font-size: 24px;}
h3 { font-size: 18px;}
h4 { font-size: 14px;}
li,pre,tt {
text-align: left;
}
pre {
font-size: 10px;
}
table {
border-collapse: collapse;
background: #e0e0e0;
}
table, td, th {
border: 1px solid gray;
padding 4px;
}
table.noborder, table.noborder td, table.noborder th {
border: 0 none;
}
td {
background: white;
}
td.ar {
text-align: right;
}
th {
color: rgb(42,91,41);
text-align: center;
}
#lf_title {
text-align: center;
background-image: url(candela_swirl_small-72h.png);
background-position: right;
background-repeat: no-repeat;
height: 90px;
}
#new_chart {
display: block;
height: 250px;
min-width: 200px;
width: 80%;
border: 1px solid black;
margin: 14px auto;
padding: 14px;
vertical-align: bottom;
text-align: center;
}
.lf_chart {
margin: 1em;
padding: 5px;
}
#error_types ul {
background: #f0f0f0;
font-size: 12px;
line-height: 1.5;
margin: 1em;
padding: 0.25em inherit 0.25em inherit;
max-height: 8em;
overflow: auto;
}
li {
line-height: 1.5;
}
.contentDiv {
min-width: 800px;
max-width: 8in;
margin: 1em auto;
padding: 0;
}
.ct-point {
stroke-width: 6px;}
.o_el {
display: inline-block;
width: 100px;
height: 230px;
border: none;
margin: 1px 1px 16px 1px;
padding: 10px 10px 0 10px;
background: #eee;
text-align: center;
vertical-align: bottom;
}
.bar_el {
display: block;
background: green;
border: none;
min-height: 1px;
margin: 0 0 5px 0;
padding: 0;
text-align: center;
}
.label_el {
color: black;
display: block;
font-size: 14px;
font-family: Arial,Helvetica,sans-serif,mono;
margin: 1px;
text-align: center;
vertical-align: bottom;
width: inherit;
}
.value_el {
font-family: Arial,Helvetica,sans-serif,mono;
color: black;
display: block;
font-size: 14px;
margin: 0 auto;
padding: none;
border: none;
background: white;
text-align: center;
vertical-align: bottom;
width: auto;
}
.value_el>span {
background: #f0f0f0a0;
border: 1px solid #f0f0f0a0;
border-radius: 5px;
padding: 1px;
min-width: 2em;
}
.error {
color: red;
}
@media only screen {
.hideFromPrint { }
.hideFromScreen { display:none; }
}
@media only print {
.hideFromScreen { }
.hideFromPrint { display:none; }
}
/* these styles will get overridden by custom.css */
#BannerBack {
background-color: #e68b15;
height: 205px;
max-height: 205px;
border: 0 none;
margin: 0;
padding: 0;
top: 0;
left: 0;
width: 100%;
}
#Banner {
background-image:url("banner.png");
background-repeat:no-repeat;
padding: 0;
margin: 0 auto;
min-width: 1000px;
min-height: 205px;
width: 1000px;
height: 205px;
max-width: 1000px;
max-height: 205px;
}
#BannerLogo {
text-align: right;
padding: 25px;
margin: 5px;
width: 200px;
border: none;
}
#BannerLogoFooter {
text-align: right;
padding: 1px;
margin: 1px;
width: 200px;
border: none;
}
.TitleFontScreen {
margin-left: auto;
margin-right: auto;
margin-top: 1em;
margin-bottom: 0.2em;
font-size: 50px;
padding-top: 1em;
}
.TitleFontPrint {
line-height: 1;
margin-left: 0px;
margin-right: auto;
margin-top: 0.5em;
margin-bottom: 0.2em;
padding-top: 20px;
padding-left: 20px;
color: darkgreen;
}
.TitleFontPrintSub {
line-height: 1;
margin-left: 0px;
margin-right: auto;
margin-top: 0;
margin-bottom: 0;
/*font-size: 20px; Let 'h3', etc control this */
padding-top: 0px;
padding-left: 20px;
}
.HeaderFont {}
.TableFont {}
.TableBorder {}
.ImgStyle {}
div.Section h1, div.Section h2 {
margin: 0 0 0 0em;
}
div.HeaderStyle h1, div.HeaderStyle h2 {
text-align: left;
margin: 0 0 0 0;
max-width: 8in;
min-width: 800px;
}
div.Section {
padding 5px;
position: relative;
}
div.Section img {
margin: 0;
padding: 0;
position: relative;
top: 50%;
transform: translateY(-50%);
}
div.FooterStyle {
width: 100%;
vertical-align: middle;
border: 0 none;
border-top: 2px solid #2A5B29;
color: #2A5B29;
font-size: 12px;
margin-top: 2em;
}
div.FooterStyle img {
width: auto;
height: auto;
text-align: right;
}
div.FooterStyle span.Gradient {
background: white;
color: #2A5B29;
display: inline-block;
height: 30px;
line-height: 1;
padding-top: 22px;
padding-bottom: 20px;
padding-left: 2em;
vertical-align: middle;
max-width:80%;
float:left;
width:50%;
}
.FooterStyle a, .FooterStyle a:visited {
color: #2A5B29;
font-size: 12px;
line-height: 1;
height: 30px;
margin: 0;
padding: 0;
vertical-align: middle;
}
div.FooterStyle a.LogoImgLink {
display: inline-block;
text-align: right;
float: right;
}
a .LogoImgLink {
}
a.LogoImgLink img {
}
table.dataframe {
margin: 1em;
padding: 0;
}
table.dataframe tr th {
padding: 0.5em;
}

File diff suppressed because it is too large Load Diff

View File

@@ -3,6 +3,8 @@
""" """
This script will create a variable number of layer3 stations each with their own set of cross-connects and endpoints. This script will create a variable number of layer3 stations each with their own set of cross-connects and endpoints.
If you want to
Use './create_l3.py --help' to see command line usage and options Use './create_l3.py --help' to see command line usage and options
""" """
@@ -164,7 +166,7 @@ python3 ./test_ipv4_variable_time.py
if (args.num_stations is not None) and (int(args.num_stations) > 0): if (args.num_stations is not None) and (int(args.num_stations) > 0):
num_sta = int(args.num_stations) num_sta = int(args.num_stations)
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta - 1, padding_number_=10000, station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=int(args.number_template), end_id_=num_sta+int(args.number_template) - 1, padding_number_=10000,
radio=args.radio) radio=args.radio)
ip_var_test = CreateL3(host=args.mgr, ip_var_test = CreateL3(host=args.mgr,
port=args.mgr_port, port=args.mgr_port,

View File

@@ -27,12 +27,14 @@ class CreateStation(Realm):
_password=None, _password=None,
_host=None, _host=None,
_port=None, _port=None,
_mode=0,
_sta_list=None, _sta_list=None,
_number_template="00000", _number_template="00000",
_radio="wiphy0", _radio="wiphy0",
_proxy_str=None, _proxy_str=None,
_debug_on=False, _debug_on=False,
_up=True, _up=True,
_set_txo_data=None,
_exit_on_error=False, _exit_on_error=False,
_exit_on_fail=False): _exit_on_fail=False):
super().__init__(_host, super().__init__(_host,
@@ -42,25 +44,26 @@ class CreateStation(Realm):
self.ssid = _ssid self.ssid = _ssid
self.security = _security self.security = _security
self.password = _password self.password = _password
self.mode = _mode
self.sta_list = _sta_list self.sta_list = _sta_list
self.radio = _radio self.radio = _radio
self.timeout = 120 self.timeout = 120
self.number_template = _number_template self.number_template = _number_template
self.debug = _debug_on self.debug = _debug_on
self.up = _up self.up = _up
self.set_txo_data = _set_txo_data
self.station_profile = self.new_station_profile() self.station_profile = self.new_station_profile()
self.station_profile.lfclient_url = self.lfclient_url self.station_profile.lfclient_url = self.lfclient_url
self.station_profile.ssid = self.ssid self.station_profile.ssid = self.ssid
self.station_profile.ssid_pass = self.password, self.station_profile.ssid_pass = self.password,
self.station_profile.security = self.security self.station_profile.security = self.security
self.station_profile.number_template_ = self.number_template self.station_profile.number_template_ = self.number_template
self.station_profile.mode = 0 self.station_profile.mode = self.mode
if self.debug: if self.debug:
print("----- Station List ----- ----- ----- ----- ----- ----- \n") print("----- Station List ----- ----- ----- ----- ----- ----- \n")
pprint.pprint(self.sta_list) pprint.pprint(self.sta_list)
print("---- ~Station List ----- ----- ----- ----- ----- ----- \n") print("---- ~Station List ----- ----- ----- ----- ----- ----- \n")
def build(self): def build(self):
# Build stations # Build stations
self.station_profile.use_security(self.security, self.ssid, self.password) self.station_profile.use_security(self.security, self.ssid, self.password)
@@ -70,6 +73,15 @@ class CreateStation(Realm):
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1) self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500) self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1) self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
if self.set_txo_data is not None:
self.station_profile.set_wifi_txo(txo_ena=self.set_txo_data["txo_enable"],
tx_power=self.set_txo_data["txpower"],
pream=self.set_txo_data["pream"],
mcs=self.set_txo_data["mcs"],
nss=self.set_txo_data["nss"],
bw=self.set_txo_data["bw"],
retries=self.set_txo_data["retries"],
sgi=self.set_txo_data["sgi"], )
self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug) self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
if self.up: if self.up:
self.station_profile.admin_up() self.station_profile.admin_up()
@@ -78,7 +90,7 @@ class CreateStation(Realm):
def main(): def main():
parser = LFCliBase.create_basic_argparse( parser = LFCliBase.create_basic_argparse( # see create_basic_argparse in ../py-json/LANforge/lfcli_base.py
prog='create_station.py', prog='create_station.py',
formatter_class=argparse.RawTextHelpFormatter, formatter_class=argparse.RawTextHelpFormatter,
epilog='''\ epilog='''\
@@ -91,6 +103,7 @@ def main():
Command example: Command example:
./create_station.py ./create_station.py
--radio wiphy0 --radio wiphy0
--start_id 2
--num_stations 3 --num_stations 3
--security open --security open
--ssid netgear --ssid netgear
@@ -98,14 +111,21 @@ Command example:
--debug --debug
''') ''')
required = parser.add_argument_group('required arguments') required = parser.add_argument_group('required arguments')
#required.add_argument('--security', help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >', required=True) required.add_argument('--start_id', help='--start_id <value> default 0', default=0)
optional = parser.add_argument_group('Optional arguments')
optional.add_argument('--mode', help='Mode for your station (as a number)',default=0)
args = parser.parse_args() args = parser.parse_args()
#if args.debug: # if args.debug:
# pprint.pprint(args) # pprint.pprint(args)
# time.sleep(5) # time.sleep(5)
if (args.radio is None): if (args.radio is None):
raise ValueError("--radio required") raise ValueError("--radio required")
start_id = 0
if (args.start_id != 0):
start_id = int(args.start_id)
num_sta = 2 num_sta = 2
if (args.num_stations is not None) and (int(args.num_stations) > 0): if (args.num_stations is not None) and (int(args.num_stations) > 0):
@@ -113,20 +133,34 @@ Command example:
num_sta = num_stations_converted num_sta = num_stations_converted
station_list = LFUtils.port_name_series(prefix="sta", station_list = LFUtils.port_name_series(prefix="sta",
start_id=0, start_id=start_id,
end_id=num_sta-1, end_id=start_id + num_sta - 1,
padding_number=10000, padding_number=10000,
radio=args.radio) radio=args.radio)
print("station_list {}".format(station_list))
set_txo_data={
"txo_enable": 1,
"txpower": 255,
"pream": 0,
"mcs": 0,
"nss": 0,
"bw": 3,
"retries": 1,
"sgi": 0
}
create_station = CreateStation(_host=args.mgr, create_station = CreateStation(_host=args.mgr,
_port=args.mgr_port, _port=args.mgr_port,
_ssid=args.ssid, _ssid=args.ssid,
_password=args.passwd, _password=args.passwd,
_security=args.security, _security=args.security,
_sta_list=station_list, _sta_list=station_list,
_radio=args.radio, _mode=args.mode,
_proxy_str=args.proxy, _radio=args.radio,
_debug_on=args.debug) _set_txo_data=None,
_proxy_str=args.proxy,
_debug_on=args.debug)
create_station.build() create_station.build()
print('Created %s stations' % num_sta) print('Created %s stations' % num_sta)

View File

@@ -42,7 +42,7 @@ class CSVtoInflux():
target_csv=None, target_csv=None,
sep='\t'): sep='\t'):
self.influxdb = influxdb self.influxdb = influxdb
self.target_csv = target_csv.replace('/home/lanforge/html-reports/', '') self.target_csv = target_csv
self.influx_tag = _influx_tag self.influx_tag = _influx_tag
self.sep = sep self.sep = sep
@@ -69,7 +69,10 @@ class CSVtoInflux():
tags = dict() tags = dict()
print("row: %s" % row) print("row: %s" % row)
short_description = row[columns['short-description']] short_description = row[columns['short-description']]
numeric_score = float(row[columns['numeric-score']]) if row[columns['numeric-score']] == 'NaN':
numeric_score = '0x0'
else:
numeric_score = float(row[columns['numeric-score']])
date = row[columns['Date']] date = row[columns['Date']]
date = datetime.datetime.utcfromtimestamp(int(date) / 1000).isoformat() #convert to datetime so influx can read it, this is required date = datetime.datetime.utcfromtimestamp(int(date) / 1000).isoformat() #convert to datetime so influx can read it, this is required
for variable in csv_variables: for variable in csv_variables:
@@ -146,9 +149,7 @@ python3 csv_to_influx.py --influx_host localhost --influx_org Candela --influx_t
args = parser.parse_args() args = parser.parse_args()
influxdb = RecordInflux(_lfjson_host=lfjson_host, influxdb = RecordInflux(_influx_host=args.influx_host,
_lfjson_port=lfjson_port,
_influx_host=args.influx_host,
_influx_port=args.influx_port, _influx_port=args.influx_port,
_influx_org=args.influx_org, _influx_org=args.influx_org,
_influx_token=args.influx_token, _influx_token=args.influx_token,

View File

@@ -16,7 +16,6 @@ Influx from this script.
--line "Resource=1.1 Profile=default Amount=4 Uses-1=wiphy1 DUT=DUT_TO_GRAFANA_DUT Traffic=wiphy1 Freq=-1" --line "Resource=1.1 Profile=default Amount=4 Uses-1=wiphy1 DUT=DUT_TO_GRAFANA_DUT Traffic=wiphy1 Freq=-1"
--line "Resource=1.1 Profile=upstream Amount=1 Uses-1=eth1 DUT=DUT_TO_GRAFANA_DUT Traffic=eth1 Freq=-1" --line "Resource=1.1 Profile=upstream Amount=1 Uses-1=eth1 DUT=DUT_TO_GRAFANA_DUT Traffic=eth1 Freq=-1"
--dut DUT_TO_GRAFANA --dut DUT_TO_GRAFANA
--test_rig Stidmatt-01
--create_scenario DUT_TO_GRAFANA_SCENARIO --create_scenario DUT_TO_GRAFANA_SCENARIO
--station 1.1.sta00002 --station 1.1.sta00002
--duration 15s --duration 15s
@@ -103,7 +102,6 @@ def main():
--line --line
--line --line
--dut --dut
--test_rig
--create_scenario --create_scenario
--station --station
--influx_tag --influx_tag

220
py-scripts/ghost_profile.py Executable file
View File

@@ -0,0 +1,220 @@
#!/usr/bin/env python3
"""
NAME: ghost_profile.py
PURPOSE: modify ghost database from the command line.
SETUP: A Ghost installation which the user has admin access to.
EXAMPLE: ./ghost_profile.py --article_text_file text.txt --title Test --authors Matthew --ghost_token SECRET_KEY --host 192.168.1.1
There is a specific class for uploading kpi graphs called kpi_to_ghost.
EXAMPLE: ./ghost_profile.py --ghost_token TOKEN --ghost_host 192.168.100.147
--folders /home/lanforge/html-reports/wifi-capacity-2021-06-04-02-51-07
--kpi_to_ghost appl --authors Matthew --title 'wifi capacity 2021 06 04 02 51 07' --server 192.168.93.51
--user_pull lanforge --password_pull lanforge --customer candela --testbed heather --test_run test-run-6
--user_push matt --password_push PASSWORD
EXAMPLE 2: ./ghost_profile.py --ghost_token TOKEN
--ghost_host 192.168.100.147 --server 192.168.93.51 --customer candela
--testbed heather --user_push matt --password_push "amount%coverage;Online" --kpi_to_ghost app
--folders /home/lanforge/html-reports/wifi-capacity-2021-06-14-10-42-29 --grafana_token TOKEN
--grafana_host 192.168.100.201
this script uses pyjwt. If you get the issue module 'jwt' has no attribute 'encode', run this: pip3 uninstall jwt pyjwt && pip install pyjwt
Matthew Stidham
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
"""
import sys
import os
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
sys.path.append(os.path.join(os.path.abspath('..'), 'py-dashboard'))
from GhostRequest import GhostRequest
class UseGhost(GhostRequest):
    """Command-line oriented wrapper around GhostRequest.

    Forwards Ghost and Influx connection settings to the GhostRequest
    base class and exposes small convenience helpers used by main():
    posting an article from a text file and building a KPI report page.
    """

    def __init__(self,
                 _ghost_token=None,
                 host="localhost",
                 port=8080,
                 _debug_on=False,
                 _exit_on_fail=False,
                 _ghost_host="localhost",
                 _ghost_port=2368,
                 influx_host=None,
                 influx_port=None,
                 influx_org=None,
                 influx_token=None,
                 influx_bucket=None):
        # Ghost expects the port as a string; the Influx settings are
        # passed through untouched so GhostRequest can reach the database.
        super().__init__(_ghost_host,
                         str(_ghost_port),
                         _api_token=_ghost_token,
                         influx_host=influx_host,
                         influx_port=influx_port,
                         influx_org=influx_org,
                         influx_token=influx_token,
                         influx_bucket=influx_bucket,
                         debug_=_debug_on)
        self.ghost_host = _ghost_host
        self.ghost_port = _ghost_port
        self.ghost_token = _ghost_token
        self.influx_host = influx_host
        self.influx_port = influx_port
        self.influx_org = influx_org
        self.influx_token = influx_token
        self.influx_bucket = influx_bucket

    def create_post_from_file(self, title, file, tags, authors):
        """Read *file* and publish its contents as a Ghost post.

        :param title: post title
        :param file: path to a text file holding the article body
        :param tags: tags to attach to the post
        :param authors: authors to credit on the post
        :return: whatever GhostRequest.create_post returns
        """
        # Use a context manager so the file handle is closed even if
        # create_post raises (the original leaked the handle).
        with open(file) as article:
            text = article.read()
        return self.create_post(title=title, text=text, tags=tags, authors=authors)

    def kpi(self,
            authors,
            folders,
            parent_folder,
            title,
            server_pull,
            ghost_host,
            port,
            user_push,
            password_push,
            customer,
            testbed,
            test_run,
            grafana_token,
            grafana_host,
            grafana_port,
            datasource,
            grafana_bucket):
        """Build a KPI report post from LANforge report folders.

        Thin pass-through to GhostRequest.kpi_to_ghost; ``target_folders``
        starts empty and is filled by the base-class implementation.
        """
        target_folders = list()
        return self.kpi_to_ghost(authors,
                                 folders,
                                 parent_folder,
                                 title,
                                 server_pull,
                                 ghost_host,
                                 port,
                                 user_push,
                                 password_push,
                                 customer,
                                 testbed,
                                 test_run,
                                 target_folders,
                                 grafana_token,
                                 grafana_host,
                                 grafana_port,
                                 datasource,
                                 grafana_bucket)
def main():
    """Parse command-line options and drive the requested Ghost action.

    Depending on the flags given this can create a post (from text or a
    file), upload images, build a custom post from report folders, or
    generate a full KPI report page via --kpi_to_ghost.
    """
    parser = argparse.ArgumentParser(
        prog='ghost_profile.py',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog='''Manage Ghost Website''',
        description='''
        ghost_profile.py
        ----------------
        Command example:
        ./ghost_profile.py
        --ghost_token'''
    )
    optional = parser.add_argument_group('optional arguments')
    optional.add_argument('--ghost_token', default=None)
    optional.add_argument('--create_post', default=None)
    optional.add_argument('--article_text_file', default=None)
    optional.add_argument('--ghost_port', help='Ghost port if different from 2368', default=2368)
    optional.add_argument('--ghost_host', help='Ghost host if different from localhost', default='localhost')
    optional.add_argument('--article_text')
    optional.add_argument('--article_tags', action='append')
    optional.add_argument('--authors', action='append')
    optional.add_argument('--title', default=None)
    optional.add_argument('--image', default=None)
    optional.add_argument('--folder', default=None)
    optional.add_argument('--custom_post', default=None)
    optional.add_argument('--kpi_to_ghost', help='Generate a Ghost report from KPI spreadsheets', action="store_true")
    optional.add_argument('--folders', action='append', default=None)
    optional.add_argument('--server_pull')
    optional.add_argument('--port', default=22)
    optional.add_argument('--user_push')
    optional.add_argument('--password_push')
    optional.add_argument('--customer')
    optional.add_argument('--testbed')
    optional.add_argument('--test_run', default=None)
    optional.add_argument('--grafana_token', default=None)
    optional.add_argument('--grafana_host', default=None)
    optional.add_argument('--grafana_port', default=3000)
    optional.add_argument('--parent_folder', default=None)
    optional.add_argument('--datasource', default='InfluxDB')
    optional.add_argument('--grafana_bucket', default=None)
    optional.add_argument('--influx_host')
    # Help strings below were previously copy-pasted from a user/password
    # pair and described the wrong thing; corrected to match each option.
    optional.add_argument('--influx_token', help='API token for your Influx database')
    optional.add_argument('--influx_bucket', help='Bucket name in your Influx database')
    optional.add_argument('--influx_org', help='Organization of your Influx database')
    optional.add_argument('--influx_port', help='Port where your influx database is located', default=8086)
    optional.add_argument('--influx_tag', action='append', nargs=2,
                          help='--influx_tag <key> <val> Can add more than one of these.')
    optional.add_argument('--influx_mgr',
                          help='IP address of the server your Influx database is hosted if different from your LANforge Manager',
                          default=None)
    optional.add_argument('--debug', help='Enable debugging', default=False, action="store_true")
    args = parser.parse_args()

    Ghost = UseGhost(_ghost_token=args.ghost_token,
                     _ghost_port=args.ghost_port,
                     _ghost_host=args.ghost_host,
                     influx_host=args.influx_host,
                     influx_port=args.influx_port,
                     influx_org=args.influx_org,
                     influx_token=args.influx_token,
                     influx_bucket=args.influx_bucket,
                     _debug_on=args.debug)

    if args.create_post is not None:
        Ghost.create_post(args.title, args.article_text, args.article_tags, args.authors)
    if args.article_text_file is not None:
        Ghost.create_post_from_file(args.title, args.article_text_file, args.article_tags, args.authors)

    if args.image is not None:
        Ghost.upload_image(args.image)

    if args.custom_post is not None:
        # --folders (plural) wins over --folder when both are given.
        if args.folders is not None:
            Ghost.custom_post(args.folders, args.authors)
        else:
            Ghost.custom_post(args.folder, args.authors)
    else:
        if args.folder is not None:
            Ghost.upload_images(args.folder)

    if args.kpi_to_ghost is True:
        Ghost.kpi(args.authors,
                  args.folders,
                  args.parent_folder,
                  args.title,
                  args.server_pull,
                  args.ghost_host,
                  args.port,
                  args.user_push,
                  args.password_push,
                  args.customer,
                  args.testbed,
                  args.test_run,
                  args.grafana_token,
                  args.grafana_host,
                  args.grafana_port,
                  args.datasource,
                  args.grafana_bucket)


if __name__ == "__main__":
    main()

View File

@@ -19,8 +19,8 @@ if 'py-json' not in sys.path:
from GrafanaRequest import GrafanaRequest from GrafanaRequest import GrafanaRequest
from LANforge.lfcli_base import LFCliBase from LANforge.lfcli_base import LFCliBase
import json
import string import string
<<<<<<< HEAD
import random import random
@@ -161,109 +161,11 @@ class UseGrafana(LFCliBase):
options = dict() options = dict()
options['alertThreshold'] = True options['alertThreshold'] = True
=======
groupBy = list() class UseGrafana(GrafanaRequest):
groupBy.append(self.groupby('$__interval', 'time')) >>>>>>> 0ef021e1165cbaa612e5128bc48d6abfbb7b887b
groupBy.append(self.groupby('null', 'fill'))
targets = list()
counter = 0
new_target = self.maketargets(bucket, scriptname, groupBy, counter, graph_group,testbed)
targets.append(new_target)
fieldConfig = dict()
fieldConfig['defaults'] = dict()
fieldConfig['overrides'] = list()
transformation = dict()
transformation['id'] = "renameByRegex"
transformation_options = dict()
transformation_options['regex'] = "(.*) value.*"
transformation_options['renamePattern'] = "$1"
transformation['options'] = transformation_options
xaxis = dict()
xaxis['buckets'] = None
xaxis['mode'] = "time"
xaxis['name'] = None
xaxis['show'] = True
xaxis['values'] = list()
yaxis = dict()
yaxis['format'] = 'short'
yaxis['label'] = unit_dict[graph_group]
yaxis['logBase'] = 1
yaxis['max'] = None
yaxis['min'] = None
yaxis['show'] = True
yaxis1 = dict()
yaxis1['align'] = False
yaxis1['alignLevel'] = None
panel['aliasColors'] = dict()
panel['bars'] = False
panel['dashes'] = False
panel['dashLength'] = 10
panel['datasource'] = datasource
panel['fieldConfig'] = fieldConfig
panel['fill'] = 0
panel['fillGradient'] = 0
panel['gridPos'] = gridpos
panel['hiddenSeries'] = False
panel['id'] = index
panel['legend'] = legend
panel['lines'] = True
panel['linewidth'] = 1
panel['nullPointMode'] = 'null'
panel['options'] = options
panel['percentage'] = False
panel['pluginVersion'] = '7.5.4'
panel['pointradius'] = 2
panel['points'] = True
panel['renderer'] = 'flot'
panel['seriesOverrides'] = list()
panel['spaceLength'] = 10
panel['stack'] = False
panel['steppedLine'] = False
panel['targets'] = targets
panel['thresholds'] = list()
panel['timeFrom'] = None
panel['timeRegions'] = list()
panel['timeShift'] = None
if graph_group is not None:
panel['title'] = scriptname + ' ' + graph_group
else:
panel['title'] = scriptname
panel['transformations'] = list()
panel['transformations'].append(transformation)
panel['type'] = "graph"
panel['xaxis'] = xaxis
panel['yaxes'] = list()
panel['yaxes'].append(yaxis)
panel['yaxes'].append(yaxis)
panel['yaxis'] = yaxis1
panels.append(panel)
index = index + 1
input1['annotations'] = annot
input1['editable'] = True
input1['gnetId'] = None
input1['graphTooltip'] = 0
input1['links'] = list()
input1['panels'] = panels
input1['refresh'] = False
input1['schemaVersion'] = 27
input1['style'] = 'dark'
input1['tags'] = list()
input1['templating'] = templating
input1['time'] = timedict
input1['timepicker'] = dict()
input1['timezone'] = ''
input1['title'] = ("Testbed: %s" % title)
input1['uid'] = uid
input1['version'] = 11
return self.GR.create_dashboard_from_dict(dictionary=json.dumps(input1))
def read_csv(self, file): def read_csv(self, file):
csv = open(file).read().split('\n') csv = open(file).read().split('\n')
@@ -280,19 +182,6 @@ class UseGrafana(LFCliBase):
results.append(row[value]) results.append(row[value])
return results return results
def get_graph_groups(self,target_csvs): # Get the unique values in the Graph-Group column
dictionary = dict()
for target_csv in target_csvs:
if len(target_csv) > 1:
csv = self.read_csv(target_csv)
# Unique values in the test-id column
scripts = list(set(self.get_values(csv,'test-id')))
# we need to make sure we match each Graph Group to the script it occurs in
for script in scripts:
# Unique Graph Groups for each script
dictionary[script] = list(set(self.get_values(csv,'Graph-Group')))
print(dictionary)
return dictionary
def get_units(self, target_csv): def get_units(self, target_csv):
csv = self.read_csv(target_csv) csv = self.read_csv(target_csv)
@@ -324,6 +213,12 @@ def main():
--graph_groups 'Per Stations Rate DL' --graph_groups 'Per Stations Rate DL'
--graph_groups 'Per Stations Rate UL' --graph_groups 'Per Stations Rate UL'
--graph_groups 'Per Stations Rate UL+DL' --graph_groups 'Per Stations Rate UL+DL'
Create a snapshot of a dashboard:
./grafana_profile.py --grafana_token TOKEN
--grafana_host HOST
--create_snapshot
--title TITLE_OF_DASHBOARD
''') ''')
required = parser.add_argument_group('required arguments') required = parser.add_argument_group('required arguments')
required.add_argument('--grafana_token', help='token to access your Grafana database', required=True) required.add_argument('--grafana_token', help='token to access your Grafana database', required=True)

View File

@@ -21,13 +21,11 @@ import json
import influxdb_client import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS from influxdb_client.client.write_api import SYNCHRONOUS
import datetime import datetime
from LANforge.lfcli_base import LFCliBase #from LANforge.lfcli_base import LFCliBase
import time import time
class RecordInflux(LFCliBase): class RecordInflux:
def __init__(self, def __init__(self,
_lfjson_host="lanforge",
_lfjson_port=8080,
_influx_host="localhost", _influx_host="localhost",
_influx_port=8086, _influx_port=8086,
_influx_org=None, _influx_org=None,
@@ -35,9 +33,6 @@ class RecordInflux(LFCliBase):
_influx_bucket=None, _influx_bucket=None,
_debug_on=False, _debug_on=False,
_exit_on_fail=False): _exit_on_fail=False):
super().__init__(_lfjson_host, _lfjson_port,
_debug=_debug_on,
_exit_on_fail=_exit_on_fail)
self.influx_host = _influx_host self.influx_host = _influx_host
self.influx_port = _influx_port self.influx_port = _influx_port
self.influx_org = _influx_org self.influx_org = _influx_org
@@ -49,10 +44,6 @@ class RecordInflux(LFCliBase):
org=self.influx_org, org=self.influx_org,
debug=_debug_on) debug=_debug_on)
self.write_api = self.client.write_api(write_options=SYNCHRONOUS) self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
#print("org: ", self.influx_org)
#print("token: ", self.influx_token)
#print("bucket: ", self.influx_bucket)
#exit(0)
def post_to_influx(self, key, value, tags, time): def post_to_influx(self, key, value, tags, time):
p = influxdb_client.Point(key) p = influxdb_client.Point(key)

View File

@@ -23,7 +23,7 @@ the options and how best to input data.
--set 'Skip 2.4Ghz Tests' 1 --set 'Skip 5Ghz Tests' 1 \ --set 'Skip 2.4Ghz Tests' 1 --set 'Skip 5Ghz Tests' 1 \
--set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Stability' 0 --set 'Band-Steering' 0 \ --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Stability' 0 --set 'Band-Steering' 0 \
--set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Long-Term' 0 \ --set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Long-Term' 0 \
--test_rig Testbed-01 --pull_report \ --pull_report \
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \ --influx_host c7-graphana --influx_port 8086 --influx_org Candela \
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \ --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
--influx_bucket ben \ --influx_bucket ben \
@@ -46,7 +46,6 @@ show_log: 0
port_sorting: 0 port_sorting: 0
kpi_id: AP Auto kpi_id: AP Auto
bg: 0xE0ECF8 bg: 0xE0ECF8
test_rig: Ferndale-01-Basic
show_scan: 1 show_scan: 1
auto_helper: 1 auto_helper: 1
skip_2: 1 skip_2: 1
@@ -187,6 +186,7 @@ class ApAutoTest(cvtest):
lf_port=8080, lf_port=8080,
lf_user="lanforge", lf_user="lanforge",
lf_password="lanforge", lf_password="lanforge",
local_lf_report_dir="",
instance_name="ap_auto_instance", instance_name="ap_auto_instance",
config_name="ap_auto_config", config_name="ap_auto_config",
upstream="1.1.eth1", upstream="1.1.eth1",
@@ -231,6 +231,7 @@ class ApAutoTest(cvtest):
self.raw_lines_file = raw_lines_file self.raw_lines_file = raw_lines_file
self.sets = sets self.sets = sets
self.graph_groups = graph_groups self.graph_groups = graph_groups
self.local_lf_report_dir = local_lf_report_dir
def setup(self): def setup(self):
# Nothing to do at this time. # Nothing to do at this time.
@@ -283,7 +284,7 @@ class ApAutoTest(cvtest):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name, self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets, self.config_name, self.sets,
self.pull_report, self.lf_host, self.lf_user, self.lf_password, self.pull_report, self.lf_host, self.lf_user, self.lf_password,
cv_cmds, graph_groups_file=self.graph_groups) cv_cmds, graph_groups_file=self.graph_groups, local_lf_report_dir=self.local_lf_report_dir)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
@@ -333,6 +334,7 @@ def main():
help="Specify 2.4Ghz radio. May be specified multiple times.") help="Specify 2.4Ghz radio. May be specified multiple times.")
parser.add_argument("--radio5", action='append', nargs=1, default=[], parser.add_argument("--radio5", action='append', nargs=1, default=[],
help="Specify 5Ghz radio. May be specified multiple times.") help="Specify 5Ghz radio. May be specified multiple times.")
parser.add_argument("--local_lf_report_dir", help="--local_lf_report_dir <where to pull reports to> default '' put where dataplane script run from",default="")
args = parser.parse_args() args = parser.parse_args()
@@ -346,6 +348,7 @@ def main():
config_name = args.config_name, config_name = args.config_name,
upstream = args.upstream, upstream = args.upstream,
pull_report = args.pull_report, pull_report = args.pull_report,
local_lf_report_dir = args.local_lf_report_dir,
dut5_0 = args.dut5_0, dut5_0 = args.dut5_0,
dut2_0 = args.dut2_0, dut2_0 = args.dut2_0,
load_old_cfg = args.load_old_cfg, load_old_cfg = args.load_old_cfg,

49
py-scripts/lf_csv.py Normal file
View File

@@ -0,0 +1,49 @@
#!/usr/bin/env python3
'''
NAME: lf_csv.py
PURPOSE:
Common Library for generating csv for LANforge output
SETUP:
/lanforge/html-reports directory needs to be present or output generated in local file
EXAMPLE:
see: /py-scritps/lf_report_test.py for example
COPYRIGHT
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
INCLUDE_IN_README
'''
import numpy as np
import pandas as pd
class LfCSV:
    """Build a CSV file from column names and per-column value series.

    Data is column-major: ``_rows[i]`` is the complete series of values
    for column ``_columns[i]``.  The defaults produce a small demo table.
    """

    def __init__(self,
                 _columns=None,
                 _rows=None,
                 _filename='test.csv'):
        # None sentinels instead of mutable default arguments, so each
        # instance gets its own lists (the old defaults were shared
        # across every LfCSV() call and could be mutated in place).
        if _columns is None:
            _columns = ['Stations', 'bk', 'be', 'vi', 'vo']
        if _rows is None:
            _rows = [['sta0001', 'sta0002', 'sta0003', 'sta0004', 'sta0005'],
                     [1, 2, 3, 4, 5],
                     [11, 22, 33, 44, 55],
                     [6, 7, 8, 9, 10],
                     [66, 77, 88, 99, 100]]
        self.rows = _rows
        self.columns = _columns
        self.filename = _filename

    def generate_csv(self):
        """Write the table to ``self.filename`` (also echoes it to stdout).

        Raises IndexError if there are fewer row-series than columns,
        matching the original behavior.
        """
        df = {name: self.rows[i] for i, name in enumerate(self.columns)}
        csv_df = pd.DataFrame(df)
        print(csv_df)
        # NA values become 'NA'; floats are rounded to two decimals.
        csv_df.to_csv(self.filename, index=False, encoding='utf-8',
                      na_rep='NA', float_format='%.2f')
if __name__ == "__main__":
    # Smoke test: emit the built-in demo table when run directly.
    demo = LfCSV()
    demo.generate_csv()

View File

@@ -121,7 +121,7 @@ class DataplaneTest(cv_test):
lf_user="lanforge", lf_user="lanforge",
lf_password="lanforge", lf_password="lanforge",
ssh_port=22, ssh_port=22,
local_path="", local_lf_report_dir="",
instance_name="dpt_instance", instance_name="dpt_instance",
config_name="dpt_config", config_name="dpt_config",
upstream="1.1.eth2", upstream="1.1.eth2",
@@ -138,7 +138,9 @@ class DataplaneTest(cv_test):
raw_lines_file="", raw_lines_file="",
sets=[], sets=[],
graph_groups=None, graph_groups=None,
report_dir="" report_dir="",
test_rig="",
debug=False
): ):
super().__init__(lfclient_host=lf_host, lfclient_port=lf_port) super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)
@@ -165,7 +167,9 @@ class DataplaneTest(cv_test):
self.graph_groups = graph_groups self.graph_groups = graph_groups
self.report_dir = report_dir self.report_dir = report_dir
self.ssh_port = ssh_port self.ssh_port = ssh_port
self.local_path = local_path self.local_lf_report_dir = local_lf_report_dir
self.test_rig = test_rig
self.debug = debug
def setup(self): def setup(self):
# Nothing to do at this time. # Nothing to do at this time.
@@ -200,6 +204,8 @@ class DataplaneTest(cv_test):
cfg_options.append("duration: " + self.duration) cfg_options.append("duration: " + self.duration)
if self.dut != "": if self.dut != "":
cfg_options.append("selected_dut: " + self.dut) cfg_options.append("selected_dut: " + self.dut)
if self.test_rig != "":
cfg_options.append("test_rig: " + self.test_rig)
# We deleted the scenario earlier, now re-build new one line at a time. # We deleted the scenario earlier, now re-build new one line at a time.
@@ -209,8 +215,8 @@ class DataplaneTest(cv_test):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name, self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets, self.config_name, self.sets,
self.pull_report, self.lf_host, self.lf_user, self.lf_password, self.pull_report, self.lf_host, self.lf_user, self.lf_password,
cv_cmds, ssh_port=self.ssh_port, local_path=self.local_path, cv_cmds, ssh_port=self.ssh_port, local_lf_report_dir=self.local_lf_report_dir,
graph_groups_file=self.graph_groups) graph_groups_file=self.graph_groups, debug=self.debug)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
@@ -239,6 +245,11 @@ def main():
cv_add_base_parser(parser) # see cv_test_manager.py cv_add_base_parser(parser) # see cv_test_manager.py
<<<<<<< HEAD
=======
parser.add_argument('--json', help="--json <config.json> json input file", default="")
parser.add_argument('--influx_json', help="--influx_json <influx_config.json> influx config json input file", default="")
>>>>>>> 0ef021e1165cbaa612e5128bc48d6abfbb7b887b
parser.add_argument("-u", "--upstream", type=str, default="", parser.add_argument("-u", "--upstream", type=str, default="",
help="Upstream port for wifi capacity test ex. 1.1.eth2") help="Upstream port for wifi capacity test ex. 1.1.eth2")
parser.add_argument("--station", type=str, default="", parser.add_argument("--station", type=str, default="",
@@ -254,9 +265,76 @@ def main():
help="Specify duration of each traffic run") help="Specify duration of each traffic run")
parser.add_argument("--graph_groups", help="File to save graph_groups to", default=None) parser.add_argument("--graph_groups", help="File to save graph_groups to", default=None)
parser.add_argument("--report_dir", default="") parser.add_argument("--report_dir", default="")
parser.add_argument("--local_lf_report_dir", help="--local_lf_report_dir <where to pull reports to> default '' put where dataplane script run from",default="")
parser.add_argument("--debug", default=False)
args = parser.parse_args() args = parser.parse_args()
<<<<<<< HEAD
=======
# use json config file
if args.json != "":
try:
with open(args.json, 'r') as json_config:
json_data = json.load(json_config)
except:
print("Error reading {}".format(args.json))
# json configuation takes presidence to command line
if "mgr" in json_data:
args.mgr = json_data["mgr"]
if "port" in json_data:
args.port = json_data["port"]
if "lf_user" in json_data:
args.lf_user = json_data["lf_user"]
if "lf_password" in json_data:
args.lf_password = json_data["lf_password"]
if "instance_name" in json_data:
args.instance_name = json_data["instance_name"]
if "config_name" in json_data:
args.config_name = json_data["config_name"]
if "upstream" in json_data:
args.upstream = json_data["upstream"]
if "dut" in json_data:
args.dut = json_data["dut"]
if "duration" in json_data:
args.duration = json_data["duration"]
if "station" in json_data:
args.station = json_data["station"]
if "download_speed" in json_data:
args.download_speed = json_data["download_speed"]
if "upload_speed" in json_data:
args.upload_speed = json_data["upload_speed"]
if "pull_report" in json_data:
args.pull_report = json_data["pull_report"]
if "raw_line" in json_data:
# the json_data is a list , need to make into a list of lists, to match command line raw_line paramaters
# https://www.tutorialspoint.com/convert-list-into-list-of-lists-in-python
json_data_tmp = [[x] for x in json_data["raw_line"]]
args.raw_line = json_data_tmp
# use influx json config file
if args.influx_json != "":
try:
with open(args.influx_json, 'r') as influx_json_config:
influx_json_data = json.load(influx_json_config)
except:
print("Error reading {}".format(args.influx_json))
# json configuation takes presidence to command line
# influx DB configuration
if "influx_host" in influx_json_data:
args.influx_host = influx_json_data["influx_host"]
if "influx_port" in influx_json_data:
args.influx_port = influx_json_data["influx_port"]
if "influx_org" in influx_json_data:
args.influx_org = influx_json_data["influx_org"]
if "influx_token" in influx_json_data:
args.influx_token = influx_json_data["influx_token"]
if "influx_bucket" in influx_json_data:
args.influx_bucket = influx_json_data["influx_bucket"]
>>>>>>> 0ef021e1165cbaa612e5128bc48d6abfbb7b887b
cv_base_adjust_parser(args) cv_base_adjust_parser(args)
CV_Test = DataplaneTest(lf_host = args.mgr, CV_Test = DataplaneTest(lf_host = args.mgr,
@@ -267,6 +345,7 @@ def main():
config_name = args.config_name, config_name = args.config_name,
upstream = args.upstream, upstream = args.upstream,
pull_report = args.pull_report, pull_report = args.pull_report,
local_lf_report_dir = args.local_lf_report_dir,
load_old_cfg = args.load_old_cfg, load_old_cfg = args.load_old_cfg,
download_speed = args.download_speed, download_speed = args.download_speed,
upload_speed = args.upload_speed, upload_speed = args.upload_speed,
@@ -278,7 +357,9 @@ def main():
raw_lines = args.raw_line, raw_lines = args.raw_line,
raw_lines_file = args.raw_lines_file, raw_lines_file = args.raw_lines_file,
sets = args.set, sets = args.set,
graph_groups = args.graph_groups graph_groups = args.graph_groups,
test_rig=args.test_rig,
debug=args.debug
) )
CV_Test.setup() CV_Test.setup()
CV_Test.run() CV_Test.run()

View File

@@ -25,16 +25,20 @@ import pandas as pd
import pdfkit import pdfkit
import math import math
from matplotlib.colors import ListedColormap from matplotlib.colors import ListedColormap
from lf_csv import LfCSV
# internal candela references included during intial phases, to be deleted at future date # internal candela references included during intial phases, to be deleted at future date
# graph reporting classes # graph reporting classes
class lf_bar_graph(): class lf_bar_graph():
def __init__(self, _data_set=[[30, 55, 69, 37], [45, 67, 34, 22], [22, 45, 12, 34]], def __init__(self, _data_set=[[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]],
_xaxis_name="x-axis", _xaxis_name="x-axis",
_yaxis_name="y-axis", _yaxis_name="y-axis",
_xaxis_categories=[1, 2, 3, 4], _xaxis_categories=[1, 2, 3, 4, 5],
_xaxis_label=["a", "b", "c", "d", "e"],
_graph_title="",
_title_size=16,
_graph_image_name="image_name", _graph_image_name="image_name",
_label=["bi-downlink", "bi-uplink", 'uplink'], _label=["bi-downlink", "bi-uplink", 'uplink'],
_color=None, _color=None,
@@ -43,12 +47,22 @@ class lf_bar_graph():
_font_weight='bold', _font_weight='bold',
_color_name=['lightcoral', 'darkgrey', 'r', 'g', 'b', 'y'], _color_name=['lightcoral', 'darkgrey', 'r', 'g', 'b', 'y'],
_figsize=(10, 5), _figsize=(10, 5),
_dpi=96): _show_bar_value=False,
_xaxis_step=5,
_xticks_font = None,
_text_font=None,
_text_rotation=None,
_grp_title = "",
_dpi=96,
_enable_csv=False):
self.data_set = _data_set self.data_set = _data_set
self.xaxis_name = _xaxis_name self.xaxis_name = _xaxis_name
self.yaxis_name = _yaxis_name self.yaxis_name = _yaxis_name
self.xaxis_categories = _xaxis_categories self.xaxis_categories = _xaxis_categories
self.xaxis_label = _xaxis_label
self.title = _graph_title
self.title_size = _title_size
self.graph_image_name = _graph_image_name self.graph_image_name = _graph_image_name
self.label = _label self.label = _label
self.color = _color self.color = _color
@@ -57,6 +71,14 @@ class lf_bar_graph():
self.font_weight = _font_weight self.font_weight = _font_weight
self.color_name = _color_name self.color_name = _color_name
self.figsize = _figsize self.figsize = _figsize
self.show_bar_value = _show_bar_value
self.xaxis_step = _xaxis_step
self.xticks_font = _xticks_font
self.text_font = _text_font
self.text_rotation = _text_rotation
self.grp_title = _grp_title
self.enable_csv = _enable_csv
self.lf_csv = LfCSV()
def build_bar_graph(self): def build_bar_graph(self):
if self.color is None: if self.color is None:
@@ -68,31 +90,53 @@ class lf_bar_graph():
fig = plt.subplots(figsize=self.figsize) fig = plt.subplots(figsize=self.figsize)
i = 0 i = 0
def show_value(rects):
for rect in rects:
h = rect.get_height()
plt.text(rect.get_x() + rect.get_width() / 2., h, h,
ha='center', va='bottom', rotation=self.text_rotation, fontsize=self.text_font)
for data in self.data_set: for data in self.data_set:
if i > 0: if i > 0:
br = br1 br = br1
br2 = [x + self.bar_width for x in br] br2 = [x + self.bar_width for x in br]
plt.bar(br2, self.data_set[i], color=self.color[i], width=self.bar_width, rects = plt.bar(br2, self.data_set[i], color=self.color[i], width=self.bar_width,
edgecolor=self.color_edge, label=self.label[i]) edgecolor=self.color_edge, label=self.label[i])
if self.show_bar_value:
show_value(rects)
br1 = br2 br1 = br2
i = i + 1 i = i + 1
else: else:
br1 = np.arange(len(self.data_set[i])) br1 = np.arange(len(self.data_set[i]))
plt.bar(br1, self.data_set[i], color=self.color[i], width=self.bar_width, rects = plt.bar(br1, self.data_set[i], color=self.color[i], width=self.bar_width,
edgecolor=self.color_edge, label=self.label[i]) edgecolor=self.color_edge, label=self.label[i])
if self.show_bar_value:
show_value(rects)
i = i + 1 i = i + 1
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15) plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15) plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
"""plt.xticks([r + self.bar_width for r in range(len(self.data_set[0]))], if self.xaxis_categories[0] == 0:
self.xaxis_categories)""" plt.xticks(np.arange(0, len(self.xaxis_categories), step=self.xaxis_step),fontsize = self.xticks_font)
plt.xticks(np.arange(0, len(self.xaxis_categories), step=5)) else:
plt.xticks(np.arange(0, len(self.data_set[0]), step=self.xaxis_step), self.xaxis_categories,
fontsize = self.xticks_font)
plt.legend() plt.legend()
plt.suptitle(self.title, fontsize=self.title_size)
plt.title(self.grp_title)
fig = plt.gcf() fig = plt.gcf()
plt.savefig("%s.png" % self.graph_image_name, dpi=96) plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close() plt.close()
print("{}.png".format(self.graph_image_name)) print("{}.png".format(self.graph_image_name))
if self.enable_csv:
if self.data_set is not None:
self.lf_csv.columns = self.label
self.lf_csv.rows = self.data_set
self.lf_csv.filename = f"{self.graph_image_name}.csv"
self.lf_csv.generate_csv()
else:
print("No Dataset Found")
print("{}.csv".format(self.graph_image_name))
return "%s.png" % self.graph_image_name return "%s.png" % self.graph_image_name
@@ -104,9 +148,10 @@ class lf_scatter_graph():
_xaxis_name="x-axis", _xaxis_name="x-axis",
_yaxis_name="y-axis", _yaxis_name="y-axis",
_label=["num1", "num2"], _label=["num1", "num2"],
_graph_image_name="image_name", _graph_image_name="image_name1",
_color=["r", "y"], _color=["r", "y"],
_figsize=(9, 4)): _figsize=(9, 4),
_enable_csv=True):
self.x_data_set = _x_data_set self.x_data_set = _x_data_set
self.y_data_set = _y_data_set self.y_data_set = _y_data_set
self.xaxis_name = _xaxis_name self.xaxis_name = _xaxis_name
@@ -116,6 +161,8 @@ class lf_scatter_graph():
self.color = _color self.color = _color
self.label = _label self.label = _label
self.values = _values self.values = _values
self.enable_csv = _enable_csv
self.lf_csv = LfCSV()
def build_scatter_graph(self): def build_scatter_graph(self):
if self.color is None: if self.color is None:
@@ -140,6 +187,11 @@ class lf_scatter_graph():
plt.savefig("%s.png" % self.graph_image_name, dpi=96) plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close() plt.close()
print("{}.png".format(self.graph_image_name)) print("{}.png".format(self.graph_image_name))
if self.enable_csv:
self.lf_csv.columns = self.label
self.lf_csv.rows = self.y_data_set
self.lf_csv.filename = f"{self.graph_image_name}.csv"
self.lf_csv.generate_csv()
return "%s.png" % self.graph_image_name return "%s.png" % self.graph_image_name
@@ -150,9 +202,10 @@ class lf_stacked_graph():
_xaxis_name="Stations", _xaxis_name="Stations",
_yaxis_name="Numbers", _yaxis_name="Numbers",
_label=['Success', 'Fail'], _label=['Success', 'Fail'],
_graph_image_name="image_name", _graph_image_name="image_name2",
_color=["b", "g"], _color=["b", "g"],
_figsize=(9, 4)): _figsize=(9, 4),
_enable_csv=True):
self.data_set = _data_set # [x_axis,y1_axis,y2_axis] self.data_set = _data_set # [x_axis,y1_axis,y2_axis]
self.xaxis_name = _xaxis_name self.xaxis_name = _xaxis_name
self.yaxis_name = _yaxis_name self.yaxis_name = _yaxis_name
@@ -160,6 +213,8 @@ class lf_stacked_graph():
self.graph_image_name = _graph_image_name self.graph_image_name = _graph_image_name
self.label = _label self.label = _label
self.color = _color self.color = _color
self.enable_csv = _enable_csv
self.lf_csv = LfCSV()
def build_stacked_graph(self): def build_stacked_graph(self):
fig = plt.subplots(figsize=self.figsize) fig = plt.subplots(figsize=self.figsize)
@@ -177,7 +232,11 @@ class lf_stacked_graph():
plt.savefig("%s.png" % (self.graph_image_name), dpi=96) plt.savefig("%s.png" % (self.graph_image_name), dpi=96)
plt.close() plt.close()
print("{}.png".format(self.graph_image_name)) print("{}.png".format(self.graph_image_name))
if self.enable_csv:
self.lf_csv.columns = self.label
self.lf_csv.rows = self.data_set
self.lf_csv.filename = f"{self.graph_image_name}.csv"
self.lf_csv.generate_csv()
return "%s.png" % (self.graph_image_name) return "%s.png" % (self.graph_image_name)
@@ -190,10 +249,11 @@ class lf_horizontal_stacked_graph():
_unit="%", _unit="%",
_xaxis_name="Stations", _xaxis_name="Stations",
_label=['Success', 'Fail'], _label=['Success', 'Fail'],
_graph_image_name="image_name", _graph_image_name="image_name3",
_color=["success", "Fail"], _color=["success", "Fail"],
_figsize=(9, 4), _figsize=(9, 4),
_disable_xaxis=False): _disable_xaxis=False,
_enable_csv=True):
self.unit = _unit self.unit = _unit
self.seg = _seg self.seg = _seg
self.xaxis_set1 = _xaxis_set1 self.xaxis_set1 = _xaxis_set1
@@ -205,6 +265,8 @@ class lf_horizontal_stacked_graph():
self.label = _label self.label = _label
self.color = _color self.color = _color
self.disable_xaxis = _disable_xaxis self.disable_xaxis = _disable_xaxis
self.enable_csv = _enable_csv
self.lf_csv = LfCSV()
def build_horizontal_stacked_graph(self): def build_horizontal_stacked_graph(self):
def sumzip(items): def sumzip(items):
@@ -246,7 +308,11 @@ class lf_horizontal_stacked_graph():
plt.savefig("%s.png" % self.graph_image_name, dpi=96) plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close() plt.close()
print("{}.png".format(self.graph_image_name)) print("{}.png".format(self.graph_image_name))
if self.enable_csv:
self.lf_csv.columns = self.label
self.lf_csv.rows = self.data_set
self.lf_csv.filename = f"{self.graph_image_name}.csv"
self.lf_csv.generate_csv()
return "%s.png" % self.graph_image_name return "%s.png" % self.graph_image_name
@@ -261,7 +327,7 @@ if __name__ == "__main__":
<img align='center' style='padding:15;margin:5;width:1000px;' src=""" + "%s" % (graph.build_bar_graph()) + """ border='1' /> <img align='center' style='padding:15;margin:5;width:1000px;' src=""" + "%s" % (graph.build_bar_graph()) + """ border='1' />
<br><br> <br><br>
""" """
# #
test_file = open(output_html_1, "w") test_file = open(output_html_1, "w")
test_file.write(graph_html_obj) test_file.write(graph_html_obj)
test_file.close() test_file.close()
@@ -293,7 +359,7 @@ if __name__ == "__main__":
<img align='center' style='padding:15;margin:5;width:1000px;' src=""" + "%s" % (graph.build_bar_graph()) + """ border='1' /> <img align='center' style='padding:15;margin:5;width:1000px;' src=""" + "%s" % (graph.build_bar_graph()) + """ border='1' />
<br><br> <br><br>
""" """
# #
test_file = open(output_html_2, "w") test_file = open(output_html_2, "w")
test_file.write(graph_html_obj) test_file.write(graph_html_obj)
test_file.close() test_file.close()

View File

@@ -0,0 +1,12 @@
{
"influx_host":"192.168.100.201",
"influx_port": "8086",
"influx_org": "Candela",
"influx_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
"influx_bucket": "ben",
"influx_tag": "testbed Ferndale-01"
}

View File

@@ -28,107 +28,145 @@ INCLUDE_IN_README
import os import os
import shutil import shutil
import datetime import datetime
import pandas as pd import pandas as pd
import pdfkit import pdfkit
# internal candela references included during intial phases, to be deleted at future date # internal candela references included during intial phases, to be deleted at future date
# https://candelatech.atlassian.net/wiki/spaces/LANFORGE/pages/372703360/Scripting+Data+Collection+March+2021 # https://candelatech.atlassian.net/wiki/spaces/LANFORGE/pages/372703360/Scripting+Data+Collection+March+2021
# base report class # base report class
class lf_report(): class lf_report():
def __init__(self, def __init__(self,
#_path the report directory under which the report directories will be created. # _path the report directory under which the report directories will be created.
_path = "/home/lanforge/html-reports", _path="/home/lanforge/html-reports",
_alt_path = "", _alt_path="",
_date = "", _date="",
_title="LANForge Test Run Heading", _title="LANForge Test Run Heading",
_table_title="LANForge Table Heading", _table_title="LANForge Table Heading",
_graph_title="LANForge Graph Title", _graph_title="LANForge Graph Title",
_obj = "", _obj="",
_obj_title = "", _obj_title="",
_output_html="outfile.html", _output_html="outfile.html",
_output_pdf="outfile.pdf", _output_pdf="outfile.pdf",
_results_dir_name = "LANforge_Test_Results", _results_dir_name="LANforge_Test_Results",
_output_format = 'html', # pass in on the write functionality, current not used _output_format='html', # pass in on the write functionality, current not used
_dataframe="", _dataframe="",
_path_date_time=""): # this is where the final report is placed. _path_date_time="",
#other report paths, _custom_css='custom-example.css'): # this is where the final report is placed.
# other report paths,
# _path is where the directory with the data time will be created # _path is where the directory with the data time will be created
if _path == "local" or _path == "here": if _path == "local" or _path == "here":
self.path = os.path.abspath(__file__) self.path = os.path.abspath(__file__)
print("path set to file path: {}".format(self.path)) print("path set to file path: {}".format(self.path))
elif _alt_path != "": elif _alt_path != "":
self.path = _alt_path self.path = _alt_path
print("path set to alt path: {}".format(self.path)) print("path set to alt path: {}".format(self.path))
else: else:
self.path = _path self.path = _path
print("path set: {}".format(self.path)) print("path set: {}".format(self.path))
self.dataframe=_dataframe
self.text = ""
self.title=_title
self.table_title=_table_title
self.graph_title=_graph_title
self.date=_date
self.output_html=_output_html
self.path_date_time = _path_date_time
self.write_output_html = ""
self.output_pdf=_output_pdf
self.write_output_pdf = ""
self.banner_html = ""
self.graph_titles=""
self.graph_image=""
self.html = ""
self.custom_html = ""
self.objective = _obj
self.obj_title = _obj_title
#self.systeminfopath = ""
self.date_time_directory = ""
self.banner_directory = "artifacts"
self.banner_file_name = "banner.png" # does this need to be configurable
self.logo_directory = "artifacts"
self.logo_file_name = "CandelaLogo2-90dpi-200x90-trans.png" # does this need to be configurable.
self.current_path = os.path.dirname(os.path.abspath(__file__))
# pass in _date to allow to change after construction self.dataframe = _dataframe
self.set_date_time_directory(_date,_results_dir_name) self.text = ""
self.build_date_time_directory() self.title = _title
self.table_title = _table_title
self.graph_title = _graph_title
self.date = _date
self.output_html = _output_html
self.path_date_time = _path_date_time
self.write_output_html = ""
self.output_pdf = _output_pdf
self.write_output_pdf = ""
self.banner_html = ""
self.footer_html = ""
self.graph_titles = ""
self.graph_image = ""
self.csv_file_name = ""
self.html = ""
self.custom_html = ""
self.objective = _obj
self.obj_title = _obj_title
# self.systeminfopath = ""
self.date_time_directory = ""
self.banner_directory = "artifacts"
self.banner_file_name = "banner.png" # does this need to be configurable
self.logo_directory = "artifacts"
self.logo_file_name = "CandelaLogo2-90dpi-200x90-trans.png" # does this need to be configurable.
self.logo_footer_file_name = "candela_swirl_small-72h.png" # does this need to be configurable.
self.current_path = os.path.dirname(os.path.abspath(__file__))
self.custom_css = _custom_css
# pass in _date to allow to change after construction
self.set_date_time_directory(_date, _results_dir_name)
self.build_date_time_directory()
self.font_file = "CenturyGothic.woff"
# move the banners and candela images to report path
self.copy_banner()
self.copy_css()
self.copy_logo()
self.copy_logo_footer()
# move the banners and candela images to report path
self.copy_banner()
self.copy_logo()
def copy_banner(self): def copy_banner(self):
banner_src_file = str(self.current_path)+'/'+str(self.banner_directory)+'/'+str(self.banner_file_name) banner_src_file = str(self.current_path) + '/' + str(self.banner_directory) + '/' + str(self.banner_file_name)
banner_dst_file = str(self.path_date_time)+'/'+ str(self.banner_file_name) banner_dst_file = str(self.path_date_time) + '/' + str(self.banner_file_name)
#print("banner src_file: {}".format(banner_src_file)) # print("banner src_file: {}".format(banner_src_file))
#print("dst_file: {}".format(banner_dst_file)) # print("dst_file: {}".format(banner_dst_file))
shutil.copy(banner_src_file,banner_dst_file) shutil.copy(banner_src_file, banner_dst_file)
def copy_css(self):
reportcss_src_file = str(self.current_path) + '/' + str(self.banner_directory) + '/report.css'
reportcss_dest_file = str(self.path_date_time) + '/report.css'
customcss_src_file = str(self.current_path) + '/' + str(self.banner_directory) + '/' + str(self.custom_css)
customcss_dest_file = str(self.path_date_time) + '/custom.css'
font_src_file = str(self.current_path) + '/' + str(self.banner_directory) + '/' + str(self.font_file)
font_dest_file = str(self.path_date_time) + '/' + str(self.font_file)
shutil.copy(reportcss_src_file, reportcss_dest_file)
shutil.copy(customcss_src_file, customcss_dest_file)
shutil.copy(font_src_file, font_dest_file)
def copy_logo(self): def copy_logo(self):
logo_src_file = str(self.current_path)+'/'+str(self.logo_directory)+'/'+str(self.logo_file_name) logo_src_file = str(self.current_path) + '/' + str(self.logo_directory) + '/' + str(self.logo_file_name)
logo_dst_file = str(self.path_date_time)+'/'+ str(self.logo_file_name) logo_dst_file = str(self.path_date_time) + '/' + str(self.logo_file_name)
#print("logo_src_file: {}".format(logo_src_file)) # print("logo_src_file: {}".format(logo_src_file))
#print("logo_dst_file: {}".format(logo_dst_file)) # print("logo_dst_file: {}".format(logo_dst_file))
shutil.copy(logo_src_file,logo_dst_file) shutil.copy(logo_src_file, logo_dst_file)
def move_graph_image(self,): def copy_logo_footer(self):
logo_footer_src_file = str(self.current_path) + '/' + str(self.logo_directory) + '/' + str(
self.logo_footer_file_name)
logo_footer_dst_file = str(self.path_date_time) + '/' + str(self.logo_footer_file_name)
# print("logo_footer_src_file: {}".format(logo_footer_src_file))
# print("logo_footer_dst_file: {}".format(logo_footer_dst_file))
shutil.copy(logo_footer_src_file, logo_footer_dst_file)
def move_graph_image(self, ):
graph_src_file = str(self.graph_image) graph_src_file = str(self.graph_image)
graph_dst_file = str(self.path_date_time)+'/'+ str(self.graph_image) graph_dst_file = str(self.path_date_time) + '/' + str(self.graph_image)
print("graph_src_file: {}".format(graph_src_file)) print("graph_src_file: {}".format(graph_src_file))
print("graph_dst_file: {}".format(graph_dst_file)) print("graph_dst_file: {}".format(graph_dst_file))
shutil.move(graph_src_file,graph_dst_file) shutil.move(graph_src_file, graph_dst_file)
def set_path(self,_path): def move_csv_file(self):
csv_src_file = str(self.csv_file_name)
csv_dst_file = str(self.path_date_time) + '/' + str(self.csv_file_name)
print("csv_src_file: {}".format(csv_src_file))
print("csv_dst_file: {}".format(csv_dst_file))
shutil.move(csv_src_file, csv_dst_file)
def set_path(self, _path):
self.path = _path self.path = _path
def set_date_time_directory(self,_date,_results_dir_name): def set_date_time_directory(self, _date, _results_dir_name):
self.date = _date self.date = _date
self.results_dir_name = _results_dir_name self.results_dir_name = _results_dir_name
if self.date != "": if self.date != "":
self.date_time_directory = str(self.date) + str("_") + str(self.results_dir_name) self.date_time_directory = str(self.date) + str("_") + str(self.results_dir_name)
else: else:
self.date = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")).replace(':','-') self.date = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")).replace(':', '-')
self.date_time_directory = self.date + str("_") + str(self.results_dir_name) self.date_time_directory = self.date + str("_") + str(self.results_dir_name)
def build_date_time_directory(self): def build_date_time_directory(self):
@@ -136,49 +174,59 @@ class lf_report():
self.set_date_time_directory() self.set_date_time_directory()
self.path_date_time = os.path.join(self.path, self.date_time_directory) self.path_date_time = os.path.join(self.path, self.date_time_directory)
print("path_date_time {}".format(self.path_date_time)) print("path_date_time {}".format(self.path_date_time))
try: try:
if not os.path.exists(self.path_date_time): if not os.path.exists(self.path_date_time):
os.mkdir(self.path_date_time) os.mkdir(self.path_date_time)
except: except:
self.path_date_time = os.path.join(self.current_path, self.date_time_directory) self.path_date_time = os.path.join(self.current_path, self.date_time_directory)
if not os.path.exists(self.path_date_time): if not os.path.exists(self.path_date_time):
os.mkdir(self.path_date_time) os.mkdir(self.path_date_time)
print("report path : {}".format(self.path_date_time)) print("report path : {}".format(self.path_date_time))
def set_text(self,_text): def set_text(self, _text):
self.text = _text self.text = _text
def set_title(self,_title): def set_title(self, _title):
self.title = _title self.title = _title
def set_table_title(self,_table_title): def set_table_title(self, _table_title):
self.table_title = _table_title self.table_title = _table_title
def set_graph_title(self,_graph_title): def set_graph_title(self, _graph_title):
self.graph_title = _graph_title self.graph_title = _graph_title
def set_date(self,_date): # sets the csv file name as graph title
def set_csv_filename(self, _graph_title):
fname, ext = os.path.splitext(_graph_title)
self.csv_file_name = fname + ".csv"
# The _date is set when class is enstanciated / created so this set_date should be used with caution, used to synchronize results
def set_date(self, _date):
self.date = _date self.date = _date
def set_table_dataframe(self,_dataframe): def set_table_dataframe(self, _dataframe):
self.dataframe = _dataframe self.dataframe = _dataframe
def set_table_dataframe_from_csv(self,_csv): def set_table_dataframe_from_csv(self, _csv):
self.dataframe = pd.read_csv(_csv) self.dataframe = pd.read_csv(_csv)
def set_custom_html(self,_custom_html): def set_custom_html(self, _custom_html):
self.custom_html = _custom_html self.custom_html = _custom_html
def set_obj_html(self,_obj_title, _obj ): def set_obj_html(self, _obj_title, _obj):
self.objective = _obj self.objective = _obj
self.obj_title = _obj_title self.obj_title = _obj_title
def set_graph_image(self,_graph_image): def set_graph_image(self, _graph_image):
self.graph_image = _graph_image self.graph_image = _graph_image
def get_date(self):
return self.date
def get_path(self): def get_path(self):
return self.path return self.path
# get_path_date_time, get_report_path and need to be the same ()
# get_path_date_time, get_report_path and need to be the same
def get_path_date_time(self): def get_path_date_time(self):
return self.path_date_time return self.path_date_time
@@ -186,12 +234,12 @@ class lf_report():
return self.path_date_time return self.path_date_time
def file_add_path(self, file): def file_add_path(self, file):
output_file = str(self.path_date_time)+'/'+ str(file) output_file = str(self.path_date_time) + '/' + str(file)
print("output file {}".format(output_file)) print("output file {}".format(output_file))
return output_file return output_file
def write_html(self): def write_html(self):
self.write_output_html = str(self.path_date_time)+'/'+ str(self.output_html) self.write_output_html = str(self.path_date_time) + '/' + str(self.output_html)
print("write_output_html: {}".format(self.write_output_html)) print("write_output_html: {}".format(self.write_output_html))
try: try:
test_file = open(self.write_output_html, "w") test_file = open(self.write_output_html, "w")
@@ -201,8 +249,8 @@ class lf_report():
print("write_html failed") print("write_html failed")
return self.write_output_html return self.write_output_html
def write_html_with_timestamp(self): def write_html_with_timestamp(self):
self.write_output_html = "{}/{}-{}".format(self.path_date_time,self.date,self.output_html) self.write_output_html = "{}/{}-{}".format(self.path_date_time, self.date, self.output_html)
print("write_output_html: {}".format(self.write_output_html)) print("write_output_html: {}".format(self.write_output_html))
try: try:
test_file = open(self.write_output_html, "w") test_file = open(self.write_output_html, "w")
@@ -212,161 +260,232 @@ class lf_report():
print("write_html failed") print("write_html failed")
return self.write_output_html return self.write_output_html
# https://wkhtmltopdf.org/usage/wkhtmltopdf.txt
# page_size A4, A3, Letter, Legal
# orientation Portrait , Landscape
def write_pdf(self, _page_size='A4', _orientation='Portrait'):
# write logic to generate pdf here
# wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
# sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
options = {"enable-local-file-access": None,
'orientation': _orientation,
'page-size': _page_size} # prevent error Blocked access to file
self.write_output_pdf = str(self.path_date_time) + '/' + str(self.output_pdf)
pdfkit.from_file(self.write_output_html, self.write_output_pdf, options=options)
# https://wkhtmltopdf.org/usage/wkhtmltopdf.txt # https://wkhtmltopdf.org/usage/wkhtmltopdf.txt
# page_size A4, A3, Letter, Legal # page_size A4, A3, Letter, Legal
# orientation Portrait , Landscape # orientation Portrait , Landscape
def write_pdf(self, _page_size = 'A4', _orientation = 'Portrait'): def write_pdf_with_timestamp(self, _page_size='A4', _orientation='Portrait'):
# write logic to generate pdf here # write logic to generate pdf here
# wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb # wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
# sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb # sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
options = {"enable-local-file-access" : None,
'orientation': _orientation,
'page-size': _page_size} # prevent error Blocked access to file
self.write_output_pdf = str(self.path_date_time)+'/'+ str(self.output_pdf)
pdfkit.from_file(self.write_output_html, self.write_output_pdf, options=options)
# https://wkhtmltopdf.org/usage/wkhtmltopdf.txt
# page_size A4, A3, Letter, Legal
# orientation Portrait , Landscape
def write_pdf_with_timestamp(self, _page_size = 'A4', _orientation = 'Portrait'):
# write logic to generate pdf here
# wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
# sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
options = {"enable-local-file-access" : None,
'orientation': _orientation,
'page-size': _page_size} # prevent error Blocked access to file
self.write_output_pdf = "{}/{}-{}".format(self.path_date_time,self.date,self.output_pdf)
pdfkit.from_file(self.write_output_html, self.write_output_pdf, options=options)
options = {"enable-local-file-access": None,
'orientation': _orientation,
'page-size': _page_size} # prevent error Blocked access to file
self.write_output_pdf = "{}/{}-{}".format(self.path_date_time, self.date, self.output_pdf)
pdfkit.from_file(self.write_output_html, self.write_output_pdf, options=options)
def generate_report(self): def generate_report(self):
self.write_html() self.write_html()
self.write_pdf() self.write_pdf()
def build_all(self): def build_all(self):
self.build_banner() self.build_banner()
self.start_content_div()
self.build_table_title() self.build_table_title()
self.build_table() self.build_table()
self.end_content_div()
def build_banner(self): def build_banner(self):
self.banner_html = """ # NOTE: {{ }} are the ESCAPED curly braces
<!DOCTYPE html> self.banner_html = """<!DOCTYPE html>
<html lang='en'> <html lang='en'>
<head> <head>
<meta charset='UTF-8'> <meta charset='UTF-8'>
<meta name='viewport' content='width=device-width, initial-scale=1' /> <meta name='viewport' content='width=device-width, initial-scale=1' />
<br> <style>
</head> body {{ margin: 0; padding: 0; }}
</style>
<title>BANNER </title></head> <link rel='stylesheet' href='report.css' />
<body> <link rel='stylesheet' href='custom.css' />
<div class='Section report_banner-1000x205' style='background-image:url("banner.png");background-repeat:no-repeat;padding:0;margin:0;min-width:1000px; min-height:205px;width:1000px; height:205px;max-width:1000px; max-height:205px;'> <title>{title}</title>
<br> </head>
<img align='right' style='padding:25;margin:5;width:200px;' src="CandelaLogo2-90dpi-200x90-trans.png" border='0' /> <body>
<div id='BannerBack'>
<div class='HeaderStyle'> <div id='Banner'>
<br> <br/>
<h1 class='TitleFontPrint' style='color:darkgreen;'>""" + str(self.title) + """</h1> <img id='BannerLogo' align='right' src="CandelaLogo2-90dpi-200x90-trans.png" border='0'/>
<h3 class='TitleFontPrint' style='color:darkgreen;'>""" + str(self.date) + """</h3> <div class='HeaderStyle'>
<br> <br>
<br> <h1 class='TitleFontPrint' style='color:darkgreen;'> {title} </h1>
<br> <h3 class='TitleFontPrint' style='color:darkgreen;'>{date}</h3>
<br> </div>
<br> </div>
</div> </div>
""" """.format(
title=self.title,
date=self.date,
)
self.html += self.banner_html self.html += self.banner_html
def build_table_title(self): def build_table_title(self):
self.table_title_html = """ self.table_title_html = """
<html lang='en'> <!-- Table Title-->
<head> <h3 align='left'>{title}</h3>
<meta charset='UTF-8'> """.format(title=self.table_title)
<meta name='viewport' content='width=device-width, initial-scale=1' />
<div class='HeaderStyle'>
<h2 class='TitleFontPrint' style='color:darkgreen;'>""" + str(self.table_title) + """</h2>
"""
self.html += self.table_title_html self.html += self.table_title_html
def start_content_div(self):
self.html += "\n<div class='contentDiv'>\n"
def build_text(self): def build_text(self):
# please do not use 'style=' tags unless you cannot override a class
self.text_html = """ self.text_html = """
<html lang='en'> <div class='HeaderStyle'>
<head> <h3 class='TitleFontPrint'>{text}</h3>\n
<meta charset='UTF-8'> </div>""".format(text=self.text)
<meta name='viewport' content='width=device-width, initial-scale=1' />
<div class='HeaderStyle'>
<h3 class='TitleFontPrint' style='color:darkgreen;'>""" + str(self.text) + """</h3>
"""
self.html += self.text_html self.html += self.text_html
def build_date_time(self): def build_date_time(self):
self.date_time = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-h-%m-m-%S-s")).replace(':','-') self.date_time = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-h-%m-m-%S-s")).replace(':', '-')
return self.date_time return self.date_time
def build_path_date_time(self): def build_path_date_time(self):
try: try:
self.path_date_time = os.path.join(self.path,self.date_time) self.path_date_time = os.path.join(self.path, self.date_time)
os.mkdir(self.path_date_time) os.mkdir(self.path_date_time)
except: except:
curr_dir_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) curr_dir_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
self.path_date_time = os.path.join(curr_dir_path,self.date_time) self.path_date_time = os.path.join(curr_dir_path, self.date_time)
os.mkdir(self.path_date_time) os.mkdir(self.path_date_time)
def build_table(self): def build_table(self):
self.dataframe_html = self.dataframe.to_html(index=False, justify='center') # have the index be able to be passed in. self.dataframe_html = self.dataframe.to_html(index=False,
justify='center') # have the index be able to be passed in.
self.html += self.dataframe_html self.html += self.dataframe_html
def test_setup_table(self, test_setup_data, value):
    """Append a 'Test Setup Information' HTML table to the report body.

    test_setup_data: mapping of setup-field name -> value; None skips the
        table entirely.
    value: label placed in the left-hand cell of the outer table
        (presumably the device/DUT name — confirm against callers).
    Returns None in the skip case; otherwise appends to self.html.
    """
    if test_setup_data is None:
        # Nothing to render; leave self.html untouched.
        return None
    else:
        # One inner-table row per setup item, concatenated in dict order.
        var = ""
        for i in test_setup_data:
            var = var + "<tr><td>" + i + "</td><td colspan='3'>" + str(test_setup_data[i]) + "</td></tr>"
        # Outer table: the label cell on the left, the item rows nested on the right.
        setup_information = """
        <!-- Test Setup Information -->
        <table width='700px' border='1' cellpadding='2' cellspacing='0' style='border-top-color: gray; border-top-style: solid; border-top-width: 1px; border-right-color: gray; border-right-style: solid; border-right-width: 1px; border-bottom-color: gray; border-bottom-style: solid; border-bottom-width: 1px; border-left-color: gray; border-left-style: solid; border-left-width: 1px'>
        <tr>
        <td>""" + str(value) + """</td>
        <td>
            <table width='100%' border='0' cellpadding='2' cellspacing='0' style='border-top-color: gray; border-top-style: solid; border-top-width: 1px; border-right-color: gray; border-right-style: solid; border-right-width: 1px; border-bottom-color: gray; border-bottom-style: solid; border-bottom-width: 1px; border-left-color: gray; border-left-style: solid; border-left-width: 1px'>
            """ + var + """
            </table>
        </td>
        </tr>
        </table>
        <br>
        """
        self.html += setup_information
def build_footer(self):
    """Append the standard report footer (with the Candela swirl logo).

    The assembled fragment is kept in self.footer_html and appended to
    self.html.
    """
    # NOTE: this string is emitted verbatim (it is never passed through
    # str.format()), so the CSS braces must be single — doubled '{{ }}'
    # would leak into the rendered page as invalid CSS.
    self.footer_html = """<!DOCTYPE html>
    <html lang='en'>
    <footer>
        <meta charset='UTF-8'>
        <meta name='viewport' content='width=device-width, initial-scale=1' />
        <style>
        body { margin: 0; padding: 0; }
        </style>
        <link rel='stylesheet' href='report.css' />
        <link rel='stylesheet' href='custom.css' />
    </footer>
    <body>
        <div class='FooterStyle'>
            <a href="https://www.candelatech.com/">
            <img id='BannerLogoFooter' align='right' src="candela_swirl_small-72h.png" border='0'/></a>
            <p>Generated by Candela Technologies LANforge network testing tool</p>
            <p><a href="https://www.candelatech.com">www.candelatech.com</a></p>
        </div>
    </body>
    """
    self.html += self.footer_html
def build_footer_no_png(self):
    """Append the report footer without the Candela logo image.

    Useful when the swirl PNG is not copied next to the generated HTML.
    The assembled fragment is kept in self.footer_html and appended to
    self.html.
    """
    # NOTE: emitted verbatim (no str.format()), so CSS braces stay single;
    # doubled '{{ }}' would appear literally in the page.
    self.footer_html = """<!DOCTYPE html>
    <html lang='en'>
    <footer>
        <meta charset='UTF-8'>
        <meta name='viewport' content='width=device-width, initial-scale=1' />
        <style>
        body { margin: 0; padding: 0; }
        </style>
        <link rel='stylesheet' href='report.css' />
        <link rel='stylesheet' href='custom.css' />
    </footer>
    <body>
        <div class='FooterStyle'>
            <p>Generated by Candela Technologies LANforge network testing tool</p>
            <p><a href="https://www.candelatech.com">www.candelatech.com</a></p>
        </div>
    </body>
    """
    self.html += self.footer_html
def build_custom(self): def build_custom(self):
self.html += self.custom_html self.html += self.custom_html
def build_objective(self): def build_objective(self):
self.obj_html = """ self.obj_html = """
<!-- Test Objective --> <!-- Test Objective -->
<h3 align='left'>""" + str(self.obj_title) + """</h3> <h3 align='left'>{title}</h3>
<p align='left' width='900'>""" + str(self.objective) + """</p> <p align='left' width='900'>{objective}</p>
""" """.format(title=self.obj_title,
objective=self.objective)
self.html += self.obj_html self.html += self.obj_html
def build_graph_title(self): def build_graph_title(self):
self.table_graph_html = """ self.table_graph_html = """
<html lang='en'> <div class='HeaderStyle'>
<head> <h2 class='TitleFontPrint' style='color:darkgreen;'>{title}</h2>
<meta charset='UTF-8'> """.format(title=self.graph_title)
<meta name='viewport' content='width=device-width, initial-scale=1' />
<div class='HeaderStyle'>
<h2 class='TitleFontPrint' style='color:darkgreen;'>""" + str(self.graph_title) + """</h2>
"""
self.html += self.table_graph_html self.html += self.table_graph_html
def build_graph(self): def build_graph(self):
self.graph_html_obj = """ self.graph_html_obj = """
<img align='center' style='padding:15;margin:5;width:1000px;' src=""" + "%s" % (self.graph_image) + """ border='1' /> <img align='center' style='padding:15px;margin:5px 5px 2em 5px;width:1000px;' src='{image}' border='1' />
<br><br> """.format(image=self.graph_image)
""" self.html += self.graph_html_obj
self.html +=self.graph_html_obj
def end_content_div(self):
    """Close the HTML content <div> opened by start_content_div()."""
    closing_tag = "\n</div><!-- end contentDiv -->\n"
    self.html = self.html + closing_tag
# Unit Test # Unit Test
if __name__ == "__main__": if __name__ == "__main__":
# Testing: generate data frame
# Testing: generate data frame
dataframe = pd.DataFrame({ dataframe = pd.DataFrame({
'product':['CT521a-264-1ac-1n','CT521a-1ac-1ax','CT522-264-1ac2-1n','CT523c-2ac2-db-10g-cu','CT523c-3ac2-db-10g-cu','CT523c-8ax-ac10g-cu','CT523c-192-2ac2-1ac-10g'], 'product': ['CT521a-264-1ac-1n', 'CT521a-1ac-1ax', 'CT522-264-1ac2-1n', 'CT523c-2ac2-db-10g-cu',
'radios':[1,1,2,2,6,9,3], 'CT523c-3ac2-db-10g-cu', 'CT523c-8ax-ac10g-cu', 'CT523c-192-2ac2-1ac-10g'],
'MIMO':['N','N','N','Y','Y','Y','Y'], 'radios': [1, 1, 2, 2, 6, 9, 3],
'stations':[200,64,200,128,384,72,192], 'MIMO': ['N', 'N', 'N', 'Y', 'Y', 'Y', 'Y'],
'mbps':[300,300,300,10000,10000,10000,10000] 'stations': [200, 64, 200, 128, 384, 72, 192],
'mbps': [300, 300, 300, 10000, 10000, 10000, 10000]
}) })
print(dataframe) print(dataframe)
# Testing: generate data frame # Testing: generate data frame
dataframe2 = pd.DataFrame({ dataframe2 = pd.DataFrame({
'station':[1,2,3,4,5,6,7], 'station': [1, 2, 3, 4, 5, 6, 7],
'time_seconds':[23,78,22,19,45,22,25] 'time_seconds': [23, 78, 22, 19, 45, 22, 25]
}) })
report = lf_report() report = lf_report()
@@ -385,11 +504,14 @@ if __name__ == "__main__":
report.set_table_dataframe(dataframe2) report.set_table_dataframe(dataframe2)
report.build_table() report.build_table()
#report.build_all() # report.build_all()
# report.build_footer()
report.build_footer_no_png()
html_file = report.write_html() html_file = report.write_html()
print("returned file ") print("returned file ")
print(html_file) print(html_file)
report.write_pdf() report.write_pdf()
print("report path {}".format(report.get_path())) print("report path {}".format(report.get_path()))

View File

@@ -36,7 +36,7 @@ if __name__ == "__main__":
'radios': [1, 1, 2, 2, 6, 9, 3], 'radios': [1, 1, 2, 2, 6, 9, 3],
'MIMO': ['N', 'N', 'N', 'Y', 'Y', 'Y', 'Y'], 'MIMO': ['N', 'N', 'N', 'Y', 'Y', 'Y', 'Y'],
'stations': [200, 64, 200, 128, 384, 72, 192], 'stations': [200, 64, 200, 128, 384, 72, 192],
'mbps': [300, 300, 300, 10000, 10000, 10000, 10000] '1 mbps': [300, 300, 300, 10000, 10000, 10000, 10000]
}) })
print(dataframe) print(dataframe)
@@ -96,7 +96,7 @@ if __name__ == "__main__":
_xaxis_categories=x_axis_values, _xaxis_categories=x_axis_values,
_graph_image_name="Bi-single_radio_2.4GHz", _graph_image_name="Bi-single_radio_2.4GHz",
_label=["bi-downlink", "bi-uplink", 'uplink'], _label=["bi-downlink", "bi-uplink", 'uplink'],
_color=None, _color=['darkorange', 'forestgreen','blueviolet'],
_color_edge='red') _color_edge='red')
graph_png = graph.build_bar_graph() graph_png = graph.build_bar_graph()

View File

@@ -10,7 +10,7 @@ Note: To Run this script gui should be opened with
This script is used to automate running Rate-vs-Range tests. You This script is used to automate running Rate-vs-Range tests. You
may need to view a Rate-vs-Range test configured through the GUI to understand may need to view a Rate-vs-Range test configured through the GUI to understand
the options and how best to input data. the options and how best to input data.
./lf_rvr_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \ ./lf_rvr_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \
--instance_name rvr-instance --config_name test_con --upstream 1.1.eth1 \ --instance_name rvr-instance --config_name test_con --upstream 1.1.eth1 \
--dut RootAP --duration 15s --station 1.1.wlan0 \ --dut RootAP --duration 15s --station 1.1.wlan0 \
@@ -30,7 +30,7 @@ the options and how best to input data.
Note: Note:
attenuator_mod: selects the attenuator modules, bit-field. attenuator_mod: selects the attenuator modules, bit-field.
This example uses 3, which is first two attenuator modules on Attenuator ID 1040. This example uses 3, which is first two attenuator modules on Attenuator ID 1040.
--raw_line 'line contents' will add any setting to the test config. This is --raw_line 'line contents' will add any setting to the test config. This is
useful way to support any options not specifically enabled by the useful way to support any options not specifically enabled by the
command options. command options.
@@ -45,7 +45,7 @@ show_log: 0
port_sorting: 0 port_sorting: 0
kpi_id: Rate vs Range kpi_id: Rate vs Range
bg: 0xE0ECF8 bg: 0xE0ECF8
test_rig: test_rig:
show_scan: 1 show_scan: 1
auto_helper: 0 auto_helper: 0
skip_2: 0 skip_2: 0
@@ -83,7 +83,7 @@ attenuations: 0..+50..950
attenuations2: 0..+50..950 attenuations2: 0..+50..950
chamber: 0 chamber: 0
tt_deg: 0..+45..359 tt_deg: 0..+45..359
cust_pkt_sz: cust_pkt_sz:
show_bar_labels: 1 show_bar_labels: 1
show_prcnt_tput: 0 show_prcnt_tput: 0
show_3s: 0 show_3s: 0
@@ -93,7 +93,7 @@ show_1m: 1
pause_iter: 0 pause_iter: 0
outer_loop_atten: 0 outer_loop_atten: 0
show_realtime: 1 show_realtime: 1
operator: operator:
mconn: 1 mconn: 1
mpkt: 1000 mpkt: 1000
tos: 0 tos: 0
@@ -118,10 +118,14 @@ if 'py-json' not in sys.path:
from cv_test_manager import cv_test as cvtest from cv_test_manager import cv_test as cvtest
from cv_test_manager import * from cv_test_manager import *
class RvrTest(cvtest): class RvrTest(cvtest):
def __init__(self, def __init__(self,
lf_host="localhost", lf_host="localhost",
lf_port=8080, lf_port=8080,
ssh_port=22,
local_path="",
graph_groups=None,
lf_user="lanforge", lf_user="lanforge",
lf_password="lanforge", lf_password="lanforge",
instance_name="rvr_instance", instance_name="rvr_instance",
@@ -145,7 +149,7 @@ class RvrTest(cvtest):
self.lf_host = lf_host self.lf_host = lf_host
self.lf_port = lf_port self.lf_port = lf_port
self.lf_user = lf_user self.lf_user = lf_user
self.lf_password =lf_password self.lf_password = lf_password
self.instance_name = instance_name self.instance_name = instance_name
self.config_name = config_name self.config_name = config_name
self.dut = dut self.dut = dut
@@ -162,12 +166,14 @@ class RvrTest(cvtest):
self.raw_lines = raw_lines self.raw_lines = raw_lines
self.raw_lines_file = raw_lines_file self.raw_lines_file = raw_lines_file
self.sets = sets self.sets = sets
self.ssh_port = ssh_port
self.local_path = local_path
self.graph_groups = graph_groups
def setup(self): def setup(self):
# Nothing to do at this time. # Nothing to do at this time.
return return
def run(self): def run(self):
self.sync_cv() self.sync_cv()
time.sleep(2) time.sleep(2)
@@ -206,18 +212,18 @@ class RvrTest(cvtest):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name, self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets, self.config_name, self.sets,
self.pull_report, self.lf_host, self.lf_user, self.lf_password, self.pull_report, self.lf_host, self.lf_user, self.lf_password,
cv_cmds) cv_cmds, ssh_port=self.ssh_port, local_lf_report_dir=self.local_path,
graph_groups_file=self.graph_groups)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
def main(): def main():
parser = argparse.ArgumentParser(""" parser = argparse.ArgumentParser("""
Open this file in an editor and read the top notes for more details. Open this file in an editor and read the top notes for more details.
Example: Example:
""" """
) )
@@ -236,35 +242,40 @@ def main():
help="Specify requested upload speed. Percentage of theoretical is also supported. Default: 0") help="Specify requested upload speed. Percentage of theoretical is also supported. Default: 0")
parser.add_argument("--duration", default="", parser.add_argument("--duration", default="",
help="Specify duration of each traffic run") help="Specify duration of each traffic run")
parser.add_argument("--graph_groups", help="File to save graph_groups to", default=None)
parser.add_argument("--report_dir", default="")
args = parser.parse_args() args = parser.parse_args()
cv_base_adjust_parser(args) cv_base_adjust_parser(args)
CV_Test = RvrTest(lf_host = args.mgr, CV_Test = RvrTest(lf_host=args.mgr,
lf_port = args.port, lf_port=args.port,
lf_user = args.lf_user, lf_user=args.lf_user,
lf_password = args.lf_password, lf_password=args.lf_password,
instance_name = args.instance_name, instance_name=args.instance_name,
config_name = args.config_name, config_name=args.config_name,
upstream = args.upstream, upstream=args.upstream,
pull_report = args.pull_report, pull_report=args.pull_report,
load_old_cfg = args.load_old_cfg, load_old_cfg=args.load_old_cfg,
download_speed = args.download_speed, download_speed=args.download_speed,
upload_speed = args.upload_speed, upload_speed=args.upload_speed,
duration = args.duration, duration=args.duration,
dut = args.dut, dut=args.dut,
station = args.station, station=args.station,
enables = args.enable, enables=args.enable,
disables = args.disable, disables=args.disable,
raw_lines = args.raw_line, raw_lines=args.raw_line,
raw_lines_file = args.raw_lines_file, raw_lines_file=args.raw_lines_file,
sets = args.set sets=args.set,
graph_groups=args.graph_groups
) )
CV_Test.setup() CV_Test.setup()
CV_Test.run() CV_Test.run()
CV_Test.check_influx_kpi(args) CV_Test.check_influx_kpi(args)
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@@ -0,0 +1,19 @@
{
"mgr":"192.168.0.101",
"port":"8080",
"lf_user":"lanforge",
"lf_password":"lanforge",
"instance_name":"rx-sensitivity-instance",
"config_name":"test_con",
"upstream":"1.1.eth1",
"dut":"asus_5g",
"duration":"15s",
"station":"1.1.eth2",
"download_speed":"85%",
"upload_speed":"0",
"pull_report": true,
"raw_line": ["txo_preamble: VHT", "txo_mcs: 4 OFDM, HT, VHT;5 OFDM, HT, VHT;6 OFDM, HT, VHT;7 OFDM, HT, VHT", "spatial_streams: 3", "bandw_options: 80", "txo_sgi: ON", "txo_retries: No Retry", "txo_txpower: 17"]
}

View File

@@ -0,0 +1,387 @@
#!/usr/bin/env python3
"""
Note: To Run this script gui should be opened with
path: cd LANforgeGUI_5.4.3 (5.4.3 can be changed with GUI version)
pwd (Output : /home/lanforge/LANforgeGUI_5.4.3)
./lfclient.bash -cli-socket 3990
This script is used to automate running RX Sensitivity tests. You
may need to view a RX Sensitivity test configured through the GUI to understand
the options and how best to input data.
./lf_rx_sensitivity_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \
--instance_name rx-sensitivity-instance --config_name test_con --upstream 1.1.eth2 \
--dut linksys-8450 --duration 15s --station 1.1.sta01500 \
--download_speed 85% --upload_speed 0 \
--raw_line 'txo_preamble: VHT' \
--raw_line 'txo_mcs: 4 OFDM, HT, VHT;5 OFDM, HT, VHT;6 OFDM, HT, VHT;7 OFDM, HT, VHT' \
--raw_line 'spatial_streams: 3' \
--raw_line 'bandw_options: 80' \
--raw_line 'txo_sgi: ON' \
--raw_line 'txo_retries: No Retry' \
--raw_line 'txo_txpower: 17' \
--test_rig Testbed-01 --pull_report \
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
--influx_bucket ben \
--influx_tag testbed Ferndale-01
Note:
--raw_line 'line contents' will add any setting to the test config. This is
useful way to support any options not specifically enabled by the
command options.
--set modifications will be applied after the other config has happened,
so it can be used to override any other config.
Example of raw text config for RX Sensitivity, to show other possible options:
show_events: 1
show_log: 0
port_sorting: 2
kpi_id: RX Sensitivity
bg: 0xE0ECF8
test_rig:
show_scan: 1
auto_helper: 0
skip_ac: 0
skip_ax: 0
skip_2: 0
skip_5: 0
skip_5b: 1
skip_dual: 0
skip_tri: 1
selected_dut: ea8300
duration: 15000
settle_time: 1000
sndbuf: 0
rcvbuf: 0
traffic_port: 1.1.157 sta01500
upstream_port: 1.1.1 eth1
path_loss: 10
speed: 85%
speed2: 0Kbps
min_rssi_bound: -150
max_rssi_bound: 0
channels: AUTO
modes: Auto
pkts: 1024
spatial_streams: 1
security_options: AUTO
bandw_options: 20
traffic_types: TCP
directions: DUT Transmit
txo_preamble: OFDM
txo_mcs: 0 CCK, OFDM, HT, VHT;1 CCK, OFDM, HT, VHT;2 CCK, OFDM, HT, VHT;3 CCK, OFDM, HT, VHT
txo_retries: No Retry
txo_sgi: ON
txo_txpower: 15
attenuator: 0
attenuator2: 0
attenuator_mod: 255
attenuator_mod2: 255
attenuations: 0..+50..100
attenuations2: 0..+50..950
chamber: 0
tt_deg: 0..+45..359
cust_pkt_sz:
show_bar_labels: 1
show_prcnt_tput: 0
show_3s: 0
show_ll_graphs: 0
show_gp_graphs: 1
show_1m: 1
pause_iter: 0
outer_loop_atten: 0
show_realtime: 1
operator:
mconn: 1
mpkt: 1000
tos: 0
loop_iterations: 1
"""
import sys
import os
import argparse
import time
import json
from os import path
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
from cv_test_manager import cv_test
from cv_test_manager import *
class RxSensitivityTest(cv_test):
    """Automate a Chamber-View 'RX Sensitivity' test against a LANforge GUI.

    The test configuration is assembled one raw-text line at a time into a
    named text blob, the test is launched through the cv_test helpers, and
    the generated report can optionally be pulled back to this host.
    """

    def __init__(self,
                 lf_host="localhost",
                 lf_port=8080,
                 lf_user="lanforge",
                 lf_password="lanforge",
                 ssh_port=22,
                 local_path="",
                 instance_name="dpt_instance",
                 config_name="dpt_config",
                 upstream="1.1.eth2",
                 pull_report=False,
                 load_old_cfg=False,
                 upload_speed="0",
                 download_speed="85%",
                 duration="15s",
                 station="1.1.sta01500",
                 dut="NA",
                 enables=None,
                 disables=None,
                 raw_lines=None,
                 raw_lines_file="",
                 sets=None,
                 graph_groups=None,
                 report_dir=""
                 ):
        super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)
        self.lf_host = lf_host
        self.lf_port = lf_port
        self.lf_user = lf_user
        self.lf_password = lf_password
        self.instance_name = instance_name
        self.config_name = config_name
        self.dut = dut
        self.duration = duration
        self.upstream = upstream
        self.station = station
        self.pull_report = pull_report
        self.load_old_cfg = load_old_cfg
        self.test_name = "RX Sensitivity"
        self.upload_speed = upload_speed
        self.download_speed = download_speed
        # List arguments default to None so each instance gets its own fresh
        # list (mutable default arguments would be shared across all calls).
        self.enables = [] if enables is None else enables
        self.disables = [] if disables is None else disables
        self.raw_lines = [] if raw_lines is None else raw_lines
        self.raw_lines_file = raw_lines_file
        self.sets = [] if sets is None else sets
        self.graph_groups = graph_groups
        self.report_dir = report_dir
        self.ssh_port = ssh_port
        self.local_path = local_path

    def setup(self):
        """No pre-test setup is required for this test type."""
        return

    def run(self):
        """Build the config blob, run the test, then delete the blob again."""
        self.sync_cv()
        time.sleep(2)
        self.sync_cv()

        blob_test = "rxsens-test-latest-"
        # Delete any stale config blob with the same name before rebuilding.
        self.rm_text_blob(self.config_name, blob_test)
        self.show_text_blob(None, None, False)

        # Test related settings supplied via enables/disables/raw lines.
        cfg_options = []
        self.apply_cfg_options(cfg_options, self.enables, self.disables,
                               self.raw_lines, self.raw_lines_file)

        # Command-line args take precedence and so come last in the cfg array.
        if self.upstream != "":
            cfg_options.append("upstream_port: " + self.upstream)
        if self.station != "":
            cfg_options.append("traffic_port: " + self.station)
        if self.download_speed != "":
            cfg_options.append("speed: " + self.download_speed)
        if self.upload_speed != "":
            cfg_options.append("speed2: " + self.upload_speed)
        if self.duration != "":
            cfg_options.append("duration: " + self.duration)
        if self.dut != "":
            cfg_options.append("selected_dut: " + self.dut)

        # We deleted the scenario earlier; now re-build it one line at a time.
        self.build_cfg(self.config_name, blob_test, cfg_options)

        cv_cmds = []
        # NOTE: the pull-to directory keyword is 'local_lf_report_dir', the
        # same name the other cv_test based scripts pass — 'local_path' was
        # not a keyword create_and_run_test accepts.
        self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
                                 self.config_name, self.sets,
                                 self.pull_report, self.lf_host, self.lf_user, self.lf_password,
                                 cv_cmds, ssh_port=self.ssh_port,
                                 local_lf_report_dir=self.local_path,
                                 graph_groups_file=self.graph_groups)

        self.rm_text_blob(self.config_name, blob_test)  # To delete old config with same name
def _apply_json_overrides(args, json_data):
    """Copy values from the JSON config file onto the parsed args.

    JSON configuration takes precedence over the command line. Only keys
    actually present in the file are applied.
    """
    # Simple one-to-one keys map directly onto argparse attribute names.
    for key in ("mgr", "port", "lf_user", "lf_password", "instance_name",
                "config_name", "upstream", "dut", "duration", "station",
                "download_speed", "upload_speed", "pull_report"):
        if key in json_data:
            setattr(args, key, json_data[key])
    if "raw_line" in json_data:
        # argparse collects each --raw_line occurrence as a one-element list,
        # so wrap every JSON entry the same way to match that shape.
        args.raw_line = [[line] for line in json_data["raw_line"]]


def main():
    """Parse CLI/JSON configuration and drive one RX Sensitivity test run."""
    parser = argparse.ArgumentParser(description="""
    IMPORTANT: Start lanforge with socket 3990 : ./lfclient.bash -cli-socket 3990
    lfclient.bash is located in the LANforgeGUI_X.X.X directory
    On local or remote system: ./lfclient.bash -cli-socket 3990 -s LF_MGR
    On local system the -s LF_MGR will be local_host if not provided
    Open this file in an editor and read the top notes for more details.

    Example:
    ./lf_rx_sensitivity_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \\
      --instance_name rx-sensitivity-instance --config_name test_con --upstream 1.1.eth2 \\
      --dut linksys-8450 --duration 15s --station 1.1.sta01500 \\
      --download_speed 85% --upload_speed 0 \\
      --raw_line 'txo_preamble: VHT' \\
      --raw_line 'txo_mcs: 4 OFDM, HT, VHT;5 OFDM, HT, VHT;6 OFDM, HT, VHT;7 OFDM, HT, VHT' \\
      --raw_line 'spatial_streams: 3' \\
      --raw_line 'bandw_options: 80' \\
      --raw_line 'txo_sgi: ON' \\
      --raw_line 'txo_retries: No Retry' \\
      --raw_line 'txo_txpower: 17' \\
      --test_rig Testbed-01 --pull_report \\
      --influx_host c7-graphana --influx_port 8086 --influx_org Candela \\
      --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \\
      --influx_bucket ben \\
      --influx_tag testbed Ferndale-01

    Example 2:
    ./lf_rx_sensitivity_test.py --json <name>.json

    see sample json file: lf_rx_sensitivity_config.json

    Sample <name>.json between using eth1 and eth2
    {
        "mgr":"192.168.0.101",
        "port":"8080",
        "lf_user":"lanforge",
        "lf_password":"lanforge",
        "instance_name":"rx-sensitivity-instance",
        "config_name":"test_con",
        "upstream":"1.1.eth1",
        "dut":"asus_5g",
        "duration":"15s",
        "station":"1.1.eth2",
        "download_speed":"85%",
        "upload_speed":"0",
        "raw_line": ["txo_preamble: VHT", "txo_mcs: 4 OFDM, HT, VHT;5 OFDM, HT, VHT;6 OFDM, HT, VHT;7 OFDM, HT, VHT", "spatial_streams: 3", "bandw_options: 80", "txo_sgi: ON", "txo_retries: No Retry", "txo_txpower: 17"]
    }

    Sample <name>.json between using eth1 and station 1.1.sta0002
    {
        "mgr":"192.168.0.101",
        "port":"8080",
        "lf_user":"lanforge",
        "lf_password":"lanforge",
        "instance_name":"rx-sensitivity-instance",
        "config_name":"test_con",
        "upstream":"1.1.eth1",
        "dut":"asus_5g",
        "duration":"15s",
        "station":"1.1.sta0002",
        "download_speed":"85%",
        "upload_speed":"0",
        "raw_line": ["txo_preamble: VHT", "txo_mcs: 4 OFDM, HT, VHT;5 OFDM, HT, VHT;6 OFDM, HT, VHT;7 OFDM, HT, VHT", "spatial_streams: 3", "bandw_options: 80", "txo_sgi: ON", "txo_retries: No Retry", "txo_txpower: 17"]
    }
    """
                                     )
    cv_add_base_parser(parser)  # see cv_test_manager.py

    parser.add_argument('--json', help="--json <config.json> json input file", default="")
    parser.add_argument("-u", "--upstream", type=str, default="",
                        help="Upstream port for wifi capacity test ex. 1.1.eth2")
    parser.add_argument("--station", type=str, default="",
                        help="Station to be used in this test, example: 1.1.sta01500")
    parser.add_argument("--dut", default="",
                        help="Specify DUT used by this test, example: linksys-8450")
    parser.add_argument("--download_speed", default="",
                        help="Specify requested download speed. Percentage of theoretical is also supported. Default: 85%%.")
    parser.add_argument("--upload_speed", default="",
                        help="Specify requested upload speed. Percentage of theoretical is also supported. Default: 0")
    parser.add_argument("--duration", default="",
                        help="Specify duration of each traffic run")
    parser.add_argument("--graph_groups", help="File to save graph_groups to", default=None)
    parser.add_argument("--report_dir", default="")

    args = parser.parse_args()

    # Optional JSON config file; its values override the command line.
    if args.json != "":
        try:
            with open(args.json, 'r') as json_config:
                json_data = json.load(json_config)
        except (OSError, json.JSONDecodeError) as err:
            # Fail loudly here: the previous bare 'except' printed a message
            # and then crashed later on the undefined json_data name.
            sys.exit("Error reading {}: {}".format(args.json, err))
        _apply_json_overrides(args, json_data)

    cv_base_adjust_parser(args)

    CV_Test = RxSensitivityTest(lf_host=args.mgr,
                                lf_port=args.port,
                                lf_user=args.lf_user,
                                lf_password=args.lf_password,
                                instance_name=args.instance_name,
                                config_name=args.config_name,
                                upstream=args.upstream,
                                pull_report=args.pull_report,
                                load_old_cfg=args.load_old_cfg,
                                download_speed=args.download_speed,
                                upload_speed=args.upload_speed,
                                duration=args.duration,
                                dut=args.dut,
                                station=args.station,
                                enables=args.enable,
                                disables=args.disable,
                                raw_lines=args.raw_line,
                                raw_lines_file=args.raw_lines_file,
                                sets=args.set,
                                graph_groups=args.graph_groups
                                )
    CV_Test.setup()
    CV_Test.run()
    CV_Test.check_influx_kpi(args)


if __name__ == "__main__":
    main()

View File

@@ -28,8 +28,7 @@ the options and how best to input data.
--set 'Multiple Assoc Stability' 0 \ --set 'Multiple Assoc Stability' 0 \
--set 'Downlink MU-MIMO' 0 \ --set 'Downlink MU-MIMO' 0 \
--set 'AP Coexistence' 0 \ --set 'AP Coexistence' 0 \
--set 'Long Term Stability' 0 \ --set 'Long Term Stability' 0
--test_rig Testbed-01
Note: Note:
--raw_line 'line contents' will add any setting to the test config. This is --raw_line 'line contents' will add any setting to the test config. This is
@@ -61,7 +60,6 @@ notes1: smaller CT810a chamber. CT704b and CT714 4-module attenuators are used.
notes2: mounted on the sides of the DUT chamber are used to communicate to the DUT. DUT is facing forward at notes2: mounted on the sides of the DUT chamber are used to communicate to the DUT. DUT is facing forward at
notes3: the zero-rotation angle. notes3: the zero-rotation angle.
bg: 0xE0ECF8 bg: 0xE0ECF8
test_rig: TR-398 test bed
show_scan: 1 show_scan: 1
auto_helper: 1 auto_helper: 1
skip_2: 0 skip_2: 0
@@ -278,8 +276,7 @@ def main():
--set 'Multiple Assoc Stability' 0 \ --set 'Multiple Assoc Stability' 0 \
--set 'Downlink MU-MIMO' 0 \ --set 'Downlink MU-MIMO' 0 \
--set 'AP Coexistence' 0 \ --set 'AP Coexistence' 0 \
--set 'Long Term Stability' 0 \ --set 'Long Term Stability' 0
--test_rig Testbed-01
""" """
) )

View File

@@ -25,7 +25,7 @@ Note: This is a test file which will run a wifi capacity test.
--instance_name wct_instance --config_name wifi_config --upstream 1.1.eth1 --batch_size 1,5,25 --loop_iter 1 \ --instance_name wct_instance --config_name wifi_config --upstream 1.1.eth1 --batch_size 1,5,25 --loop_iter 1 \
--protocol UDP-IPv4 --duration 6000 --pull_report --stations 1.1.sta0000,1.1.sta0001 \ --protocol UDP-IPv4 --duration 6000 --pull_report --stations 1.1.sta0000,1.1.sta0001 \
--create_stations --radio wiphy0 --ssid test-ssid --security open --paswd [BLANK] \ --create_stations --radio wiphy0 --ssid test-ssid --security open --paswd [BLANK] \
--test_rig Testbed-01 --test_rig Testbed-01 --set DUT_NAME linksys-8450
Note: Note:
@@ -34,6 +34,8 @@ Note:
--stations == Enter stations to use for wifi capacity --stations == Enter stations to use for wifi capacity
--set DUT_NAME XXXX == Determines which DUT the wifi capacity test should use to get details on
Example of raw text config for Capacity, to show other possible options: Example of raw text config for Capacity, to show other possible options:
sel_port-0: 1.1.eth1 sel_port-0: 1.1.eth1
@@ -353,7 +355,10 @@ class WiFiCapacityTest(cv_test):
influx_host="localhost", influx_host="localhost",
influx_port=8086, influx_port=8086,
report_dir="", report_dir="",
graph_groups=None graph_groups=None,
test_rig="",
local_lf_report_dir="",
debug=False,
): ):
super().__init__(lfclient_host=lfclient_host, lfclient_port=lf_port) super().__init__(lfclient_host=lfclient_host, lfclient_port=lf_port)
@@ -390,6 +395,9 @@ class WiFiCapacityTest(cv_test):
self.influx_port = influx_port self.influx_port = influx_port
self.report_dir = report_dir self.report_dir = report_dir
self.graph_groups = graph_groups self.graph_groups = graph_groups
self.test_rig = test_rig
self.local_lf_report_dir = local_lf_report_dir
self.debug = debug
def setup(self): def setup(self):
if self.create_stations and self.stations != "": if self.create_stations and self.stations != "":
@@ -445,6 +453,8 @@ class WiFiCapacityTest(cv_test):
cfg_options.append("ul_rate: " + self.upload_rate) cfg_options.append("ul_rate: " + self.upload_rate)
if self.download_rate != "": if self.download_rate != "":
cfg_options.append("dl_rate: " + self.download_rate) cfg_options.append("dl_rate: " + self.download_rate)
if self.test_rig != "":
cfg_options.append("test_rig: " + self.test_rig)
cfg_options.append("save_csv: 1") cfg_options.append("save_csv: 1")
@@ -465,7 +475,8 @@ class WiFiCapacityTest(cv_test):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name, self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets, self.config_name, self.sets,
self.pull_report, self.lfclient_host, self.lf_user, self.lf_password, self.pull_report, self.lfclient_host, self.lf_user, self.lf_password,
cv_cmds, graph_groups_file=self.graph_groups) cv_cmds, graph_groups_file=self.graph_groups, local_lf_report_dir=self.local_lf_report_dir,
debug=self.debug)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
@@ -519,6 +530,9 @@ def main():
parser.add_argument("--report_dir", default="") parser.add_argument("--report_dir", default="")
parser.add_argument("--scenario", default="") parser.add_argument("--scenario", default="")
parser.add_argument("--graph_groups", help="File to save graph groups to", default=None) parser.add_argument("--graph_groups", help="File to save graph groups to", default=None)
parser.add_argument("--local_lf_report_dir", help="--local_lf_report_dir <where to pull reports to> default '' put where dataplane script run from",default="")
parser.add_argument("--debug", default=False)
args = parser.parse_args() args = parser.parse_args()
cv_base_adjust_parser(args) cv_base_adjust_parser(args)
@@ -550,7 +564,10 @@ def main():
raw_lines=args.raw_line, raw_lines=args.raw_line,
raw_lines_file=args.raw_lines_file, raw_lines_file=args.raw_lines_file,
sets=args.set, sets=args.set,
graph_groups=args.graph_groups graph_groups=args.graph_groups,
test_rig=args.test_rig,
local_lf_report_dir=args.local_lf_report_dir,
debug=args.debug
) )
WFC_Test.setup() WFC_Test.setup()
WFC_Test.run() WFC_Test.run()

View File

@@ -98,6 +98,13 @@ class GenTest(LFCliBase):
if (speedtest_max_ping is not None): if (speedtest_max_ping is not None):
self.generic_endps_profile.speedtest_max_ping = float(speedtest_max_ping) self.generic_endps_profile.speedtest_max_ping = float(speedtest_max_ping)
def check_tab_exists(self):
    """Return True when the LANforge GUI exposes the 'generic' tab."""
    # json_get() yields None when the endpoint is absent or disabled.
    return self.json_get("generic") is not None
def start(self, print_pass=False, print_fail=False): def start(self, print_pass=False, print_fail=False):
self.station_profile.admin_up() self.station_profile.admin_up()
temp_stas = [] temp_stas = []
@@ -290,6 +297,8 @@ python3 ./test_generic.py
client=args.client, client=args.client,
_debug_on=args.debug) _debug_on=args.debug)
if not generic_test.check_tab_exists():
raise ValueError("Error received from GUI, please ensure generic tab is enabled")
generic_test.cleanup(station_list) generic_test.cleanup(station_list)
generic_test.build() generic_test.build()
if not generic_test.passes(): if not generic_test.passes():

View File

@@ -0,0 +1,521 @@
#!/usr/bin/env python3
"""
NAME: test_ip_variable_time.py
PURPOSE:
test_ip_variable_time.py will create stations and endpoints to generate and verify layer-3 traffic over ipv4 or ipv6.
This script replaces the functionality of test_ipv4_variable_time.py and test_ipv6_variable_time.py
This Script has two working modes:
Mode 1:
When station is not available,
This script will create a variable number of stations each with their own set of cross-connects and endpoints.
It will then create layer 3 traffic over a specified amount of time, testing for increased traffic at regular intervals.
This test will pass if all stations increase traffic over the full test duration.
Mode 2:
When station is already available This script will create layer3 cross-connects and endpoints It will then
create layer 3 traffic over a specified amount of time, testing for increased traffic at regular intervals.
This test will pass if all stations increase traffic over the full test duration.
Use './test_ip_variable_time.py --help' to see command line usage and options
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
"""
import sys
import os
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
import argparse
from LANforge import LFUtils
from realm import Realm
import time
import datetime
class IPVariableTime(Realm):
    """Layer-3 variable-time traffic test over IPv4 or IPv6.

    Mode 1 (create_sta=True): builds its own stations plus cross-connects.
    Mode 2 (create_sta=False): reuses existing stations and only builds the
    cross-connects.  See the module docstring for details.
    """

    def __init__(self,
                 ssid=None,
                 security=None,
                 password=None,
                 sta_list=None,
                 create_sta=True,
                 name_prefix=None,
                 upstream=None,
                 radio=None,
                 host="localhost",
                 port=8080,
                 mode=0,
                 ap=None,
                 traffic_type=None,
                 side_a_min_rate=56, side_a_max_rate=0,
                 side_b_min_rate=56, side_b_max_rate=0,
                 number_template="00000",
                 test_duration="5m",
                 use_ht160=False,
                 ipv6=False,
                 _debug_on=False,
                 _exit_on_error=False,
                 _exit_on_fail=False):
        # NOTE: the original statement ended with a stray comma, turning the
        # super() call into a throwaway 1-tuple expression; removed.
        super().__init__(lfclient_host=host,
                         lfclient_port=port)
        self.upstream = upstream
        self.host = host
        self.port = port
        self.ssid = ssid
        # Avoid the shared-mutable-default pitfall: default sta_list is built
        # per instance instead of `sta_list=[]` in the signature.
        self.sta_list = [] if sta_list is None else sta_list
        self.create_sta = create_sta
        self.security = security
        self.password = password
        self.radio = radio
        self.mode = mode
        self.ap = ap
        self.traffic_type = traffic_type
        self.number_template = number_template
        self.debug = _debug_on
        self.name_prefix = name_prefix
        self.test_duration = test_duration
        self.station_profile = self.new_station_profile()
        self.cx_profile = self.new_l3_cx_profile()
        self.station_profile.lfclient_url = self.lfclient_url
        self.station_profile.ssid = self.ssid
        self.station_profile.ssid_pass = self.password
        self.station_profile.security = self.security
        self.station_profile.number_template_ = self.number_template
        self.station_profile.debug = self.debug
        self.station_profile.use_ht160 = use_ht160
        self.station_profile.mode = mode
        if self.station_profile.use_ht160:
            # HT160 requires 802.11 mode 9 (anAC).  The original assigned 9
            # first and then unconditionally overwrote it with `mode`, making
            # the HT160 setting dead; the override now happens last.
            self.station_profile.mode = 9
        if self.ap is not None:
            self.station_profile.set_command_param("add_sta", "ap", self.ap)
        self.cx_profile.host = self.host
        self.cx_profile.port = self.port
        self.ipv6 = ipv6
        self.cx_profile.name_prefix = self.name_prefix
        self.cx_profile.side_a_min_bps = side_a_min_rate
        self.cx_profile.side_a_max_bps = side_a_max_rate
        self.cx_profile.side_b_min_bps = side_b_min_rate
        self.cx_profile.side_b_max_bps = side_b_max_rate

    def start(self, print_pass=False, print_fail=False):
        """Admin-up stations (if we created them), wait for IPs, start traffic.

        Exits the script via exit_fail() when stations fail to obtain an IP.
        """
        if self.create_sta:
            self.station_profile.admin_up()
            # TODO(review): also verify the upstream port obtained an IP.
            temp_stas = self.station_profile.station_names.copy()
            if self.wait_for_ip(temp_stas, ipv4=not self.ipv6, ipv6=self.ipv6):
                self._pass("All stations got IPs")
            else:
                self._fail("Stations failed to get IPs")
                self.exit_fail()
        self.cx_profile.start_cx()

    def stop(self):
        """Stop the cross-connects; admin-down stations we created."""
        self.cx_profile.stop_cx()
        if self.create_sta:
            self.station_profile.admin_down()

    def pre_cleanup(self):
        """Remove leftover cross-connects/ports from a previous run."""
        self.cx_profile.cleanup_prefix()
        if self.create_sta:
            for sta in self.sta_list:
                self.rm_port(sta, check_exists=True)

    def cleanup(self):
        """Tear down cross-connects and (if created here) the stations."""
        self.cx_profile.cleanup()
        if self.create_sta:
            self.station_profile.cleanup()
            LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url,
                                               port_list=self.station_profile.station_names,
                                               debug=self.debug)

    def build(self):
        """Create stations when requested, then the L3 cross-connects."""
        if self.create_sta:
            self.station_profile.use_security(self.security, self.ssid, self.password)
            self.station_profile.set_number_template(self.number_template)
            print("Creating stations")
            self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
            self.station_profile.set_command_param("set_port", "report_timer", 1500)
            self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
            self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
            self._pass("PASS: Station build finished")
        # Cross-connects are built in both modes (new or pre-existing stations).
        self.cx_profile.create(endp_type=self.traffic_type, side_a=self.sta_list,
                               side_b=self.upstream,
                               sleep_time=0)
def main():
    """Entry point: parse CLI args, build the test object, run traffic, report.

    Raises:
        ValueError: when no layer-3 endpoints can be read back from LANforge
            (usually a wrong --upstream_port).
    """
    parser = Realm.create_basic_argparse(
        prog='test_ip_variable_time.py',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog='''\
Create stations to test connection and traffic on VAPs of varying security types (WEP, WPA, WPA2, WPA3, Open)
over ipv4 or ipv6
''',
        description='''\
test_ip_variable_time.py:
--------------------
Generic command layout:
python3 ./test_ip_variable_time.py
--upstream_port eth1
--radio wiphy0
--num_stations 32
--security {open|wep|wpa|wpa2|wpa3}
--mode 1
{"auto" : "0",
"a" : "1",
"b" : "2",
"g" : "3",
"abg" : "4",
"abgn" : "5",
"bgn" : "6",
"bg" : "7",
"abgnAC" : "8",
"anAC" : "9",
"an" : "10",
"bgnAC" : "11",
"abgnAX" : "12",
"bgnAX" : "13"}
--ssid netgear
--password admin123
--test_duration 2m (default)
--monitor_interval_ms
--a_min 3000
--b_min 1000
--ap "00:0e:8e:78:e1:76"
--output_format csv
--traffic_type lf_udp
--report_file ~/Documents/results.csv (Example of csv file output - please use another extension for other file formats)
--compared_report ~/Documents/results_prev.csv (Example of csv file retrieval - please use another extension for other file formats) - UNDER CONSTRUCTION
--layer3_cols 'name','tx bytes','rx bytes','dropped' (column names from the GUI to print on report - please read below to know what to put here according to preferences)
--port_mgr_cols 'ap','ip' (column names from the GUI to print on report - please read below to know what to put here according to preferences)
--debug
python3 ./test_ip_variable_time.py
--upstream_port eth1 (upstream Port)
--traffic_type lf_udp (traffic type, lf_udp | lf_tcp)
--test_duration 5m (duration to run traffic 5m --> 5 Minutes)
--create_sta False (False, means it will not create stations and use the sta_names specified below)
--sta_names sta000,sta001,sta002 (used if --create_sta False, comma separated names of stations)
===============================================================================
 ** FURTHER INFORMATION **
Using the layer3_cols flag:
Currently the output function does not support inputting the columns in layer3_cols the way they are displayed in the GUI. This quirk is under construction. To output
certain columns in the GUI in your final report, please match the according GUI column display to it's counterpart to have the columns correctly displayed in
your report.
GUI Column Display       Layer3_cols argument to type in (to print in report)
Name                |  'name'
EID                 |  'eid'
Run                 |  'run'
Mng                 |  'mng'
Script              |  'script'
Tx Rate             |  'tx rate'
Tx Rate (1 min)     |  'tx rate (1&nbsp;min)'
Tx Rate (last)      |  'tx rate (last)'
Tx Rate LL          |  'tx rate ll'
Rx Rate             |  'rx rate'
Rx Rate (1 min)     |  'rx rate (1&nbsp;min)'
Rx Rate (last)      |  'rx rate (last)'
Rx Rate LL          |  'rx rate ll'
Rx Drop %           |  'rx drop %'
Tx PDUs             |  'tx pdus'
Tx Pkts LL          |  'tx pkts ll'
PDU/s TX            |  'pdu/s tx'
Pps TX LL           |  'pps tx ll'
Rx PDUs             |  'rx pdus'
Rx Pkts LL          |  'pps rx ll'
PDU/s RX            |  'pdu/s tx'
Pps RX LL           |  'pps rx ll'
Delay               |  'delay'
Dropped             |  'dropped'
Jitter              |  'jitter'
Tx Bytes            |  'tx bytes'
Rx Bytes            |  'rx bytes'
Replays             |  'replays'
TCP Rtx             |  'tcp rtx'
Dup Pkts            |  'dup pkts'
Rx Dup %            |  'rx dup %'
OOO Pkts            |  'ooo pkts'
Rx OOO %            |  'rx ooo %'
RX Wrong Dev        |  'rx wrong dev'
CRC Fail            |  'crc fail'
RX BER              |  'rx ber'
CX Active           |  'cx active'
CX Estab/s          |  'cx estab/s'
1st RX              |  '1st rx'
CX TO               |  'cx to'
Pattern             |  'pattern'
Min PDU             |  'min pdu'
Max PDU             |  'max pdu'
Min Rate            |  'min rate'
Max Rate            |  'max rate'
Send Buf            |  'send buf'
Rcv Buf             |  'rcv buf'
CWND                |  'cwnd'
TCP MSS             |  'tcp mss'
Bursty              |  'bursty'
A/B                 |  'a/b'
Elapsed             |  'elapsed'
Destination Addr    |  'destination addr'
Source Addr         |  'source addr'
''')
    parser.add_argument('--mode', help='Used to force mode of stations')
    parser.add_argument('--ap', help='Used to force a connection to a particular AP')
    parser.add_argument('--traffic_type', help='Select the Traffic Type [lf_udp, lf_tcp, udp, tcp], type will be '
                                               'adjusted automatically between ipv4 and ipv6 based on use of --ipv6 flag',
                        required=True)
    parser.add_argument('--output_format', help='choose either csv or xlsx')
    parser.add_argument('--report_file', help='where you want to store results', default=None)
    parser.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
    parser.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
    parser.add_argument('--test_duration', help='--test_duration sets the duration of the test', default="2m")
    parser.add_argument('--layer3_cols', help='Columns wished to be monitored from layer 3 endpoint tab',
                        default=['name', 'tx bytes', 'rx bytes', 'tx rate', 'rx rate'])
    parser.add_argument('--port_mgr_cols', help='Columns wished to be monitored from port manager tab',
                        default=['ap', 'ip', 'parent dev'])
    parser.add_argument('--compared_report', help='report path and file which is wished to be compared with new report',
                        default=None)
    parser.add_argument('--monitor_interval',
                        help='how frequently do you want your monitor function to take measurements; \, 35s, 2h',
                        default='10s')
    parser.add_argument('--ipv6', help='Sets the test to use IPv6 traffic instead of IPv4', action='store_true')
    parser.add_argument('--influx_host')
    parser.add_argument('--influx_token', help='Username for your Influx database')
    parser.add_argument('--influx_bucket', help='Password for your Influx database')
    parser.add_argument('--influx_org', help='Name of your Influx database')
    parser.add_argument('--influx_port', help='Port where your influx database is located', default=8086)
    parser.add_argument('--influx_tag', action='append', nargs=2,
                        help='--influx_tag <key> <val>   Can add more than one of these.')
    parser.add_argument('--influx_mgr',
                        help='IP address of the server your Influx database is hosted if different from your LANforge Manager',
                        default=None)
    parser.add_argument('--create_sta', help='Used to force a connection to a particular AP', default=True)
    parser.add_argument('--sta_names', help='Used to force a connection to a particular AP', default="sta0000")
    args = parser.parse_args()

    # --create_sta arrives as a string from the CLI; only the literal "False"
    # switches the script into "reuse existing stations" mode.
    create_sta = True
    if args.create_sta == "False":
        create_sta = False

    num_sta = 2
    if (args.num_stations is not None) and (int(args.num_stations) > 0):
        num_sta = int(args.num_stations)

    # If no report path was given, save under /home/lanforge/report-data when
    # it exists, otherwise fall back to a folder next to this script.
    if args.report_file is None:
        new_file_path = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-h-%M-m-%S-s")).replace(':',
                                                                                                 '-') + '_test_ip_variable_time'  # create path name
        try:
            path = os.path.join('/home/lanforge/report-data/', new_file_path)
            os.mkdir(path)
        except OSError:
            # /home/lanforge/report-data is absent or not writable; narrow
            # except replaces the original bare `except:`.
            curr_dir_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            path = os.path.join(curr_dir_path, new_file_path)
            os.mkdir(path)
        systeminfopath = str(path) + '/systeminfo.txt'
        if args.output_format in ['csv', 'json', 'html', 'hdf', 'stata', 'pickle', 'pdf', 'png', 'parquet',
                                  'xlsx']:
            report_f = str(path) + '/data.' + args.output_format
            output = args.output_format
        else:
            print(
                'Not supporting this report format or cannot find report format provided. Defaulting to csv data file '
                'output type, naming it data.csv.')
            report_f = str(path) + '/data.csv'
            output = 'csv'
    else:
        systeminfopath = str(args.report_file).split('/')[-1]
        report_f = args.report_file
        if args.output_format is None:
            output = str(args.report_file).split('.')[-1]
        else:
            output = args.output_format
    print("IP Test Report Data: {}".format(report_f))

    # Retrieve last data file for comparison (csv only for now).
    compared_rept = None
    if args.compared_report:
        compared_report_format = args.compared_report.split('.')[-1]
        if compared_report_format != 'csv':
            print(ValueError("Cannot process this file type. Please select a different file and re-run script."))
            exit(1)
        else:
            compared_rept = args.compared_report

    if create_sta:
        station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta - 1, padding_number_=10000,
                                              radio=args.radio)
    else:
        station_list = args.sta_names.split(",")

    CX_TYPES = ("tcp", "udp", "lf_tcp", "lf_udp")
    if (args.traffic_type is None) or (args.traffic_type not in CX_TYPES):
        print("cx_type needs to be lf_tcp, lf_udp, tcp, or udp, bye")
        exit(1)
    # Normalize the traffic type to the LANforge endpoint names, switching to
    # the *6 variants when IPv6 was requested.
    if args.ipv6:
        if args.traffic_type == "tcp" or args.traffic_type == "lf_tcp":
            args.traffic_type = "lf_tcp6"
        if args.traffic_type == "udp" or args.traffic_type == "lf_udp":
            args.traffic_type = "lf_udp6"
    else:
        if args.traffic_type == "tcp":
            args.traffic_type = "lf_tcp"
        if args.traffic_type == "udp":
            args.traffic_type = "lf_udp"

    ip_var_test = IPVariableTime(host=args.mgr,
                                 port=args.mgr_port,
                                 number_template="0000",
                                 sta_list=station_list,
                                 create_sta=create_sta,
                                 name_prefix="VT",
                                 upstream=args.upstream_port,
                                 ssid=args.ssid,
                                 password=args.passwd,
                                 radio=args.radio,
                                 security=args.security,
                                 test_duration=args.test_duration,
                                 use_ht160=False,
                                 side_a_min_rate=args.a_min,
                                 side_b_min_rate=args.b_min,
                                 mode=args.mode,
                                 ap=args.ap,
                                 ipv6=args.ipv6,
                                 traffic_type=args.traffic_type,
                                 _debug_on=args.debug)
    ip_var_test.pre_cleanup()
    ip_var_test.build()
    if create_sta:
        if not ip_var_test.passes():
            print(ip_var_test.get_fail_message())
            ip_var_test.exit_fail()

    try:
        layer3connections = ','.join([[*x.keys()][0] for x in ip_var_test.json_get('endp')['endpoint']])
    except Exception as err:
        # json_get returns None (or an unexpected shape) when no endpoints
        # exist, which usually means the upstream port was wrong; the bare
        # except is narrowed and the original cause is chained.
        raise ValueError('Try setting the upstream port flag if your device does not have an eth1 port') from err

    if type(args.layer3_cols) is not list:
        layer3_cols = list(args.layer3_cols.split(","))
        # send col names here to file to reformat
    else:
        layer3_cols = args.layer3_cols
        # send col names here to file to reformat
    if type(args.port_mgr_cols) is not list:
        port_mgr_cols = list(args.port_mgr_cols.split(","))
        # send col names here to file to reformat
    else:
        port_mgr_cols = args.port_mgr_cols
        # send col names here to file to reformat
    if args.debug:
        # Original printed these twice (once unconditionally); keep the
        # debug-gated copy only.
        print("Layer 3 Endp column names are...")
        print(layer3_cols)
        print("Port Manager column names are...")
        print(port_mgr_cols)

    try:
        monitor_interval = Realm.parse_time(args.monitor_interval).total_seconds()
    except ValueError as error:
        print(str(error))
        print(ValueError(
            "The time string provided for monitor_interval argument is invalid. Please see supported time stamp increments and inputs for monitor_interval in --help. "))
        exit(1)

    ip_var_test.start(False, False)

    if args.influx_org is not None:
        from influx2 import RecordInflux
        grapher = RecordInflux(_influx_host=args.influx_host,
                               _influx_port=args.influx_port,
                               _influx_org=args.influx_org,
                               _influx_token=args.influx_token,
                               _influx_bucket=args.influx_bucket)
        devices = [station.split('.')[-1] for station in station_list]
        tags = dict()
        tags['script'] = 'test_ip_variable_time'
        # --influx_tag may be given zero or more times; each value is a
        # (key, value) pair.  Guarding on None replaces `try/except: pass`.
        if args.influx_tag:
            for k in args.influx_tag:
                tags[k[0]] = k[1]
        grapher.monitor_port_data(longevity=Realm.parse_time(args.test_duration).total_seconds(),
                                  devices=devices,
                                  monitor_interval=Realm.parse_time(args.monitor_interval).total_seconds(),
                                  tags=tags)

    ip_var_test.cx_profile.monitor(layer3_cols=layer3_cols,
                                   sta_list=station_list,
                                   # port_mgr_cols=port_mgr_cols,
                                   report_file=report_f,
                                   systeminfopath=systeminfopath,
                                   duration_sec=Realm.parse_time(args.test_duration).total_seconds(),
                                   monitor_interval_ms=monitor_interval,
                                   created_cx=layer3connections,
                                   output_format=output,
                                   compared_report=compared_rept,
                                   script_name='test_ip_variable_time',
                                   arguments=args,
                                   debug=args.debug)

    ip_var_test.stop()
    if create_sta:
        if not ip_var_test.passes():
            print(ip_var_test.get_fail_message())
            ip_var_test.exit_fail()
        LFUtils.wait_until_ports_admin_up(port_list=station_list)
    if ip_var_test.passes():
        ip_var_test.success()
    ip_var_test.cleanup()
    print("IP Variable Time Test Report Data: {}".format(report_f))
# Standard script entry point: run main() only when executed directly.
if __name__ == "__main__":
    main()

View File

@@ -17,7 +17,8 @@ from LANforge import LFUtils
import realm import realm
import time import time
import pprint import pprint
from test_ipv4_variable_time import IPV4VariableTime from test_ip_variable_time import IPVariableTime
class TTLSTest(LFCliBase): class TTLSTest(LFCliBase):
def __init__(self, host="localhost", port=8080, def __init__(self, host="localhost", port=8080,
@@ -79,11 +80,11 @@ class TTLSTest(LFCliBase):
self.key = wep_key self.key = wep_key
self.ca_cert = ca_cert self.ca_cert = ca_cert
self.eap = eap self.eap = eap
self.identity = identity # eap identity self.identity = identity # eap identity
self.anonymous_identity = anonymous_identity self.anonymous_identity = anonymous_identity
self.phase1 = phase1 self.phase1 = phase1
self.phase2 = phase2 self.phase2 = phase2
self.ttls_passwd = ttls_passwd #eap passwd self.ttls_passwd = ttls_passwd # eap passwd
self.pin = pin self.pin = pin
self.pac_file = pac_file self.pac_file = pac_file
self.private_key = private_key self.private_key = private_key
@@ -124,9 +125,9 @@ class TTLSTest(LFCliBase):
self.station_profile.mode = 0 self.station_profile.mode = 0
# Layer3 Traffic # Layer3 Traffic
self.l3_cx_obj_udp = IPV4VariableTime(host=self.host, port=self.port, self.l3_cx_obj_udp = IPVariableTime(host=self.host, port=self.port,
create_sta=False, sta_list=self.sta_list, traffic_type="lf_udp", create_sta=False, sta_list=self.sta_list, traffic_type="lf_udp",
upstream=self.upstream_port) upstream=self.upstream_port)
self.l3_cx_obj_udp.cx_profile.name_prefix = "udp-" self.l3_cx_obj_udp.cx_profile.name_prefix = "udp-"
self.l3_cx_obj_udp.cx_profile.side_a_min_bps = 128000 self.l3_cx_obj_udp.cx_profile.side_a_min_bps = 128000
@@ -137,9 +138,9 @@ class TTLSTest(LFCliBase):
self.l3_cx_obj_udp.cx_profile.side_b_min_pdu = 1500 self.l3_cx_obj_udp.cx_profile.side_b_min_pdu = 1500
self.l3_cx_obj_udp.cx_profile.report_timer = 1000 self.l3_cx_obj_udp.cx_profile.report_timer = 1000
self.l3_cx_obj_tcp = IPV4VariableTime(host=self.host, port=self.port, self.l3_cx_obj_tcp = IPVariableTime(host=self.host, port=self.port,
create_sta=False, sta_list=self.sta_list, traffic_type="lf_tcp", create_sta=False, sta_list=self.sta_list, traffic_type="lf_tcp",
upstream=self.upstream_port) upstream=self.upstream_port)
self.l3_cx_obj_tcp.cx_profile.name_prefix = "tcp-" self.l3_cx_obj_tcp.cx_profile.name_prefix = "tcp-"
self.l3_cx_obj_tcp.cx_profile.side_a_min_bps = 128000 self.l3_cx_obj_tcp.cx_profile.side_a_min_bps = 128000
self.l3_cx_obj_tcp.cx_profile.side_a_max_bps = 128000 self.l3_cx_obj_tcp.cx_profile.side_a_max_bps = 128000
@@ -172,7 +173,7 @@ class TTLSTest(LFCliBase):
passwd=self.ttls_passwd, passwd=self.ttls_passwd,
realm=self.ttls_realm, realm=self.ttls_realm,
domain=self.domain, domain=self.domain,
hessid=self.hessid ) hessid=self.hessid)
if self.ieee80211w: if self.ieee80211w:
self.station_profile.set_command_param("add_sta", "ieee80211w", self.ieee80211w) self.station_profile.set_command_param("add_sta", "ieee80211w", self.ieee80211w)
if self.enable_pkc: if self.enable_pkc:
@@ -241,7 +242,7 @@ class TTLSTest(LFCliBase):
if (len(sta_list) == len(ip_map)) and (len(sta_list) == len(associated_map)): if (len(sta_list) == len(ip_map)) and (len(sta_list) == len(associated_map)):
self._pass("PASS: All stations associated with IP", print_pass) self._pass("PASS: All stations associated with IP", print_pass)
else: else:
self._fail("FAIL: Not all stations able to associate/get IP", print_fail) self._fail("FAIL: Not all stations able to associate/get IP", print_fail)
if self.debug: if self.debug:
print("sta_list", sta_list) print("sta_list", sta_list)
@@ -255,7 +256,6 @@ class TTLSTest(LFCliBase):
# please see test_ipv4_variable_time for example of generating traffic # please see test_ipv4_variable_time for example of generating traffic
return self.passes() return self.passes()
def stop(self): def stop(self):
# Bring stations down # Bring stations down
self.station_profile.admin_down() self.station_profile.admin_down()
@@ -313,11 +313,11 @@ class TTLSTest(LFCliBase):
else: else:
self._fail("%s did not report traffic: %s" % (name, postVal), print_fail) self._fail("%s did not report traffic: %s" % (name, postVal), print_fail)
def main():
def main():
parser = LFCliBase.create_basic_argparse( parser = LFCliBase.create_basic_argparse(
prog='test_ipv4_ttls.py', prog='test_ipv4_ttls.py',
#formatter_class=argparse.RawDescriptionHelpFormatter, # formatter_class=argparse.RawDescriptionHelpFormatter,
formatter_class=argparse.RawTextHelpFormatter, formatter_class=argparse.RawTextHelpFormatter,
epilog='''Demonstration showing wpa2-ent ttls authentication''', epilog='''Demonstration showing wpa2-ent ttls authentication''',
@@ -340,25 +340,28 @@ test_ipv4_ttls.py:
for agroup in parser._action_groups: for agroup in parser._action_groups:
if agroup.title == "required arguments": if agroup.title == "required arguments":
required = agroup required = agroup
#if required is not None: # if required is not None:
optional = None optional = None
for agroup in parser._action_groups: for agroup in parser._action_groups:
if agroup.title == "optional arguments": if agroup.title == "optional arguments":
optional = agroup optional = agroup
if optional is not None: if optional is not None:
optional.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000) optional.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
optional.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000) optional.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
optional.add_argument('--test_duration', help='--test_duration sets the duration of the test', default="5m") optional.add_argument('--test_duration', help='--test_duration sets the duration of the test', default="5m")
optional.add_argument('--key-mgmt', help="--key-mgt: { %s }"%", ".join(realm.wpa_ent_list()), default="WPA-EAP") optional.add_argument('--key-mgmt', help="--key-mgt: { %s }" % ", ".join(realm.wpa_ent_list()),
default="WPA-EAP")
optional.add_argument('--wpa_psk', help='wpa-ent pre shared key', default="[BLANK]") optional.add_argument('--wpa_psk', help='wpa-ent pre shared key', default="[BLANK]")
optional.add_argument('--eap', help='--eap eap method to use', default="TTLS") optional.add_argument('--eap', help='--eap eap method to use', default="TTLS")
optional.add_argument('--identity', help='--identity eap identity string', default="testuser") optional.add_argument('--identity', help='--identity eap identity string', default="testuser")
optional.add_argument('--ttls_passwd', help='--ttls_passwd eap password string', default="testpasswd") optional.add_argument('--ttls_passwd', help='--ttls_passwd eap password string', default="testpasswd")
optional.add_argument('--ttls_realm', help='--ttls_realm 802.11u home realm to use', default="localhost.localdomain") optional.add_argument('--ttls_realm', help='--ttls_realm 802.11u home realm to use',
default="localhost.localdomain")
optional.add_argument('--domain', help='--domain 802.11 domain to use', default="localhost.localdomain") optional.add_argument('--domain', help='--domain 802.11 domain to use', default="localhost.localdomain")
optional.add_argument('--hessid', help='--hessid 802.11u HESSID (MAC addr format/peer for WDS)', default="00:00:00:00:00:01") optional.add_argument('--hessid', help='--hessid 802.11u HESSID (MAC addr format/peer for WDS)',
default="00:00:00:00:00:01")
optional.add_argument('--ieee80211w', help='--ieee80211w <disabled(0),optional(1),required(2)', default='1') optional.add_argument('--ieee80211w', help='--ieee80211w <disabled(0),optional(1),required(2)', default='1')
optional.add_argument('--use_hs20', help='use HotSpot 2.0', default=False) optional.add_argument('--use_hs20', help='use HotSpot 2.0', default=False)
optional.add_argument('--enable_pkc', help='enable opportunistic PMKSA WPA2 key caching', default=False) optional.add_argument('--enable_pkc', help='enable opportunistic PMKSA WPA2 key caching', default=False)
@@ -368,7 +371,7 @@ test_ipv4_ttls.py:
num_stations_converted = int(args.num_stations) num_stations_converted = int(args.num_stations)
num_sta = num_stations_converted num_sta = num_stations_converted
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta-1, padding_number_=10000) station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta - 1, padding_number_=10000)
ttls_test = TTLSTest(host=args.mgr, port=args.mgr_port, ttls_test = TTLSTest(host=args.mgr, port=args.mgr_port,
ssid=args.ssid, ssid=args.ssid,
password=args.passwd, password=args.passwd,

File diff suppressed because it is too large Load Diff

400
py-scripts/test_l4.py Executable file
View File

@@ -0,0 +1,400 @@
#!/usr/bin/env python3
"""
NAME: test_l4.py
PURPOSE:
test_l4.py will create stations and endpoints to generate and verify layer-4 traffic
This script will monitor the urls/s, bytes-rd, or bytes-wr attribute of the endpoints.
These attributes can be tested over FTP using a --ftp flag.
If the the monitored value does not continually increase, this test will not pass.
This script replaces the functionality of test_ipv4_l4.py, test_ipv4_l4_ftp_upload.py, test_ipv4_l4_ftp_urls_per_ten.py,
test_ipv4_l4_ftp_wifi.py, test_ipv4_l4_urls_per_ten.py, test_ipv4_l4_urls_per_ten.py, test_ipv4_l4_wifi.py
EXAMPLE (urls/s):
./test_l4.py --upstream_port eth1 --radio wiphy0 --num_stations 3 --security {open|wep|wpa|wpa2|wpa3}
--ssid netgear --passwd admin123 --requests_per_ten 600 --mode 1 --num_tests 1 --test_type 'urls'
--url "dl http://10.40.0.1 /dev/null" --ap "00:0e:8e:78:e1:76" --target_per_ten 600 --output_format csv
--report_file ~/Documents/results.csv --test_duration 2m --debug
EXAMPLE (bytes-wr):
./test_l4.py --upstream_port eth1 --radio wiphy0 --num_stations 3 --security {open|wep|wpa|wpa2|wpa3}
--ssid netgear --passwd admin123 --test_duration 2m --url "ul http://10.40.0.1 /dev/null"
--requests_per_ten 600 --test_type bytes-wr --debug
EXAMPLE (bytes-rd):
./test_l4.py --upstream_port eth1 (optional) --radio wiphy0 (required) --num_stations 3 (optional)
--security {open|wep|wpa|wpa2|wpa3} (required) --ssid netgear (required)
--url "dl http://10.40.0.1 /dev/null" (required) --password admin123 (required)
--test_duration 2m (optional) --test_type bytes-rd --debug (optional)
EXAMPLE (ftp urls/s):
./test_l4.py --upstream_port eth1 --radio wiphy0 --num_stations 3 --security {open|wep|wpa|wpa2|wpa3}
--ssid netgear --passwd admin123 --test_duration 2m --interval 1s --mode 1 --ap "00:0e:8e:78:e1:76"
--requests_per_ten 600 --num_tests 1 --ftp --test_type 'urls'
--url "ul ftp://lanforge:lanforge@10.40.0.1/example.txt /home/lanforge/example.txt" --debug
EXAMPLE (ftp bytes-wr):
./test_l4.py --upstream_port eth1 --radio wiphy0 --num_stations 3 --security {open|wep|wpa|wpa2|wpa3}
--ssid netgear --passwd admin123 --test_duration 2m --url "ul ftp://10.40.0.1 /dev/null"
--requests_per_ten 600 --ftp --test_type bytes-wr --debug
EXAMPLE (ftp bytes-rd):
./test_l4.py --upstream_port eth1 (optional) --radio wiphy0 (required) --num_stations 3 (optional)
--security {open|wep|wpa|wpa2|wpa3} (required) --ssid netgear (required)
--url "dl ftp://10.40.0.1 /dev/null" (required) --password admin123 (required)
--test_duration 2m (optional) --ftp --test_type bytes-rd --debug (optional)
Use './test_l4.py --help' to see command line usage and options
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
"""
import sys
import os
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
import argparse
from LANforge.lfcli_base import LFCliBase
from LANforge import LFUtils
import realm
import time
import datetime
from realm import TestGroupProfile
from port_utils import PortUtils
class IPV4L4(LFCliBase):
def __init__(self,
host="localhost",
port=8080,
ssid=None,
security=None,
password=None,
url=None,
ftp_user=None,
ftp_passwd=None,
requests_per_ten=None,
station_list=None,
test_duration="2m",
ap=None,
mode=0,
target_requests_per_ten=60,
number_template="00000",
num_tests=1,
radio="wiphy0",
_debug_on=False,
upstream_port="eth1",
ftp=False,
source=None,
dest=None,
test_type=None,
_exit_on_error=False,
_exit_on_fail=False):
super().__init__(host, port, _debug=_debug_on, _exit_on_fail=_exit_on_fail)
self.host = host
self.port = port
self.radio = radio
self.upstream_port = upstream_port
self.ssid = ssid
self.security = security
self.password = password
self.url = url
self.mode = mode
self.ap = ap
self.debug = _debug_on
self.requests_per_ten = int(requests_per_ten)
self.number_template = number_template
self.test_duration = test_duration
self.sta_list = station_list
self.num_tests = int(num_tests)
self.target_requests_per_ten = int(target_requests_per_ten)
self.local_realm = realm.Realm(lfclient_host=self.host, lfclient_port=self.port)
self.station_profile = self.local_realm.new_station_profile()
self.cx_profile = self.local_realm.new_l4_cx_profile()
self.port_util = PortUtils(self.local_realm)
self.station_profile.lfclient_url = self.lfclient_url
self.station_profile.ssid = self.ssid
self.station_profile.ssid_pass = self.password
self.station_profile.security = self.security
self.station_profile.number_template_ = self.number_template
self.station_profile.mode = self.mode
self.test_type = test_type
self.ftp_user = ftp_user
self.ftp_passwd = ftp_passwd
self.source = source
self.dest = dest
if self.ap is not None:
self.station_profile.set_command_param("add_sta", "ap", self.ap)
self.cx_profile.url = self.url
self.cx_profile.test_type = self.test_type
self.cx_profile.requests_per_ten = self.requests_per_ten
self.cx_profile.target_requests_per_ten = self.target_requests_per_ten
self.ftp = ftp
if self.ftp and 'ftp://' not in self.url:
print("WARNING! FTP test chosen, but ftp:// not present in url!")
test_types = {'urls', 'bytes-wr', 'bytes-rd'}
if self.test_type not in test_types:
raise ValueError(
"Unknown test type: %s\nValid test types are urls, bytes-rd, or bytes-wr" % self.test_type)
def build(self):
# Build stations
self.station_profile.use_security(self.security, self.ssid, self.password)
print("Creating stations")
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
self._pass("PASS: Station build finished")
temp_url = self.url.split(" ")
if temp_url[0] == 'ul' or temp_url[0] == 'dl':
if len(temp_url) == 2:
if self.url.startswith("ul") and self.source not in self.url:
self.cx_profile.url += " " + self.source
elif self.url.startswith("dl") and self.dest not in self.url:
self.cx_profile.url += " " + self.dest
else:
raise ValueError("ul or dl required in url to indicate direction")
if self.ftp:
if self.ftp_user is not None and self.ftp_passwd is not None:
if ("%s:%s" % (self.ftp_user, self.ftp_passwd)) not in self.url:
temp_url = self.url.split("//")
temp_url = ("//%s:%s@" % (self.ftp_user, self.ftp_passwd)).join(temp_url)
self.cx_profile.url = temp_url
self.cx_profile.create(ports=self.station_profile.station_names, sleep_time=.5, debug_=self.debug,
suppress_related_commands_=True)
else:
self.cx_profile.create(ports=self.station_profile.station_names, sleep_time=.5, debug_=self.debug,
suppress_related_commands_=None)
def start(self, print_pass=False, print_fail=False):
    """Admin-up the stations, wait for DHCP, then start the Layer 4 traffic.

    For FTP tests the upstream port's FTP service is switched on first.
    Exits the process if the stations fail to obtain IP addresses.
    """
    if self.ftp:
        # Enable the FTP server on the upstream port before traffic starts.
        upstream_name = self.local_realm.name_to_eid(self.upstream_port)[2]
        self.port_util.set_ftp(port_name=upstream_name, resource=1, on=True)
    stations = self.sta_list.copy()
    self.station_profile.admin_up()
    if not self.local_realm.wait_for_ip(stations):
        self._fail("Stations failed to get IPs", print_fail)
        exit(1)
    self._pass("All stations got IPs", print_pass)
    self.cx_profile.start_cx()
    print("Starting test")
def stop(self):
    """Stop Layer 4 traffic, disable FTP (if it was enabled) and admin-down the stations."""
    self.cx_profile.stop_cx()
    if self.ftp:
        upstream_name = self.local_realm.name_to_eid(self.upstream_port)[2]
        self.port_util.set_ftp(port_name=upstream_name, resource=1, on=False)
    self.station_profile.admin_down()
def cleanup(self, sta_list):
    """Remove the Layer 4 endpoints and the given stations, then wait until the ports vanish."""
    self.cx_profile.cleanup()
    self.station_profile.cleanup(sta_list)
    LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url,
                                       port_list=sta_list,
                                       debug=self.debug)
def main():
    """Parse CLI arguments, then build, run, monitor, and tear down the Layer 4 test.

    Creates stations, starts Layer 4 (HTTP/FTP) traffic, monitors the requested
    columns into a report file, then stops and cleans everything up.
    Exits non-zero if the stations fail to pass the test criteria.
    """
    parser = LFCliBase.create_basic_argparse(
        prog='test_l4',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog='''\
This script will monitor the urls/s, bytes-rd, or bytes-wr attribute of the endpoints.
''',
        description='''\
test_l4.py:
--------------------
Generic command example:
python3 ./test_l4.py
--upstream_port eth1 \\
--radio wiphy0 \\
--num_stations 3 \\
--security {open|wep|wpa|wpa2|wpa3} \\
--ssid netgear \\
--passwd admin123 \\
--requests_per_ten 600 \\
--mode 1
{"auto" : "0",
"a" : "1",
"b" : "2",
"g" : "3",
"abg" : "4",
"abgn" : "5",
"bgn" : "6",
"bg" : "7",
"abgnAC" : "8",
"anAC" : "9",
"an" : "10",
"bgnAC" : "11",
"abgnAX" : "12",
"bgnAX" : "13"} \\
--num_tests 1 \\
--url "dl http://10.40.0.1 /dev/null" \\
--ap "00:0e:8e:78:e1:76"
--target_per_ten 600 \\
--output_format csv \\
--report_file ~/Documents/results.csv \\
--test_duration 2m \\
--debug
''')
    # Extend the "optional arguments" group created by create_basic_argparse.
    optional = None
    for agroup in parser._action_groups:
        if agroup.title == "optional arguments":
            optional = agroup
    if optional is not None:
        optional.add_argument('--requests_per_ten', help='--requests_per_ten number of request per ten minutes',
                              default=600)
        optional.add_argument('--num_tests', help='--num_tests number of tests to run. Each test runs 10 minutes',
                              default=1)
        optional.add_argument('--url', help='--url specifies upload/download, address, and dest',
                              default="dl http://10.40.0.1 /dev/null")
        optional.add_argument('--test_duration', help='duration of test', default="2m")
        optional.add_argument('--target_per_ten',
                              help='--target_per_ten target number of request per ten minutes. test will check for 90 percent this value',
                              default=600)
        optional.add_argument('--mode', help='Used to force mode of stations')
        optional.add_argument('--ap', help='Used to force a connection to a particular AP')
        optional.add_argument('--report_file', help='where you want to store results')
        optional.add_argument('--output_format', help='choose csv or xlsx')  # update once other forms are completed
        optional.add_argument('--ftp', help='Use ftp for the test', action='store_true')
        optional.add_argument('--test_type', help='Choose type of test to run {urls, bytes-rd, bytes-wr}',
                              default='bytes-rd')
        optional.add_argument('--ftp_user', help='--ftp_user sets the username to be used for ftp', default=None)
        optional.add_argument('--ftp_passwd', help='--ftp_passwd sets the password to be used for ftp', default=None)
        optional.add_argument('--dest',
                              help='--dest specifies the destination for the file, should be used when downloading',
                              default="/dev/null")
        optional.add_argument('--source',
                              help='--source specifies the source of the file, should be used when uploading',
                              default="/var/www/html/data_slug_4K.bin")
    args = parser.parse_args()

    # Number of stations to create (defaults to 2 when not given / non-positive).
    num_sta = 2
    if (args.num_stations is not None) and (int(args.num_stations) > 0):
        num_sta = int(args.num_stations)

    # Resolve the report file location and output format exactly once.
    supported_formats = ['csv', 'json', 'html', 'hdf', 'stata', 'pickle',
                         'pdf', 'parquet', 'png', 'df', 'xlsx']
    if args.report_file is None:
        # No report file given: create a timestamped directory under
        # /home/lanforge/report-data, falling back to this script's parent dir.
        try:
            homedir = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M")).replace(':',
                                                                                      '-') + 'test_l4'
            path = os.path.join('/home/lanforge/report-data/', homedir)
            os.mkdir(path)
        except OSError:
            path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            print('Saving file to local directory')
        if args.output_format in supported_formats:
            output_form = args.output_format.lower()
            rpt_file = path + '/data.' + args.output_format
        else:
            print('Defaulting data file output type to Excel')
            output_form = 'xlsx'
            rpt_file = path + '/data.xlsx'
    else:
        rpt_file = args.report_file
        # Infer the format from the file extension unless explicitly given.
        if args.output_format is None:
            output_form = str(args.report_file).split('.')[-1]
        else:
            output_form = args.output_format

    station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta - 1, padding_number_=10000,
                                          radio=args.radio)
    ip_test = IPV4L4(host=args.mgr, port=args.mgr_port,
                     ssid=args.ssid,
                     password=args.passwd,
                     radio=args.radio,
                     upstream_port=args.upstream_port,
                     security=args.security,
                     station_list=station_list,
                     url=args.url,
                     mode=args.mode,
                     ap=args.ap,
                     ftp=args.ftp,
                     ftp_user=args.ftp_user,
                     ftp_passwd=args.ftp_passwd,
                     source=args.source,
                     dest=args.dest,
                     test_type=args.test_type,
                     _debug_on=args.debug,
                     test_duration=args.test_duration,
                     num_tests=args.num_tests,
                     target_requests_per_ten=args.target_per_ten,
                     requests_per_ten=args.requests_per_ten)
    ip_test.cleanup(station_list)
    ip_test.build()
    ip_test.start()

    # Collect the names of the created Layer 4 endpoints for monitoring.
    # Initialized to None so a failed query doesn't raise NameError below.
    layer4traffic = None
    try:
        layer4traffic = ','.join([[*x.keys()][0] for x in ip_test.local_realm.json_get('layer4')['endpoint']])
    except Exception:
        pass
    ip_test.cx_profile.monitor(col_names=['name', 'bytes-rd', 'urls/s', 'bytes-wr'],
                               report_file=rpt_file,
                               duration_sec=ip_test.local_realm.parse_time(args.test_duration).total_seconds(),
                               created_cx=layer4traffic,
                               output_format=output_form,
                               script_name='test_l4',
                               arguments=args,
                               debug=args.debug)
    ip_test.stop()
    if not ip_test.passes():
        print(ip_test.get_fail_message())
        exit(1)
    time.sleep(30)
    ip_test.cleanup(station_list)
    if ip_test.passes():
        print("Full test passed")
# Standard script entry point: run the test only when executed directly.
if __name__ == "__main__":
    main()

View File

@@ -1,14 +1,13 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# Create and modify WAN Links from the command line.
# Written by Candela Technologies Inc.
# Updated by: Erin Grimes
import sys import sys
if sys.version_info[0] != 3: if sys.version_info[0] != 3:
print("This script requires Python 3") print("This script requires Python 3")
exit(1) exit(1)
if 'py-json' not in sys.path: if 'py-json' not in sys.path:
sys.path.append('../py-json') sys.path.append('../py-json')
import argparse import argparse
from LANforge.lfcli_base import LFCliBase from LANforge.lfcli_base import LFCliBase
from LANforge.LFUtils import * from LANforge.LFUtils import *
@@ -17,50 +16,33 @@ import time
import create_wanlink import create_wanlink
class LANtoWAN(Realm): class LANtoWAN(Realm):
def __init__(self, host, port, ssid, security, password, def __init__(self, args):
lan_port="eth2", super().__init__(args['host'], args['port'])
wan_port="eth3", self.args = args
prefix='sta', self.lan_port="eth2"
number_template="00000", self.wan_port="eth3"
radio="wiphy0", # self.prefix='sta'
sta_list = [], # self.number_template="00000"
side_a_min_rate=56, side_a_max_rate=0, self.radio="wiphy0"
side_b_min_rate=56, side_b_max_rate=0, # self.sta_list = []
upstream = None, # self.side_a_min_rate=0
_debug_on=False, # self.side_a_max_rate=56
_exit_on_error=False, # self.side_b_min_rate=0
_exit_on_fail=False): # self.side_b_max_rate=56
super().__init__(host, port) self._debug_on=False
self.upstream = upstream self._exit_on_error=False
self.host = host self._exit_on_fail=False
self.port = port
self.ssid = ssid
self.radio = radio
self.sta_list = sta_list
self.security = security
self.password = password
self.timeout = 120
self.lan_port = lan_port
self.wan_port = wan_port
self.prefix = prefix
self.number_template = number_template
self.station_profile = self.new_station_profile()
self.cx_profile = self.new_l3_cx_profile()
def create_wanlinks(self, shelf=1, resource=1, max_rate=1544000):
self.cx_profile.side_a_min_bps = side_a_min_rate
self.cx_profile.side_a_max_bps = side_a_max_rate
self.cx_profile.side_b_min_bps = side_b_min_rate
self.cx_profile.side_b_max_bps = side_b_max_rate
def create_wanlinks(self, shelf=1, resource=1, latency=20, max_rate=1544000):
print("Creating wanlinks") print("Creating wanlinks")
# print("the latency is {laten}\n".format(laten=self.latency))
# create redirects for wanlink # create redirects for wanlink
url = "/cli-json/add_rdd" url = "/cli-json/add_rdd"
data = { data = {
"shelf": shelf, "shelf": shelf,
"resource": resource, "resource": resource,
"port": "rd0a", "port": "rd0a",
"peer_ifname": "rd1a" "peer_ifname": "rd1a"
} }
self.json_post(url, data) self.json_post(url, data)
@@ -69,7 +51,7 @@ class LANtoWAN(Realm):
data = { data = {
"shelf": shelf, "shelf": shelf,
"resource": resource, "resource": resource,
"port": "rd1a", "port": "rd1a",
"peer_ifname": "rd0a" "peer_ifname": "rd0a"
} }
self.json_post(url, data) self.json_post(url, data)
@@ -78,35 +60,29 @@ class LANtoWAN(Realm):
# create wanlink endpoints # create wanlink endpoints
url = "/cli-json/add_wl_endp" url = "/cli-json/add_wl_endp"
data = { data = {
"alias": "wlan0", "alias": "wlan1",
"shelf": shelf, "shelf": shelf,
"resource": resource, "resource": resource,
"port": "rd0a", "port": "rd0a",
"latency": latency, "latency": self.args['latency_A'],
"max_rate": max_rate "max_rate": self.args['rate_A']
} }
self.json_post(url, data) self.json_post(url, data)
url = "/cli-json/add_wl_endp" url = "/cli-json/add_wl_endp"
data = { data = {
"alias": "wlan1", "alias": "wlan2",
"shelf": shelf, "shelf": shelf,
"resource": resource, "resource": resource,
"port": "rd1a", "port": "rd1a",
"latency": latency, "latency": self.args['latency_B'],
"max_rate": max_rate "max_rate": self.args['rate_B']
} }
self.json_post(url, data) self.json_post(url, data)
create_wanlink.main(base_url='http://'+self.host+':8080') create_wanlink.main('http://'+self.args['host']+':8080', self.args)
time.sleep(.05)
def run(self):
#self.cx_profile.use_wpa2(True, self.ssid, self.password)
self.station_profile.create(radio="wiphy0", num_stations=3, debug=False)
def cleanup(self): pass def cleanup(self): pass
def main(): def main():
parser = LFCliBase.create_basic_argparse( parser = LFCliBase.create_basic_argparse(
prog='test_wanlink.py', prog='test_wanlink.py',
@@ -115,7 +91,6 @@ def main():
if group.title == "required arguments": if group.title == "required arguments":
required_args=group required_args=group
break break
#if required_args is not None:
optional_args=None optional_args=None
for group in parser._action_groups: for group in parser._action_groups:
@@ -123,28 +98,37 @@ def main():
optional_args=group optional_args=group
break break
if optional_args is not None: if optional_args is not None:
optional_args.add_argument('--lanport', help='Select the port you want for lanport', default='wiphy0') # optional_args.add_argument('--lanport', help='Select the port you want for lanport', default='wiphy0')
optional_args.add_argument('--wanport', help='Select the port you want for wanport', default='wiphy1') # optional_args.add_argument('--wanport', help='Select the port you want for wanport', default='wiphy1'
for group in parser._action_groups: optional_args.add_argument('--rate', help='The maximum rate of transfer at both endpoints (bits/s)', default=1000000)
if group.title == "optional arguments": optional_args.add_argument('--rate_A', help='The max rate of transfer at endpoint A (bits/s)', default=None)
optional_args=group optional_args.add_argument('--rate_B', help='The maximum rate of transfer (bits/s)', default=None)
break optional_args.add_argument('--latency', help='The delay of both ports', default=20)
#if optional_args is not None: optional_args.add_argument('--latency_A', help='The delay of port A', default=None)
args = parser.parse_args() optional_args.add_argument('--latency_B', help='The delay of port B', default=None)
num_sta=4 # todo: packet loss A and B
station_list = portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta - 1, padding_number_=10000, # todo: jitter A and B
radio=args.radio) for group in parser._action_groups:
ltw=LANtoWAN(host=args.mgr, if group.title == "optional arguments":
port=args.mgr_port, optional_args=group
ssid=args.ssid, break
sta_list=station_list, parseargs = parser.parse_args()
security=args.security, args = {
password=args.passwd, "host": parseargs.mgr,
lan_port=args.lanport, "port": parseargs.mgr_port,
wan_port=args.wanport) "ssid": parseargs.ssid,
"security": parseargs.security,
"password": parseargs.passwd,
"latency": parseargs.latency,
"latency_A": (parseargs.latency_A if parseargs.latency_A is not None else parseargs.latency),
"latency_B": (parseargs.latency_B if parseargs.latency_B is not None else parseargs.latency),
"rate": (parseargs.rate),
"rate_A": (parseargs.rate_A if parseargs.rate_A is not None else parseargs.rate),
"rate_B": (parseargs.rate_B if parseargs.rate_B is not None else parseargs.rate)
}
ltw=LANtoWAN(args)
ltw.create_wanlinks() ltw.create_wanlinks()
#ltw.run()
ltw.cleanup() ltw.cleanup()
if __name__ == "__main__": if __name__ == "__main__":
main() main()

74
py-scripts/tools/CT_US_001.bash Executable file
View File

@@ -0,0 +1,74 @@
#!/bin/bash
# This bash script creates/updates a DUT, creates/updates a chamberview scenario,
# loads and builds that scenario, runs wifi capacity test, and saves the kpi.csv info
# into influxdb. As final step, it builds a grafana dashboard for the KPI information.
# Echo every command as it runs so CI logs show exactly what was executed.
set -x
# Define some common variables. This will need to be changed to match your own testbed.
# MGR is LANforge GUI machine
MGR=192.168.100.116
#MGR=localhost
# Candela internal influx
# NOTE(review): the tokens below look like shared QA credentials committed to the
# repo — confirm they are safe to keep here (they can be overridden via influx_vars.sh).
INFLUXTOKEN=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==
INFLUX_HOST=192.168.100.201
INFLUX_BUCKET=lanforge_qa_testing
INFLUX_ORG=Candela
GRAFANATOKEN=eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ==
GRAFANA_HOST=192.168.100.201
# File where the capacity test writes its report-group locations; removed at the end.
GROUP_FILE=/tmp/lf_cv_rpt_filelocation.txt
TESTBED=CT_US-001
DUT=ASUSRT-AX88U
UPSTREAM=eth2
#LF_WAN_PORT=eth3
MGR_PORT=8080
# Optionally source per-host secrets / variable overrides.
if [ -f $HOME/influx_vars.sh ]
then
    # Put private keys and other variable overrides in here.
    . $HOME/influx_vars.sh
fi
# Create/update new DUT.
#Replace my arguments with your setup. Separate your ssid arguments with spaces and ensure the names are lowercase
echo "Make new DUT"
./create_chamberview_dut.py --lfmgr ${MGR} --port ${MGR_PORT} --dut_name ${DUT} \
    --ssid "ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64" \
    --ssid "ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64" \
    --sw_version "asus_version" --hw_version asus11ax --serial_num 0001 --model_num 88R
# Create/update chamber view scenario and apply and build it.
# Easiest way to get these lines is to build it in the GUI and then
# copy/tweak what it shows in the 'Text Output' tab after saving and re-opening
# the scenario.
echo "Build Chamber View Scenario"
#change the lfmgr to your system, set the radio to a working radio on your LANforge system, same with the ethernet port.
./create_chamberview.py --lfmgr ${MGR} --port ${MGR_PORT} --delete_scenario \
    --create_scenario ucentral-scenario \
    --raw_line "profile_link 1.1 STA-AC 1 'DUT: $DUT Radio-1' NA wiphy1,AUTO -1 NA" \
    --raw_line "profile_link 1.1 STA-AC 1 'DUT: $DUT Radio-1' NA wiphy3,AUTO -1 NA" \
    --raw_line "profile_link 1.1 upstream-dhcp 1 NA NA $UPSTREAM,AUTO -1 NA" \
# Run capacity test on the stations created by the chamber view scenario.
# Submit the KPI data into the influxdb.
#config_name doesn't matter, change the influx_host to your LANforge device,
# NOTE: My influx token is unlucky and starts with a '-', but using the syntax below
# with '=' right after the argument keyword works as hoped.
echo "run wifi capacity test"
./lf_wifi_capacity_test.py --config_name Custom --pull_report --influx_host ${INFLUX_HOST} \
    --influx_port 8086 --influx_org ${INFLUX_ORG} --influx_token=${INFLUXTOKEN} --influx_bucket ${INFLUX_BUCKET} --mgr ${MGR} \
    --port ${MGR_PORT} \
    --instance_name testing --upstream 1.1.$UPSTREAM --test_rig ${TESTBED} --graph_groups ${GROUP_FILE} \
    --batch_size "100" --protocol "TCP-IPv4" --duration 20000 --pull_report
# Build grafana dashboard and graphs view for the KPI in the capacity test.
#echo "Adding grafana dashboard"
#./grafana_profile.py --create_custom --title ${TESTBED} --influx_bucket ${INFLUX_BUCKET} --grafana_token ${GRAFANATOKEN} \
#    --grafana_host ${GRAFANA_HOST} --testbed ${TESTBED} --graph-groups ${GROUPS} --scripts Dataplane --scripts 'WiFi Capacity'
# Clean up the report-group file so the next run starts fresh.
rm ${GROUP_FILE}

View File

@@ -0,0 +1,168 @@
{
"ct_us_001":{
"Notes":[
"The json is used to orchastrate the tests to be run on testbed ct_us_001",
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file. so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
"The replacement is loosely coupled so the upper and lower case convention is used",
"to identify replaced strings in the lf_check.py code."
]
},
"test_parameters":{
"test_bed": "CT-US-001",
"lf_mgr_ip": "192.168.100.116",
"lf_mgr_port": "8080",
"dut_name": "ASUSRT-AX88U",
"dut_bssid": "3c:7c:3f:55:4d:64",
"test_timeout": 200,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.100.201",
"email_list_test": "chuck.rekiere@candelatech.com,logan.lipke@candelatech.com",
"host_ip_test": "192.168.100.201",
"email_title_txt": "Lanforge QA Testing CT-US-001",
"email_txt": "Lanforge QA Testing CT-US-001 "
},
"test_network":{
"http_test_ip": "10.40.0.10",
"ftp_test_ip": "10.40.0.10",
"test_ip": "192.168.0.104"
},
"test_generic":{
"radio_used": "wiphy1",
"ssid_used": "asus11ax-5",
"ssid_pw_used": "hello123",
"security_used": "wpa2",
"num_sta": 1,
"col_names": "name,tx_byptes,rx_bytes,dropped",
"upstream_port": "eth2"
},
"test_database":{
"database_config": "True",
"database_host": "192.168.100.201",
"database_port": "8086",
"database_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
"database_org": "Candela",
"database_bucket": "lanforge_qa_testing",
"dut_set_name": "DUT_NAME ASUSRT-AX88U",
"database_tag": "testbed CT-US-001",
"test_rig": "CT-US-001"
},
"test_dashboard":{
"dashboard_config": "True",
"dashboard_host": "192.168.100.201",
"dashboard_token": "eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ=="
},
"test_blog":{
"blog_config": "True",
"blog_host": "192.168.100.153",
"blog_token": "60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742",
"blog_authors": "Matthew",
"blog_customer": "candela",
"blog_user_push": "lanforge",
"blog_password_push": "lanforge",
"blog_flag": "--kpi_to_ghost"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"1","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"1","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"}
},
"test_suites":{
"suite_l3":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"skip","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"suite_wc_dp":{
"CT-US-001_create_chamberview_dut_1":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview_dut.py","args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64'",
" --sw_version '3.5.4' --hw_version 5.12.14+ --serial_num ct523c-3b7b --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_1":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview.py","args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
]
},
"CT-US-001_wifi_capacity_ATH10K(9984)":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan1",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_dataplane_ATH10K(9984)":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan1",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_create_chamberview_dut_2":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview_dut.py","args":"",
"args_list":[
"--lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64'",
" --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=3c:7c:3f:55:4d:64'",
" --sw_version '3.5.4' --hw_version 5.12.14+ --serial_num ct523c-3b7b --model_num DUT_NAME"
]
},
"CT-US-001_create_chamberview_2":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview.py","args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ct-us-001-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\" "
]
},
"CT-US-001_wifi_capacity_AX210":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan3",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-001_dataplane_AX210":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan3",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
}
}
}

View File

@@ -0,0 +1,226 @@
{
"ct_us_002":{
"Notes":[
"The json is used to orchastrate the tests to be run on testbed ct_us_001",
"This json file is used as an input to the ./lf_check.py file",
"The variables that are all capitalized below are replaced with configuration",
"from the json file. so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip",
"The replacement is loosely coupled so the upper and lower case convention is used",
"to identify replaced strings in the lf_check.py code."
]
},
"test_parameters":{
"test_bed": "CT-US-002",
"lf_mgr_ip": "192.168.100.200",
"lf_mgr_port": "8080",
"dut_name": "Ruckus-R750",
"dut_bssid": "4c:b1:cd:18:e8:ec",
"test_timeout": 1200,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.100.201",
"email_list_test": "chuck.rekiere@candelatech.com,logan.lipke@candelatech.com,matthew.stidham@candelatech.com",
"host_ip_test": "192.168.100.201",
"email_title_txt": "Lanforge QA Testing CT-US-002",
"email_txt": "Lanforge QA Testing CT-US-002"
},
"test_network":{
"http_test_ip": "10.40.0.10",
"ftp_test_ip": "10.40.0.10",
"test_ip": "192.168.0.104"
},
"test_generic":{
"radio_used": "wiphy1",
"ssid_used": "ruckus-r750-5g",
"ssid_pw_used": "hello123",
"security_used": "wpa2",
"num_sta": 4,
"col_names": "name,tx_byptes,rx_bytes,dropped",
"upstream_port": "eth2"
},
"test_database":{
"database_config": "True",
"database_host": "192.168.100.201",
"database_port": "8086",
"database_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
"database_org": "Candela",
"database_bucket": "lanforge_qa_testing",
"dut_set_name": "DUT_NAME Ruckus-R750",
"database_tag": "testbed CT-US-002",
"test_rig": "CT-US-002"
},
"test_dashboard":{
"dashboard_config": "True",
"dashboard_host": "192.168.100.201",
"dashboard_token": "eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ=="
},
"test_blog":{
"blog_config": "True",
"blog_host": "192.168.100.153",
"blog_token": "60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742",
"blog_authors": "Matthew",
"blog_customer": "candela",
"blog_user_push": "lanforge",
"blog_password_push": "lanforge",
"blog_flag": "--kpi_to_ghost"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"4","SSID":"ruckus-r750-5g","PASSWD":"hello123","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"4","SSID":"ruckus-r750-5g","PASSWD":"hello123","SECURITY":"wpa2"}
},
"test_suites":{
"suite_two":{
"test_l3_longevity":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==ct523c-vap,ssid_pw==ct523c-vap,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"auto_suite":{
"CT-US-002_create_chamberview_dut_1":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview_dut.py","args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=4c:b1:cd:18:e8:ec'",
" --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=4c:b1:cd:18:e8:ec'",
" --sw_version '3.5.4' --hw_version 5.12.14+ --serial_num ct523c-3b7b --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_1":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview.py","args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth1",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 0",
" --set 'Multi Band Performance' 1 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
" --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
" --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
},
"suite_wc_dp":{
"CT-US-002_create_chamberview_dut_1":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview_dut.py","args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=4c:b1:cd:18:e8:ec'",
" --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=4c:b1:cd:18:e8:ec'",
" --sw_version '3.5.4' --hw_version 5.12.14+ --serial_num ct523c-3b7b --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_1":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview.py","args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_wifi_capacity_ATH10k(9984)":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan1",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-002_dataplane_ATH10k(9984) CT-US-002":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut DUT_NAME --duration 30s --station 1.1.wlan1",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' ",
" --raw_line 'spatial_streams: 4' --pull_report --local_lf_report_dir REPORT_PATH ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-002_create_chamberview_dut_2":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview_dut.py","args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
" --ssid 'ssid_idx=0 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=4c:b1:cd:18:e8:ec'",
" --ssid 'ssid_idx=1 ssid=ruckus-r750-5g security=WPA2 password=hello123 bssid=4c:b1:cd:18:e8:ec'",
" --sw_version '3.5.4' --hw_version 5.12.14+ --serial_num ct523c-3b7b --model_num DUT_NAME"
]},
"CT-US-002_create_chamberview_2":{"enabled":"TRUE","load_db":"skip","command":"create_chamberview.py","args":"",
"args_list":[
" --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
" --create_scenario ucentral-scenario ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
" --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
]
},
"CT-US-002_wifi_capacity_AX200 CT-US-002":{"enabled":"TRUE","load_db":"skip","command":"lf_wifi_capacity_test.py","args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
" --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
" --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan4 ",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-002_dataplane_AX200":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
" --config_name test_con --upstream 1.1.eth2 --dut DUT_NAME --duration 30s --station 1.1.wlan4",
" --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
" --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
" --raw_line 'spatial_streams: 4' --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"CT-US-002_lf_ap_auto_test": {
"enabled": "TRUE",
"command": "lf_ap_auto_test.py",
"args": "",
"args_list":[
" --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
" --instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth1",
" --dut5_0 'DUT_NAME lanforge DUT_BSSID (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID (1)'",
" --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
" --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
" --set 'Multi Band Performance' 1 --set 'Skip 2.4 Ghz Tests' 1 --pull_report --local_lf_report_dir REPORT_PATH",
" --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
" --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
]
},
"GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
"args_list":[
" --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
" --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
" --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
" --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
" --influx_tag DATABASE_TAG "
]
}
}
}
}

View File

@@ -0,0 +1,72 @@
{
"test_parameters":{
"test_timeout": 200,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,chuck.rekiere@candelatech.com",
"host_ip_production": "192.168.95.6",
"email_list_test": "chuck.rekiere@candelatech.com,chuck.rekiere@gmail.com",
"host_ip_test": "192.168.95.6",
"lf_mgr": "192.168.100.116",
"email_title_txt": "Lanforge QA Testing CT-US-001",
"email_txt": "Lanforge QA Testing CT-US-001 "
},
"test_network":{
"http_test_ip": "10.40.0.10",
"ftp_test_ip": "10.40.0.10",
"test_ip": "192.168.0.104"
},
"test_generic":{
"radio_used": "wiphy1",
"ssid_used": "asus11ax-5",
"ssid_pw_used": "hello123",
"security_used": "wpa2",
"num_sta": 4,
"col_names": "name,tx_bytes,rx_bytes,dropped",
"upstream_port": "eth1"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"4","SSID":"ct523c-vap","PASSWD":"ct523c-vap","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"4","SSID":"ct523c-vap","PASSWD":"ct523c-vap","SECURITY":"wpa2"}
},
"test_suites":{
"suite_one":{
"create_l3":{"enabled":"TRUE","command":"create_l3.py","args":"--radio RADIO_USED --ssid SSID_USED --passwd SSID_PW_USED --security SECURITY_USED --debug"},
"create_l4":{"enabled":"TRUE","command":"create_l4.py","args":"RADIO_1_CFG --debug"},
"create_l4_2":{"enabled":"TRUE","command":"create_l4.py","args":"--radio wiphy1 --ssid ct523c-vap --passwd ct523c-vap --security wpa2 --debug"}
},
"suite_two":{
"test_l3_longevity":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy1,stations==4,ssid==ct523c-vap,ssid_pw==ct523c-vap,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"suite_dp_short":{
"dataplane_0":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "}
},
"suite_dp_cmd":{
"dataplane_0":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "}
},
"suite_dp_long":{
"dataplane_00":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_01":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_02":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_03":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_04":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_05":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_06":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_07":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_08":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_09":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "},
"dataplane_10":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--mgr 192.168.100.116 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.sta0000 --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1' --pull_report --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== --influx_bucket lanforge_qa_testing --influx_tag testbed CT-US-001 "}
},
"TEST_DICTIONARY":{
"dataplane_0":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--json lf_dp.json --influx_json lf_influx_db.json"},
"dataplane_1":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--json lf_dp.json --influx_json lf_influx_db.json"},
"dataplane_2":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--json lf_dp.json --influx_json lf_influx_db.json"}
}
}
}

View File

@@ -0,0 +1,61 @@
{
"test_parameters":{
"test_timeout": 200,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "chuck.rekiere@candelatech.com",
"host_ip_production": "192.168.95.6",
"email_list_test": "chuck.rekiere@candelatech.com",
"host_ip_test": "192.168.95.6",
"lf_mgr": "192.168.100.116",
"email_title_txt": "Dataplane Test",
"email_txt": "Dataplane Regression testing "
},
"test_network":{
"http_test_ip": "10.40.0.10",
"ftp_test_ip": "10.40.0.10",
"test_ip": "192.168.0.104"
},
"test_generic":{
"radio_used": "wiphy1",
"ssid_used": "ct523c-vap",
"ssid_pw_used": "ct523c-vap",
"security_used": "wpa2",
"num_sta": 4,
"col_names": "name,tx_bytes,rx_bytes,dropped",
"upstream_port": "eth1"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"4","SSID":"ct523c-vap","PASSWD":"ct523c-vap","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"4","SSID":"ct523c-vap","PASSWD":"ct523c-vap","SECURITY":"wpa2"}
},
"test_suites":{
"suite_one":{
"create_l3":{"enabled":"TRUE","command":"create_l3.py","args":"--radio RADIO_USED --ssid SSID_USED --passwd SSID_PW_USED --security SECURITY_USED --debug"},
"create_l4":{"enabled":"TRUE","command":"create_l4.py","args":"RADIO_1_CFG --debug"},
"create_l4_2":{"enabled":"TRUE","command":"create_l4.py","args":"--radio wiphy1 --ssid ct523c-vap --passwd ct523c-vap --security wpa2 --debug"}
},
"suite_two":{
"test_l3_longevity":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy1,stations==4,ssid==ct523c-vap,ssid_pw==ct523c-vap,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"suite_dp":{
"create_station":{"enabled":"TRUE","command":"create_station.py","args":"--radio RADIO_USED --start_id 2 --num_stations 1 --ssid SSID_USED --passwd SSID_PW_USED --security SECURITY_USED --debug"},
"data_plane_0":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--json lf_dp.json --influx_json lf_influx_db.json"},
"data_plane_1":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--json lf_dp.json --influx_json lf_influx_db.json"},
"data_plane_2":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--json lf_dp.json --influx_json lf_influx_db.json"}
},
"TEST_DICTIONARY":{
"create_station":{"enabled":"TRUE","command":"create_station.py","args":"--radio RADIO_USED --start_id 2 --num_stations 1 --ssid SSID_USED --passwd SSID_PW_USED --security SECURITY_USED --debug"},
"data_plane_0":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--json lf_dp.json --influx_json lf_influx_db.json"},
"data_plane_1":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--json lf_dp.json --influx_json lf_influx_db.json"},
"data_plane_2":{"enabled":"TRUE","load_db":"skip","command":"lf_dataplane_test.py","args":"--json lf_dp.json --influx_json lf_influx_db.json"},
"lf_ap_auto_test": {"enabled":"TRUE","command":"lf_ap_auto_test.py","args": "--instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth1 --dut5_0 'linksys-8450 lanforge 04:f0:21:2c:41:84 (1)' --dut2_0 'linksys-8450 lanforge 04:f0:21:2c:41:84 (1)' --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1 --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1 --set 'Multi Band Performance' 1 --set 'Skip 2.4 Ghz Tests' 1 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --pull_report"}
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,89 @@
{
"test_parameters":{
"test_timeout": 200,
"load_blank_db": false,
"load_factory_default_db": true,
"load_custom_db": false,
"custom_db": "DFLT_ETH1_GEN",
"email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
"host_ip_production": "192.168.95.6",
"email_list_test": "chuck.rekiere@candelatech.com,logan.lipke@candelatech.com",
"host_ip_test": "192.168.95.6",
"lf_mgr": "192.168.100.116",
"email_title_txt": "Lanforge QA Testing CT-US-001 - Scripts",
"email_txt": "Lanforge QA Testing CT-US-001 - Scripts"
},
"test_network":{
"http_test_ip": "10.40.0.10",
"ftp_test_ip": "10.40.0.10",
"test_ip": "192.168.0.104"
},
"test_generic":{
"radio_used": "wiphy1",
"ssid_used": "asus11ax-5",
"ssid_pw_used": "hello123",
"security_used": "wpa2",
"num_sta": 4,
"col_names": "name,tx_bytes,rx_bytes,dropped",
"upstream_port": "eth1"
},
"radio_dict":{
"RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"4","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"},
"RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"4","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"}
},
"test_suites":{
"suite_short":{
"create_l3":{"enabled":"TRUE","command":"create_l3.py","args":"--radio RADIO_USED --ssid SSID_USED --passwd SSID_PW_USED --security SECURITY_USED --debug"},
"test_l3_longevity":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"suite_l3":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"NONE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
},
"suite_daily":{
"test_l3_longevity":{"enabled":"TRUE","load_db":"NONE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"},
"example_security_connection0":{"enabled":"TRUE","command":"example_security_connection.py","args":"--num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"},
"example_security_connection1":{"enabled":"TRUE","command":"example_security_connection.py","args":"--num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"},
"example_security_connection2":{"enabled":"TRUE","command":"example_security_connection.py","args":"--num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"},
"example_security_connection3":{"enabled":"TRUE","command":"example_security_connection.py","args":"--num_stations 4 --ssid asus11ax-5 --passwd hello123 --radio wiphy1 --security wpa2 --debug"},
"sta_connect2":{"enabled":"TRUE","command":"sta_connect2.py","args":"--dut_ssid asus11ax-5 --dut_passwd hello123 --dut_security wpa2"},
"sta_connect_example":{"enabled":"FALSE","command":"sta_connect_example.py","args":""},
"test_fileio":{"enabled":"TRUE","command":"test_fileio.py","args":"--macvlan_parent eth2 --num_ports 3 --use_macvlans --first_mvlan_ip 192.168.92.13 --netmask 255.255.255.0 --test_duration 30s --gateway 192.168.92.1"},
"test_generic0":{"enabled":"FALSE","command":"test_generic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type lfping --dest 10.40.0.1 --debug"},
"test_generic1":{"enabled":"FALSE","command":"test_generic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type speedtest --speedtest_min_up 20 --speedtest_min_dl 20 --speedtest_max_ping 150 --security wpa2 --debug"},
"test_generic2":{"enabled":"FALSE","command":"test_generic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type iperf3 --debug"},
"test_generic3":{"enabled":"FALSE","command":"test_generic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --num_stations 4 --type lfcurl --dest 10.40.0.1 --file_output /home/lanforge/Documents/lfcurl_output.txt --debug"},
"testgroup":{"enabled":"FALSE","command":"testgroup.py","args":"--group_name group1 --add_group --list_groups --debug"},
"testgroup5":{"enabled":"FALSE","command":"testgroup.py","args":"--num_stations 4 --ssid lanforge --passwd password --security wpa2 --radio wiphy0 --group_name group0 --add_group"},
"test_ip_connection-ipv4":{"enabled":"TRUE","command":"test_ip_connection.py","args":"--radio wiphy1 --num_stations 4 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"test_ip_variable_time0-ipv4":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format excel --layer3_cols name,tx_bytes,rx_bytes,dropped --traffic_type lf_udp --debug"},
"test_ip_variable_time1-ipv4":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format csv --layer3_cols name,tx_bytes,rx_bytes,dropped --traffic_type lf_udp --debug"},
"test_ip_connection-ipv6":{"enabled":"FALSE","command":"test_ip_connection.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --ipv6 --debug"},
"test_ip_variable_time0-ipv6":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format excel --layer3_cols name,tx_bytes,rx_bytes,dropped --ipv6 --traffic_type lf_udp --debug"},
"test_ip_variable_time1-ipv6":{"enabled":"TRUE","command":"test_ip_variable_time.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --test_duration 15s --output_format csv --layer3_cols name,tx_bytes,rx_bytes,dropped --ipv6 --traffic_type lf_udp --debug"},
"test_l4_bytes-rd":{"enabled":"TRUE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --test_type bytes-rd --test_duration 15s --url 'dl http://10.40.0.1 /dev/null' --debug"},
"test_l4_bytes-wr":{"enabled":"FALSE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --test_type bytes-wr --test_duration 15s --url 'ul http://10.40.0.1' --debug"},
"test_l4_urls_s":{"enabled":"TRUE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --test_type urls --test_duration 15s --requests_per_ten 600 --target_per_ten 600 --url 'dl http://10.40.0.1 /dev/null' --debug"},
"test_l4_ftp_bytes-rd":{"enabled":"TRUE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type bytes-rd --test_duration 15s --url 'dl ftp://10.40.0.1 /dev/null' --debug"},
"test_l4_ftp_bytes-wr":{"enabled":"FALSE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type bytes-wr --test_duration 15s --url 'ul ftp://10.40.0.1' --debug"},
"test_l4_ftp_urls_s":{"enabled":"TRUE","command":"test_l4.py","args":"--radio wiphy1 --num_stations 4 --security wpa2 --ssid asus11ax-5 --passwd hello123 --ftp --test_type urls --requests_per_ten 600 --target_per_ten 600 --test_duration 15s --url 'dl ftp://10.40.0.1 /dev/null' --debug"},
"test_l3_longevity_1":{"enabled":"TRUE","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth1 --radio 'radio==wiphy0,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"},
"test_l3_powersave_traffic":{"enabled":"TRUE","command":"test_l3_powersave_traffic.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"test_status_msg":{"enabled":"TRUE","command":"test_status_msg.py","args":"--action run_test"},
"test_wanlink":{"enabled":"TRUE","command":"test_wanlink.py","args":"--debug"},
"create_bridge":{"enabled":"TRUE","command":"create_bridge.py","args":"--radio wiphy1 --upstream_port eth1 --target_device sta0000 --debug"},
"create_l3":{"enabled":"TRUE","command":"create_l3.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_l4":{"enabled":"TRUE","command":"create_l4.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_macvlan":{"enabled":"TRUE","command":"create_macvlan.py","args":"--radio wiphy1 --macvlan_parent eth1 --debug"},
"create_station":{"enabled":"TRUE","command":"create_station.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_vap":{"enabled":"TRUE","command":"create_vap.py","args":"--radio wiphy1 --ssid asus11ax-5 --passwd hello123 --security wpa2 --debug"},
"create_qvlan":{"enabled":"TRUE","command":"create_qvlan.py","args":"--radio wiphy1 --qvlan_parent eth1"},
"wlan_capacity_calculator1":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11abg -t Voice -p 48 -m 106 -e WEP -q Yes -b 1 2 5.5 11 -pre Long -s N/A -co G.711 -r Yes -c Yes"},
"wlan_capacity_calculator2":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11n -t Voice -d 17 -ch 40 -gu 800 -high 9 -e WEP -q Yes -ip 5 -mc 42 -b 6 9 12 24 -m 1538 -co G.729 -pl Greenfield -cw 15 -r Yes -c Yes"},
"wlan_capacity_calculator3":{"enabled":"TRUE","command":"./wlan_capacity_calculator.py","args":"-sta 11ac -t Voice -d 9 -spa 3 -ch 20 -gu 800 -high 1 -e TKIP -q Yes -ip 3 -mc 0 -b 6 12 24 54 -m 1518 -co Greenfield -cw 15 -rc Yes"}
}
}
}

View File

@@ -58,7 +58,7 @@ RADIO_DICT: {
"RADIO_4_CFG":{"KEY":"RADIO_4_CFG","RADIO":"wiphy1","STATIONS":"4","SSID":"ssid-wpa3","PASSWD":"ssidpw-wpa3","SECURITY":"wpa3"} "RADIO_4_CFG":{"KEY":"RADIO_4_CFG","RADIO":"wiphy1","STATIONS":"4","SSID":"ssid-wpa3","PASSWD":"ssidpw-wpa3","SECURITY":"wpa3"}
} }
# Not used
[LF_MGR] [LF_MGR]
LF_MGR_IP= localhost LF_MGR_IP= localhost
LF_MGR_PORT=8080 LF_MGR_PORT=8080

View File

@@ -14,12 +14,18 @@ import subprocess
def main(): def main():
print("Installing Script Python3 Dependencies") print("Installing Script Python3 Dependencies")
packages = ['pandas', 'plotly', 'numpy', 'cryptography', 'paramiko', 'bokeh','pyarrow', 'websocket-client', 'xlsxwriter',\ packages = ['pandas', 'plotly', 'numpy', 'cryptography', 'paramiko', 'bokeh','pyarrow', 'websocket-client', 'xlsxwriter',\
'pyshark', 'influxdb', 'influxdb-client', 'matplotlib', 'pdfkit', 'pip-search', 'pyserial', 'pexpect-serial' ,'scp'] 'pyshark', 'influxdb', 'influxdb-client', 'matplotlib', 'pdfkit', 'pip-search', 'pyserial', 'pexpect-serial' ,'scp', 'pyjwt']
packages_installed = [] packages_installed = []
packages_failed =[] packages_failed =[]
subprocess.call("pip3 uninstall jwt", shell=True)
for package in packages: for package in packages:
<<<<<<< HEAD
command = "pip3 install {} ".format(package) command = "pip3 install {} ".format(package)
res = subprocess.call(command, shell = True) res = subprocess.call(command, shell = True)
=======
command = "pip3 install {} >/tmp/pip3-stdout 2>/tmp/pip3-stderr".format(package)
res = subprocess.call(command, shell=True)
>>>>>>> 0ef021e1165cbaa612e5128bc48d6abfbb7b887b
if res == 0: if res == 0:
print("Package {} install SUCCESS Returned Value: {} ".format(package, res)) print("Package {} install SUCCESS Returned Value: {} ".format(package, res))
packages_installed.append(package) packages_installed.append(package)

View File

@@ -5,11 +5,11 @@
# into influxdb. As final step, it builds a grafana dashboard for the KPI information. # into influxdb. As final step, it builds a grafana dashboard for the KPI information.
# Define some common variables. This will need to be changed to match your own testbed. # Define some common variables. This will need to be changed to match your own testbed.
MGR=10.0.0.202 MGR=192.168.93.51
INFLUX_MGR=192.168.100.201 INFLUX_MGR=192.168.100.201
#INFLUXTOKEN=Tdxwq5KRbj1oNbZ_ErPL5tw_HUH2wJ1VR4dwZNugJ-APz__mEFIwnqHZdoobmQpt2fa1VdWMlHQClR8XNotwbg== #INFLUXTOKEN=Tdxwq5KRbj1oNbZ_ErPL5tw_HUH2wJ1VR4dwZNugJ-APz__mEFIwnqHZdoobmQpt2fa1VdWMlHQClR8XNotwbg==
INFLUXTOKEN=31N9QDhjJHBu4eMUlMBwbK3sOjXLRAhZuCzZGeO8WVCj-xvR8gZWWvRHOcuw-5RHeB7xBFnLs7ZV023k4koR1A== INFLUXTOKEN=31N9QDhjJHBu4eMUlMBwbK3sOjXLRAhZuCzZGeO8WVCj-xvR8gZWWvRHOcuw-5RHeB7xBFnLs7ZV023k4koR1A==
TESTBED=Stidmatt-01 TESTBED=Heather
INFLUXBUCKET=stidmatt INFLUXBUCKET=stidmatt
#GRAFANATOKEN=eyJrIjoiZTJwZkZlemhLQVNpY3hiemRjUkNBZ3k2RWc3bWpQWEkiLCJuIjoibWFzdGVyIiwiaWQiOjF9 #GRAFANATOKEN=eyJrIjoiZTJwZkZlemhLQVNpY3hiemRjUkNBZ3k2RWc3bWpQWEkiLCJuIjoibWFzdGVyIiwiaWQiOjF9
GRAFANATOKEN=eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ== GRAFANATOKEN=eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ==
@@ -37,18 +37,25 @@ echo "Build Chamber View Scenario"
echo "run wifi capacity test" echo "run wifi capacity test"
./lf_wifi_capacity_test.py --config_name Custom --create_stations --radio wiphy1 --pull_report --influx_host ${INFLUX_MGR} \ ./lf_wifi_capacity_test.py --config_name Custom --create_stations --radio wiphy1 --pull_report --influx_host ${INFLUX_MGR} \
--influx_port 8086 --influx_org Candela --influx_token ${INFLUXTOKEN} --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} \ --influx_port 8086 --influx_org Candela --influx_token ${INFLUXTOKEN} --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} \
--instance_name testing --upstream eth1 --test_rig ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s --instance_name testing --upstream eth1 --test_rig ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s --local_lf_report_dir ${REPORT_PATH}
#config_name doesn't matter, change the influx_host to your LANforge device, #config_name doesn't matter, change the influx_host to your LANforge device,
echo "run Dataplane test" echo "run Dataplane test"
./lf_dataplane_test.py --mgr ${MGR} --instance_name dataplane-instance --config_name test_config --upstream 1.1.eth1 \ ./lf_dataplane_test.py --mgr ${MGR} --instance_name dataplane-instance --config_name test_config --upstream 1.1.eth1 \
--station 1.1.06 --dut linksys-8450 --influx_host ${INFLUX_MGR} --influx_port 8086 --influx_org Candela --influx_token ${INFLUXTOKEN} \ --station 1.1.06 --dut linksys-8450 --influx_host ${INFLUX_MGR} --influx_port 8086 --influx_org Candela --influx_token ${INFLUXTOKEN} \
--influx_bucket ${INFLUXBUCKET} --influx_tag testbed ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s --influx_bucket ${INFLUXBUCKET} --influx_tag testbed ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s --pull_report --local_lf_report_dir ${REPORT_PATH}
# Build grafana dashboard and graphs view for the KPI in the capacity test. # Build grafana dashboard and graphs view for the KPI in the capacity test.
./grafana_profile.py --create_custom --title ${TESTBED} --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} --grafana_token \ #./grafana_profile.py --create_custom --title ${TESTBED} --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} --grafana_token \
${GRAFANATOKEN} --grafana_host ${INFLUX_MGR} --testbed ${TESTBED} --graph_groups_file lf_cv_rpt_filelocation.txt \ #${GRAFANATOKEN} --grafana_host ${INFLUX_MGR} --testbed ${TESTBED} --graph_groups_file lf_cv_rpt_filelocation.txt \
--scripts Dataplane --datasource 'InfluxDB stidmatt bucket' #--scripts Dataplane --datasource 'InfluxDB stidmatt bucket'
./ghost_profile.py --ghost_token ${GHOST_TOKEN} --ghost_host ${GHOST_MGR} --authors ${AUTHOR} --customer ${CUSTOMER} \
--user_push ${USER_PUSH} --password_push ${PASSWORD_PUSH} --kpi_to_ghost --grafana_token ${GRAFANATOKEN} --grafana_host ${INFLUX_MGR} \
--grafana_bucket ${INFLUXBUCKET} --influx_host ${INFLUX_MGR} --influx_org Candela --influx_token ${INFLUXTOKEN} \
--influx_bucket ${INFLUXBUCKET} --parent_folder ${REPORT_PATH}
rm lf_cv_rpt_filelocation.txt rm lf_cv_rpt_filelocation.txt

View File

@@ -0,0 +1,51 @@
#!/bin/bash
INFLUX_TOKEN=31N9QDhjJHBu4eMUlMBwbK3sOjXLRAhZuCzZGeO8WVCj-xvR8gZWWvRHOcuw-5RHeB7xBFnLs7ZV023k4koR1A==
INFLUX_HOST=c7-grafana.candelatech.com
INFLUX_BUCKET=stidmatt
GRAFANA_TOKEN=eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ==
GHOST_TOKEN=60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742
SSID=lanforge
PASSWORD=password
SECURITY=wpa2
TEST_RIG=ct523c-ccbc
DUT_NAME=linksys-8450
MGR=192.168.1.7
RADIO=wiphy1
UPSTREAM=1.1.eth1
NOW=$(date +"%Y-%m-%d-%H-%M")
mkdir ${NOW}
#./scenario.py --mgr ${MGR} --load BLANK
#./create_l3.py --mgr ${MGR} --radio ${RADIO} --ssid ${SSID} --password ${PASSWORD} --security ${SECURITY}
# Create/update new DUT.
#Replace my arguments with your setup. Separate your ssid arguments with spaces and ensure the names are lowercase
echo "Make new DUT"
./create_chamberview_dut.py --lfmgr ${MGR} --dut_name DUT_TO_GRAFANA_DUT \
--ssid "ssid_idx=0 ssid=lanforge security=WPA2 password=password bssid=04:f0:21:2c:41:84"
# Create/update chamber view scenario and apply and build it.
echo "Build Chamber View Scenario"
#change the lfmgr to your system, set the radio to a working radio on your LANforge system, same with the ethernet port.
./create_chamberview.py --lfmgr ${MGR} --create_scenario DUT_TO_GRAFANA_SCENARIO \
--line "Resource=1.1 Profile=default Amount=4 Uses-1=wiphy1 DUT=DUT_TO_GRAFANA_DUT Traffic=wiphy1 Freq=-1" \
--line "Resource=1.1 Profile=upstream Amount=1 Uses-1=eth1 DUT=DUT_TO_GRAFANA_DUT Traffic=eth1 Freq=-1"
./lf_wifi_capacity_test.py --mgr 192.168.1.7 --lf_user lanforge --lf_password lanforge --instance_name ${DUT_NAME} \
--config_name wifi_config --upstream ${UPSTREAM} --radio wiphy0 --ssid ${SSID} --paswd ${PASSWORD} --security ${SECURITY} \
--influx_host ${INFLUX_HOST} --influx_org Candela --influx_token ${INFLUX_TOKEN} --influx_bucket ${INFLUX_BUCKET} \
--test_rig ${TEST_RIG} --influx_tag testbed ${TEST_RIG} --set DUT_NAME ${DUT_NAME} \
--local_lf_report_dir /home/matthew/Documents/candela/lanforge-scripts/py-scripts/${NOW}
./lf_dataplane_test.py --mgr 192.168.1.7 --lf_user lanforge --lf_password lanforge --instance_name wct_instance \
--config_name wifi_config --upstream ${UPSTREAM} --influx_host ${INFLUX_HOST} --influx_org Candela \
--influx_token ${INFLUX_TOKEN} --influx_bucket ${INFLUX_BUCKET} --test_rig ${TEST_RIG} --influx_tag testbed ${TEST_RIG} \
--station 1.1.sta00000 --raw_line 'traffic_types: UDP;TCP' --set DUT_NAME ${DUT_NAME} \
--local_lf_report_dir /home/matthew/Documents/candela/lanforge-scripts/py-scripts/${NOW} --pull_report
./ghost_profile.py --ghost_token ${GHOST_TOKEN} --ghost_host v-centos8s.candelatech.com --authors Matthew --customer candela \
--user_push lanforge --password_push lanforge --kpi_to_ghost --grafana_token ${GRAFANA_TOKEN} \
--grafana_host c7-grafana.candelatech.com --grafana_bucket lanforge_qa_testing \
--parent_folder /home/matthew/Documents/candela/lanforge-scripts/py-scripts/${NOW}