old lanforge-scripts test

Signed-off-by: shivamcandela <shivam.thakur@candelatech.com>
shivamcandela committed 2021-06-21 00:20:38 +05:30
parent c777ea7b7f
commit e1474f11cf
38 changed files with 388 additions and 13495 deletions

lf_dataplane_test.py (deleted)

@@ -1,334 +0,0 @@
#!/usr/bin/env python3
"""
Note: To run this script the LANforge GUI must be running with the CLI socket enabled:
    cd LANforgeGUI_5.4.3        (replace 5.4.3 with your GUI version)
    pwd                         (Output: /home/lanforge/LANforgeGUI_5.4.3)
    ./lfclient.bash -cli-socket 3990
This script is used to automate running Dataplane tests. You
may need to view a Dataplane test configured through the GUI to understand
the options and how best to input data.
./lf_dataplane_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \
--instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 \
--dut linksys-8450 --duration 15s --station 1.1.sta01500 \
--download_speed 85% --upload_speed 0 \
--raw_line 'pkts: Custom;60;142;256;512;1024;MTU' \
--raw_line 'cust_pkt_sz: 88 1200' \
--raw_line 'directions: DUT Transmit;DUT Receive' \
--raw_line 'traffic_types: UDP;TCP' \
--test_rig Testbed-01 --pull_report \
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
--influx_bucket ben \
--influx_tag testbed Ferndale-01
Note:
--raw_line 'line contents' will add any setting to the test config. This is a
useful way to supply options that are not specifically exposed as
command-line arguments.
--set modifications are applied after the rest of the config has been built,
so they can be used to override any other setting.
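For example, passing --raw_line 'show_3s: 1' (one of the keys shown in the raw config below) would add or override that setting in the generated test config.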
Example of raw text config for Dataplane, to show other possible options:
show_events: 1
show_log: 0
port_sorting: 0
kpi_id: Dataplane Pkt-Size
notes0: ec5211 in bridge mode, wpa2 auth.
bg: 0xE0ECF8
test_rig:
show_scan: 1
auto_helper: 0
skip_2: 0
skip_5: 0
skip_5b: 1
skip_dual: 0
skip_tri: 1
selected_dut: ea8300
duration: 15000
traffic_port: 1.1.157 sta01500
upstream_port: 1.1.2 eth2
path_loss: 10
speed: 85%
speed2: 0Kbps
min_rssi_bound: -150
max_rssi_bound: 0
channels: AUTO
modes: Auto
pkts: Custom;60;142;256;512;1024;MTU
spatial_streams: AUTO
security_options: AUTO
bandw_options: AUTO
traffic_types: UDP;TCP
directions: DUT Transmit;DUT Receive
txo_preamble: OFDM
txo_mcs: 0 CCK, OFDM, HT, VHT
txo_retries: No Retry
txo_sgi: OFF
txo_txpower: 15
attenuator: 0
attenuator2: 0
attenuator_mod: 255
attenuator_mod2: 255
attenuations: 0..+50..950
attenuations2: 0..+50..950
chamber: 0
tt_deg: 0..+45..359
cust_pkt_sz: 88 1200
show_bar_labels: 1
show_prcnt_tput: 0
show_3s: 0
show_ll_graphs: 0
show_gp_graphs: 1
show_1m: 1
pause_iter: 0
outer_loop_atten: 0
show_realtime: 1
operator:
mconn: 1
mpkt: 1000
tos: 0
loop_iterations: 1
"""
import sys
import os
import argparse
import time
import json
from os import path
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
from cv_test_manager import cv_test
from cv_test_manager import *
class DataplaneTest(cv_test):
def __init__(self,
lf_host="localhost",
lf_port=8080,
lf_user="lanforge",
lf_password="lanforge",
ssh_port=22,
local_path="",
instance_name="dpt_instance",
config_name="dpt_config",
upstream="1.1.eth2",
pull_report=False,
load_old_cfg=False,
upload_speed="0",
download_speed="85%",
duration="15s",
station="1.1.sta01500",
dut="NA",
enables=[],
disables=[],
raw_lines=[],
raw_lines_file="",
sets=[],
graph_groups=None,
report_dir=""
):
super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)
self.lf_host = lf_host
self.lf_port = lf_port
self.lf_user = lf_user
self.lf_password = lf_password
self.instance_name = instance_name
self.config_name = config_name
self.dut = dut
self.duration = duration
self.upstream = upstream
self.station = station
self.pull_report = pull_report
self.load_old_cfg = load_old_cfg
self.test_name = "Dataplane"
self.upload_speed = upload_speed
self.download_speed = download_speed
self.enables = enables
self.disables = disables
self.raw_lines = raw_lines
self.raw_lines_file = raw_lines_file
self.sets = sets
self.graph_groups = graph_groups
self.report_dir = report_dir
self.ssh_port = ssh_port
self.local_path = local_path
def setup(self):
# Nothing to do at this time.
return
def run(self):
self.sync_cv()
time.sleep(2)
self.sync_cv()
blob_test = "dataplane-test-latest-"
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
self.show_text_blob(None, None, False)
# Test related settings
cfg_options = []
self.apply_cfg_options(cfg_options, self.enables, self.disables, self.raw_lines, self.raw_lines_file)
# cmd line args take precedence and so come last in the cfg array.
if self.upstream != "":
cfg_options.append("upstream_port: " + self.upstream)
if self.station != "":
cfg_options.append("traffic_port: " + self.station)
if self.download_speed != "":
cfg_options.append("speed: " + self.download_speed)
if self.upload_speed != "":
cfg_options.append("speed2: " + self.upload_speed)
if self.duration != "":
cfg_options.append("duration: " + self.duration)
if self.dut != "":
cfg_options.append("selected_dut: " + self.dut)
# We deleted the old config blob earlier; now rebuild a new one, one line at a time.
self.build_cfg(self.config_name, blob_test, cfg_options)
cv_cmds = []
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets,
self.pull_report, self.lf_host, self.lf_user, self.lf_password,
cv_cmds, ssh_port=self.ssh_port, local_path=self.local_path,
graph_groups_file=self.graph_groups)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
def main():
parser = argparse.ArgumentParser("""
Open this file in an editor and read the top notes for more details.
Example:
./lf_dataplane_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \
--instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 \
--dut linksys-8450 --duration 15s --station 1.1.sta01500 \
--download_speed 85% --upload_speed 0 \
--raw_line 'pkts: Custom;60;142;256;512;1024;MTU' \
--raw_line 'cust_pkt_sz: 88 1200' \
--raw_line 'directions: DUT Transmit;DUT Receive' \
--raw_line 'traffic_types: UDP;TCP' \
--test_rig Testbed-01 --pull_report \
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
--influx_bucket ben \
--influx_tag testbed Ferndale-01
"""
)
cv_add_base_parser(parser) # see cv_test_manager.py
parser.add_argument('--json', help="--json <config.json> json input file", default="")
parser.add_argument("-u", "--upstream", type=str, default="",
help="Upstream port for wifi capacity test ex. 1.1.eth2")
parser.add_argument("--station", type=str, default="",
help="Station to be used in this test, example: 1.1.sta01500")
parser.add_argument("--dut", default="",
help="Specify DUT used by this test, example: linksys-8450")
parser.add_argument("--download_speed", default="",
help="Specify requested download speed. Percentage of theoretical is also supported. Default: 85%")
parser.add_argument("--upload_speed", default="",
help="Specify requested upload speed. Percentage of theoretical is also supported. Default: 0")
parser.add_argument("--duration", default="",
help="Specify duration of each traffic run")
parser.add_argument("--graph_groups", help="File to save graph_groups to", default=None)
parser.add_argument("--report_dir", default="")
args = parser.parse_args()
# TODO
if args.json != "":
try:
with open(args.json, 'r') as json_config:
json_data = json.load(json_config)
except Exception as err:
print("Error reading {}: {}".format(args.json, err))
exit(1)
# json configuration takes precedence over the command line
# TODO see if there is an easier way to check for key presence; look at the parser args
if "mgr" in json_data:
args.mgr = json_data["mgr"]
if "port" in json_data:
args.port = json_data["port"]
if "lf_user" in json_data:
args.lf_user = json_data["lf_user"]
if "lf_password" in json_data:
args.lf_password = json_data["lf_password"]
if "instance_name" in json_data:
args.instance_name = json_data["instance_name"]
if "config_name" in json_data:
args.config_name = json_data["config_name"]
if "upstream" in json_data:
args.upstream = json_data["upstream"]
if "dut" in json_data:
args.dut = json_data["dut"]
if "duration" in json_data:
args.duration = json_data["duration"]
if "station" in json_data:
args.station = json_data["station"]
if "download_speed" in json_data:
args.download_speed = json_data["download_speed"]
if "upload_speed" in json_data:
args.upload_speed = json_data["upload_speed"]
if "raw_line" in json_data:
# json_data["raw_line"] is a flat list; convert it to a list of lists to match the command-line raw_line parameters
# https://www.tutorialspoint.com/convert-list-into-list-of-lists-in-python
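# e.g. ["pkts: Custom;60;MTU", "traffic_types: UDP"] becomes [["pkts: Custom;60;MTU"], ["traffic_types: UDP"]]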
json_data_tmp = [[x] for x in json_data["raw_line"]]
args.raw_line = json_data_tmp
cv_base_adjust_parser(args)
print(args)
#exit(1)
# if a json config was supplied, its values have already overridden the command-line args above
CV_Test = DataplaneTest(lf_host = args.mgr,
lf_port = args.port,
lf_user = args.lf_user,
lf_password = args.lf_password,
instance_name = args.instance_name,
config_name = args.config_name,
upstream = args.upstream,
pull_report = args.pull_report,
load_old_cfg = args.load_old_cfg,
download_speed = args.download_speed,
upload_speed = args.upload_speed,
duration = args.duration,
dut = args.dut,
station = args.station,
enables = args.enable,
disables = args.disable,
raw_lines = args.raw_line, # arrives as a list of single-item lists (see the --json handling above)
raw_lines_file = args.raw_lines_file,
sets = args.set,
graph_groups = args.graph_groups
)
CV_Test.setup()
CV_Test.run()
CV_Test.check_influx_kpi(args)
if __name__ == "__main__":
main()
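For reference, here is a minimal sketch (written in Python for convenience) of a JSON config file that the --json handling in main() above would accept. The keys mirror the checks in main(); the values are placeholders copied from the example invocation in the module docstring, not script defaults.

import json

example_cfg = {
    "mgr": "localhost",
    "port": 8080,
    "lf_user": "lanforge",
    "lf_password": "lanforge",
    "instance_name": "dataplane-instance",
    "config_name": "test_con",
    "upstream": "1.1.eth2",
    "dut": "linksys-8450",
    "duration": "15s",
    "station": "1.1.sta01500",
    "download_speed": "85%",
    "upload_speed": "0",
    "raw_line": ["pkts: Custom;60;142;256;512;1024;MTU", "cust_pkt_sz: 88 1200"],
}

# write the placeholder config to disk; pass it to the script with --json config.json
with open("config.json", "w") as f:
    json.dump(example_cfg, f, indent=2)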

lf_pdf_search.py (deleted)

@@ -1,146 +0,0 @@
#!/usr/bin/python3
'''
NAME:
lf_pdf_search.py
PURPOSE:
lf_pdf_search.py runs pdfgrep to look for specific information in PDF files, for example:
"pdfgrep -r --include 'ASA*.pdf' 'ASA End Date'"
EXAMPLE:
lf_pdf_search.py
NOTES:
1. copy lf_pdf_search.py to the directory that contains the PDF files to be searched
TO DO NOTES:
'''
import datetime
import pprint
import sys
if sys.version_info[0] != 3:
print("This script requires Python3")
exit()
import os
import socket
import logging
import time
from time import sleep
import argparse
import json
import configparser
import subprocess
import csv
import shutil
import os.path
import xlsxwriter
import re
import pandas as pd
class lf_pdf_search():
def __init__(self):
self.renewal_info = ""
self.timeout = 10
self.outfile = "pdf_search"
self.result = ""
self.stdout_log_txt = ""
self.stdout_log = ""
self.stderr_log_txt = ""
self.stderr_log = ""
self.processed_log_txt = ""
self.dataframe = ""
self.pdf_search_csv = ""
def get_data(self):
# A little overkill here: save the output to files to help debug if something goes wrong
if self.outfile is not None:
self.stdout_log_txt = self.outfile
self.stdout_log_txt = self.stdout_log_txt + "-{}-stdout.txt".format("test")
self.stdout_log = open(self.stdout_log_txt, 'w+')
self.stderr_log_txt = self.outfile
self.stderr_log_txt = self.stderr_log_txt + "-{}-stderr.txt".format("test")
#self.logger.info("stderr_log_txt: {}".format(stderr_log_txt))
self.stderr_log = open(self.stderr_log_txt, 'w+')
print("Names {} {}".format(self.stdout_log.name, self.stderr_log.name))
# have ability to pass in a specific command
command = "pdfgrep -r --include 'ASA*.pdf' 'ASA End Date'"
print("running {}".format(command))
process = subprocess.Popen(['pdfgrep','-r','--include','ASA*.pdf','ASA End Date'], shell=False, stdout=self.stdout_log, stderr=self.stderr_log, universal_newlines=True)
try:
process.wait(timeout=int(self.timeout))
self.result = "SUCCESS"
except subprocess.TimeoutExpired:
process.terminate()
self.result = "TIMEOUT"
self.stdout_log.close()
self.stderr_log.close()
return self.stdout_log_txt
def preprocess_data(self):
pass
# This method uses a pandas DataFrame for the data manipulation;
# the manipulation could also be done in other ways.
def datafile_to_dataframe(self):
# note: the error_bad_lines=False fallback will skip any malformed lines
try:
self.dataframe = pd.read_csv(self.stdout_log_txt, delimiter=':')
except Exception:
print("one of the files may have an extra ':' (e.g. 'SN:') in the match; skipping bad lines")
self.dataframe = pd.read_csv(self.stdout_log_txt, delimiter=':', error_bad_lines=False)
#print(self.dataframe)
print("saving data to .csv")
# this removes the extention of .txt
self.pdf_search_csv= self.stdout_log_txt[:-4]
self.pdf_search_csv = self.pdf_search_csv + ".csv"
self.pdf_search_csv = self.dataframe.to_csv(self.pdf_search_csv,mode='w',index=False)
def main():
# arguments
parser = argparse.ArgumentParser(
prog='lf_pdf_search.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
lf_pdf_search.py : search PDF files for specific information using pdfgrep
''',
description='''\
lf_pdf_search.py
-----------
Summary :
---------
show renewal information extracted from the PDF files
''')
parser.add_argument('--outfile', help="--outfile <Output Generic Name> used as base name for all files generated", default="")
parser.add_argument('--logfile', help="--logfile <logfile Name> logging for output of lf_pdf_search script", default="lf_pdf_search.log")
args = parser.parse_args()
pdf_search = lf_pdf_search()
output_file = pdf_search.get_data()
pdf_search.datafile_to_dataframe()
print("output file: {}".format(str(output_file)))
print("END lf_pdf_search.py")
if __name__ == "__main__":
main()
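The core flow of the class above, condensed into a minimal stand-alone sketch: run pdfgrep, then load its "path.pdf:matched text" output into a DataFrame. File names, the glob, and the search pattern are placeholders; on_bad_lines requires pandas 1.3+, while older versions use error_bad_lines=False as above.

import subprocess

import pandas as pd

# run the same pdfgrep command and capture stdout to a file
with open("pdf_search-example-stdout.txt", "w+") as out:
    subprocess.run(["pdfgrep", "-r", "--include", "ASA*.pdf", "ASA End Date"],
                   stdout=out, timeout=10, check=False)

# each hit is one "path.pdf:matched text" line; rows with extra ':' characters become bad lines and are skipped
df = pd.read_csv("pdf_search-example-stdout.txt", delimiter=":", header=None,
                 names=["file", "match"], on_bad_lines="skip")
df.to_csv("pdf_search-example.csv", index=False)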

lf_read_json.py (deleted)

@@ -1,118 +0,0 @@
#!/usr/bin/python3
'''
NAME:
lf_read_json.py
PURPOSE:
Test out reading configuration data from a .json style config file
EXAMPLE:
./lf_read_json.py --file <name>.json
NOTES:
TO DO NOTES:
'''
import sys
if sys.version_info[0] != 3:
print("This script requires Python3")
exit()
from time import sleep
import argparse
import json
class lf_read_json():
def __init__(self):
self.timeout = 10
def preprocess_data(self):
pass
def main():
# arguments
parser = argparse.ArgumentParser(
prog='lf_read_json.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
lf_read_json.py : read json
''',
description='''\
lf_read_json.py
-----------
Summary :
---------
./lf_dataplane_json.py --mgr 192.168.0.101 --port 8080 --lf_user lanforge --lf_password lanforge \
--instance_name dataplane-instance --config_name test_con --upstream 1.1.eth1 --dut asus_5g \
--duration 15s --station 1.1.13.sta0002 --download_speed 85% --upload_speed 0 \
--raw_line 'pkts: Custom;60;MTU' --raw_line 'cust_pkt_sz: 88 1200' --raw_line 'directions: DUT Transmit' \
--raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1'
''')
parser.add_argument('--json', help="--json <config.json> json input file", default="config.json")
args = parser.parse_args()
config_json = args.json
print("config_json {}".format(config_json))
with open(config_json, 'r') as config_file:
config_data = json.load(config_file)
print(config_data)
print("mgr: {}".format(config_data["mgr"]))
#print("raw_line: {}".format(config_data["raw_line"]))
raw = []
raw = config_data["raw_line"]
print(raw)
# raw is a list
raw2 = [[x] for x in raw]
print(raw2)
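# with the example command recorded below, raw2 prints as [['pkts: Custom;60;MTU'], ['cust_pkt_sz: 88 1200'], ['directions: DUT Transmit'], ...]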
'''
for r in raw_lines:
cfg_options.append(r[0])
'''
'''./lf_dataplane_json.py --mgr 192.168.0.101 --port 8080 --lf_user lanforge --lf_password lanforge --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth1 --dut asus_5g --duration 15s --station 1.1.13.sta0002 --download_speed 85% --upload_speed 0 --raw_line 'pkts: Custom;60;MTU' --raw_line 'cust_pkt_sz: 88 1200' --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20' --raw_line 'spatial_streams: 1'
Namespace(config_name='test_con', disable=[], download_speed='85%', duration='15s', dut='asus_5g', enable=[], graph_groups=None, influx_bucket=None, influx_host=None, influx_org=None, influx_port=8086, influx_tag=[], influx_token=None, instance_name='dataplane-instance', json='', lf_password='lanforge', lf_user='lanforge', load_old_cfg=False, mgr='192.168.0.101', port=8080, pull_report=False,
correct version:
raw_line=[['pkts: Custom;60;MTU'], ['cust_pkt_sz: 88 1200'], ['directions: DUT Transmit'], ['traffic_types: UDP'], ['bandw_options: 20'], ['spatial_streams: 1']], raw_lines_file='', report_dir='', set=[], station='1.1.13.sta0002', test_rig='', upload_speed='0', upstream='1.1.eth1')
'''
''' Incorrect version
raw_line={'pkts': ['Custom', '60', 'MTU'], 'cust_pkt_sz': ['88', '1200'], 'directions': 'DUT Transmit', 'traffic_types': 'UDP', 'bandw_options': '20', 'stpatial_streams': '1'}
'''
'''cfg_options = []
for r in raw:
print(r)
test = '{}:{}'.format(r,raw[r])
cfg_options.append(test)
print(cfg_options)
'''
#dave = []
#for key,val in raw.items(): dave.append(raw.items())
#print(dave)
if "mgr" in config_data:
print("mgr present")
print("END lf_read_json.py")
if __name__ == "__main__":
main()

test_ipv4_variable_time.py (new file)

@@ -0,0 +1,297 @@
#!/usr/bin/env python3
"""test_ipv4_variable_time.py will create stations and endpoints to generate and verify layer-3 traffic.
This script will create a variable number of stations each with their own set of cross-connects and endpoints.
It will then create layer 3 traffic over a specified amount of time, testing for increased traffic at regular intervals.
This test will pass if all stations increase traffic over the full test duration.
Use './test_ipv4_variable_time.py --help' to see command line usage and options
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
"""
import sys
import os
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
import argparse
from LANforge import LFUtils
from realm import Realm
from test_base import TestBase
import time
import datetime
class IPV4VariableTime(Realm, TestBase):
def __init__(self,
ssid=None,
security=None,
password=None,
sta_list=[],
name_prefix=None,
upstream=None,
radio=None,
host="localhost",
port=8080,
mode=0,
ap=None,
monitor=False,
side_a_min_rate=56, side_a_max_rate=0,
side_b_min_rate=56, side_b_max_rate=0,
number_template="00000", test_duration="5m", use_ht160=False,
_debug_on=False,
_exit_on_error=False,
_exit_on_fail=False):
super().__init__(lfclient_host=host,
lfclient_port=port)
self.upstream = upstream
self.host = host
self.port = port
self.ssid = ssid
self.sta_list = sta_list
self.security = security
self.password = password
self.radio = radio
self.mode = mode
self.ap = ap
self.number_template = number_template
self.debug = _debug_on
# self.json_post("/cli-json/set_resource", {
# "shelf":1,
# "resource":all,
# "max_staged_bringup": 30,
# "max_trying_ifup": 15,
# "max_station_bringup": 6
# })
self.name_prefix = name_prefix
self.test_duration = test_duration
self.station_profile = self.new_station_profile(ver = 2, station_list = sta_list)
self.cx_profile = self.new_l3_cx_profile(ver = 2)
#station profile settings
self.station_profile.lfclient_url = self.lfclient_url
self.station_profile.ssid = self.ssid
self.station_profile.ssid_pass = self.password
self.station_profile.security = self.security
self.station_profile.number_template_ = self.number_template
self.station_profile.debug = self.debug
self.station_profile.use_security(self.security, self.ssid, self.password)
self.station_profile.set_number_template(self.number_template)
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
self.station_profile.use_ht160 = use_ht160
self.station_profile.mode = mode
if self.ap is not None:
self.station_profile.set_command_param("add_sta", "ap", self.ap)
#cx profile settings
self.cx_profile.host = self.host
self.cx_profile.port = self.port
self.cx_profile.name_prefix = self.name_prefix
self.cx_profile.side_a_min_bps = side_a_min_rate
self.cx_profile.side_a_max_bps = side_a_max_rate
self.cx_profile.side_b_min_bps = side_b_min_rate
self.cx_profile.side_b_max_bps = side_b_max_rate
self.profiles.extend([self.station_profile, self.cx_profile])
def main():
optional = []
optional.append({'name': '--mode', 'help': 'Used to force mode of stations'})
optional.append({'name': '--ap', 'help': 'Used to force a connection to a particular AP'})
optional.append({'name': '--output_format', 'help': 'choose either csv or xlsx'})
optional.append({'name': '--report_file', 'help': 'where you want to store results', 'default': None})
optional.append({'name': '--a_min', 'help': '--a_min bps rate minimum for side_a', 'default': 256000})
optional.append({'name': '--b_min', 'help': '--b_min bps rate minimum for side_b', 'default': 256000})
optional.append(
{'name': '--test_duration', 'help': '--test_duration sets the duration of the test', 'default': "2m"})
optional.append({'name': '--layer3_cols', 'help': 'Columns wished to be monitored from layer 3 endpoint tab',
'default': ['name', 'tx bytes', 'rx bytes']})
optional.append({'name': '--port_mgr_cols', 'help': 'Columns wished to be monitored from port manager tab',
'default': ['ap', 'ip', 'parent dev']})
optional.append(
{'name': '--compared_report', 'help': 'report path and file which is wished to be compared with new report',
'default': None})
optional.append({'name': '--monitor_interval',
'help': 'frequency of monitor polls - ex: 250ms, 35s, 2h',
'default': '2s'})
optional.append({'name': '--monitor',
'help': 'whether test data should be recorded and stored in a report'})
parser = Realm.create_basic_argparse(
prog='test_ipv4_variable_time.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
Create stations to test connection and traffic on VAPs of varying security types (WEP, WPA, WPA2, WPA3, Open)
''',
description='''\
test_ipv4_variable_time.py:
--------------------
Generic command layout:
python3 ./test_ipv4_variable_time.py
--upstream_port eth1
--radio wiphy0
--num_stations 32
--security {open|wep|wpa|wpa2|wpa3}
--mode 1
{"auto" : "0",
"a" : "1",
"b" : "2",
"g" : "3",
"abg" : "4",
"abgn" : "5",
"bgn" : "6",
"bg" : "7",
"abgnAC" : "8",
"anAC" : "9",
"an" : "10",
"bgnAC" : "11",
"abgnAX" : "12",
"bgnAX" : "13"}
--ssid netgear
--password admin123
--test_duration 2m (default)
--monitor_interval 2s (default)
--monitor
--a_min 3000
--b_min 1000
--ap "00:0e:8e:78:e1:76"
--output_format csv
--report_file ~/Documents/results.csv (Example of csv file output - please use another extension for other file formats)
--compared_report ~/Documents/results_prev.csv (Example of csv file retrieval - please use another extension for other file formats) - UNDER CONSTRUCTION
--layer3_cols 'name','tx bytes','rx bytes','dropped' (Layer 3 column names from the GUI to print on the report - see the table below for the exact strings to use)
--port_mgr_cols 'ap','ip' (Port Manager column names from the GUI to print on the report)
--debug
===============================================================================
** FURTHER INFORMATION **
Using the layer3_cols flag:
Currently the output function does not accept column names exactly as they are displayed in the GUI. This quirk is under construction. To include
a GUI column in your final report, use its lowercase counterpart from the table below so the column is correctly displayed in
your report.
GUI Column Display Layer3_cols argument to type in (to print in report)
Name | 'name'
EID | 'eid'
Run | 'run'
Mng | 'mng'
Script | 'script'
Tx Rate | 'tx rate'
Tx Rate (1 min) | 'tx rate (1&nbsp;min)'
Tx Rate (last) | 'tx rate (last)'
Tx Rate LL | 'tx rate ll'
Rx Rate | 'rx rate'
Rx Rate (1 min) | 'rx rate (1&nbsp;min)'
Rx Rate (last) | 'rx rate (last)'
Rx Rate LL | 'rx rate ll'
Rx Drop % | 'rx drop %'
Tx PDUs | 'tx pdus'
Tx Pkts LL | 'tx pkts ll'
PDU/s TX | 'pdu/s tx'
Pps TX LL | 'pps tx ll'
Rx PDUs | 'rx pdus'
Rx Pkts LL | 'rx pkts ll'
PDU/s RX | 'pdu/s rx'
Pps RX LL | 'pps rx ll'
Delay | 'delay'
Dropped | 'dropped'
Jitter | 'jitter'
Tx Bytes | 'tx bytes'
Rx Bytes | 'rx bytes'
Replays | 'replays'
TCP Rtx | 'tcp rtx'
Dup Pkts | 'dup pkts'
Rx Dup % | 'rx dup %'
OOO Pkts | 'ooo pkts'
Rx OOO % | 'rx ooo %'
RX Wrong Dev | 'rx wrong dev'
CRC Fail | 'crc fail'
RX BER | 'rx ber'
CX Active | 'cx active'
CX Estab/s | 'cx estab/s'
1st RX | '1st rx'
CX TO | 'cx to'
Pattern | 'pattern'
Min PDU | 'min pdu'
Max PDU | 'max pdu'
Min Rate | 'min rate'
Max Rate | 'max rate'
Send Buf | 'send buf'
Rcv Buf | 'rcv buf'
CWND | 'cwnd'
TCP MSS | 'tcp mss'
Bursty | 'bursty'
A/B | 'a/b'
Elapsed | 'elapsed'
Destination Addr | 'destination addr'
Source Addr | 'source addr'
''',
more_optional=optional)
args = parser.parse_args()
num_sta = 2
if (args.num_stations is not None) and (int(args.num_stations) > 0):
num_sta = int(args.num_stations)
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta - 1, padding_number_=10000,
radio=args.radio)
#transfer below to l3cxprofile2 or base_profile-----------------------#
# try:
# layer3connections = ','.join([[*x.keys()][0] for x in ip_var_test.json_get('endp')['endpoint']])
# except:
# raise ValueError('Try setting the upstream port flag if your device does not have an eth1 port')
# if type(args.layer3_cols) is not list:
# layer3_cols = list(args.layer3_cols.split(","))
# # send col names here to file to reformat
# else:
# layer3_cols = args.layer3_cols
# # send col names here to file to reformat
# if type(args.port_mgr_cols) is not list:
# port_mgr_cols = list(args.port_mgr_cols.split(","))
# # send col names here to file to reformat
# else:
# port_mgr_cols = args.port_mgr_cols
# # send col names here to file to reformat
# if args.debug:
# print("Layer 3 Endp column names are...")
# print(layer3_cols)
# print("Port Manager column names are...")
# print(port_mgr_cols)
ip_var_test = IPV4VariableTime(host=args.mgr,
port=args.mgr_port,
number_template="0000",
sta_list=station_list,
name_prefix="VT",
upstream=args.upstream_port,
ssid=args.ssid,
password=args.passwd,
radio=args.radio,
security=args.security,
test_duration=args.test_duration,
use_ht160=False,
side_a_min_rate=args.a_min,
side_b_min_rate=args.b_min,
mode=args.mode,
ap=args.ap,
_debug_on=args.debug)
ip_var_test.begin()
if __name__ == "__main__":
main()
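For reference, a typical invocation assembled from the parser options above might look like the following. All values are placeholders, and the flag spellings follow the attribute names the script reads (args.passwd, args.upstream_port, etc.), so check --help on your build:
./test_ipv4_variable_time.py --mgr localhost --upstream_port eth1 --radio wiphy0 \
    --num_stations 4 --ssid netgear --passwd admin123 --security wpa2 \
    --test_duration 2m --a_min 3000 --b_min 1000 \
    --output_format csv --report_file ~/Documents/results.csv --debug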