More reliable way to parse CSV files

Signed-off-by: Matthew Stidham <stidmatt@gmail.com>
This commit is contained in:
Matthew Stidham
2021-05-12 09:56:12 -07:00
parent 9b7ca46751
commit 371cde5843
5 changed files with 77 additions and 47 deletions

View File

@@ -13,6 +13,7 @@ import argparse
from cv_test_reports import lanforge_reports as lf_rpt
from csv_to_influx import *
def cv_base_adjust_parser(args):
if args.test_rig != "":
# TODO: In future, can use TestRig once that GUI update has propagated
@@ -284,7 +285,7 @@ class cv_test(Realm):
# cv_cmds: Array of raw chamber-view commands, such as "cv click 'button-name'"
# These (and the sets) are applied after the test is created and before it is started.
def create_and_run_test(self, load_old_cfg, test_name, instance_name, config_name, sets,
pull_report, lf_host, lf_user, lf_password, cv_cmds, graphgroupsfile=None):
pull_report, lf_host, lf_user, lf_password, cv_cmds, graph_groups_file=None):
load_old = "false"
if load_old_cfg:
load_old = "true"
@@ -307,7 +308,7 @@ class cv_test(Realm):
self.auto_save_report(instance_name)
for kv in sets:
cmd = "cv set '%s' '%s' '%s'"%(instance_name, kv[0], kv[1]);
cmd = "cv set '%s' '%s' '%s'" % (instance_name, kv[0], kv[1])
print("Running CV set command: ", cmd)
self.run_cv_cmd(cmd)
@@ -336,10 +337,14 @@ class cv_test(Realm):
location = location.replace('\"Report Location:::', '')
location = location.replace('\"', '')
report = lf_rpt()
print(graphgroupsfile)
if graphgroupsfile is not None:
filelocation = open(graphgroupsfile, 'a')
filelocation.write(location + '/kpi.csv\n')
print(graph_groups_file)
if graph_groups_file is not None:
filelocation = open(graph_groups_file, 'a')
if pull_report:
location2 = location.replace('/home/lanforge/html-reports/', '')
filelocation.write(location2 + '/kpi.csv\n')
else:
filelocation.write(location + '/kpi.csv\n')
filelocation.close()
print(location)
self.report_dir = location
@@ -350,7 +355,7 @@ class cv_test(Realm):
report_location=location)
except Exception as e:
print("SCP failed, user %s, password %s, dest %s", (lf_user, lf_password, lf_host))
raise e#Exception("Could not find Reports")
raise e # Exception("Could not find Reports")
break
# Or if the test stopped for some reason and could not generate a report.
@@ -409,7 +414,7 @@ class cv_test(Realm):
print("All done posting to influx.\n")
#************************** chamber view **************************
# ************************** chamber view **************************
def add_text_blob_line(self,
scenario_name="Automation",
Resources="1.1",

View File

@@ -95,7 +95,9 @@ class UseGrafana(LFCliBase):
title=None,
bucket=None,
graph_groups=None,
testbed=None):
graph_groups_file=None,
testbed=None,
datasource='InfluxDB'):
options = string.ascii_lowercase + string.ascii_uppercase + string.digits
uid = ''.join(random.choice(options) for i in range(9))
input1 = dict()
@@ -120,8 +122,12 @@ class UseGrafana(LFCliBase):
panels = list()
index = 1
for scriptname in scripts:
for graph_group in graph_groups:
if graph_groups_file:
print("graph_groups_file: %s" % graph_groups_file)
target_csvs = open(graph_groups_file).read().split('\n')
graph_groups = self.get_graph_groups(target_csvs) # Get the list of graph groups which are in the tests we ran
for scriptname in graph_groups.keys():
for graph_group in graph_groups[scriptname]:
panel = dict()
gridpos = dict()
@@ -185,7 +191,7 @@ class UseGrafana(LFCliBase):
panel['bars'] = False
panel['dashes'] = False
panel['dashLength'] = 10
panel['datasource'] = 'InfluxDB'
panel['datasource'] = datasource
panel['fieldConfig'] = fieldConfig
panel['fill'] = 0
panel['fillGradient'] = 0
@@ -243,25 +249,35 @@ class UseGrafana(LFCliBase):
input1['title'] = title
input1['uid'] = uid
input1['version'] = 11
# print(json.dumps(input1, indent=2))
return self.GR.create_dashboard_from_dict(dictionary=json.dumps(input1))
def get_graph_groups(self,
target_csvs):
groups = []
def read_csv(self, file):
    """Read a tab-separated file and return its non-empty lines as rows.

    Args:
        file: Path to a tab-delimited file (e.g. a LANforge kpi.csv).

    Returns:
        A list of rows, each row a list of column strings. Blank lines
        (including the trailing empty string from the final newline) are
        skipped.
    """
    # 'with' guarantees the handle is closed; the original leaked it.
    with open(file) as fp:
        lines = fp.read().split('\n')
    return [line.split('\t') for line in lines if len(line) > 0]
def get_values(self, csv, target):
    """Return the values of column *target* for every data row of *csv*.

    Args:
        csv: List of rows (lists of strings); the first row is the header.
        target: Header name of the column to extract; raises ValueError if
            it is not present in the header row.

    Returns:
        A list with that column's value from each row after the header.
    """
    column = csv[0].index(target)
    return [row[column] for row in csv[1:]]
def get_graph_groups(self, target_csvs):
    """Map each test script (test-id) to the Graph-Group values of its csv.

    Args:
        target_csvs: Iterable of paths to tab-separated kpi files. Entries
            of length <= 1 (e.g. the '' produced by splitting a file list
            on newlines) are skipped.

    Returns:
        Dict mapping each unique 'test-id' value to a de-duplicated list
        of 'Graph-Group' values.
    """
    dictionary = dict()
    for target_csv in target_csvs:
        # Skip blank/near-empty entries from splitting the file list on '\n'.
        if len(target_csv) > 1:
            csv = self.read_csv(target_csv)
            scripts = list(set(self.get_values(csv, 'test-id')))
            # NOTE(review): every script found in this csv is assigned the
            # csv's FULL Graph-Group column — the values are not filtered
            # per script, and a later csv overwrites earlier entries for
            # the same test-id. Confirm this is intended.
            for script in scripts:
                dictionary[script] = list(set(self.get_values(csv, 'Graph-Group')))
    print(dictionary)
    return dictionary
def main():
parser = LFCliBase.create_basic_argparse(
@@ -304,10 +320,13 @@ def main():
optional.add_argument('--scripts', help='Scripts to graph in Grafana', default=None, action='append')
optional.add_argument('--title', help='title of your Grafana Dashboard', default=None)
optional.add_argument('--influx_bucket', help='Name of your Influx Bucket', default=None)
optional.add_argument('--graph-groups', help='How you want to filter your graphs on your dashboard',
optional.add_argument('--graph_groups', help='How you want to filter your graphs on your dashboard',
action='append', default=[None])
optional.add_argument('--graph_groups_file', help='File which determines how you want to filter your graphs on your dashboard',
default=None)
optional.add_argument('--testbed', help='Which testbed you want to query', default=None)
optional.add_argument('--kpi', help='KPI file(s) which you want to graph form', action='append', default=None)
optional.add_argument('--datasource', help='Name of Influx database if different from InfluxDB', default='InfluxDB')
args = parser.parse_args()
Grafana = UseGrafana(args.grafana_token,
@@ -334,7 +353,9 @@ def main():
title=args.title,
bucket=args.influx_bucket,
graph_groups=args.graph_groups,
testbed=args.testbed)
graph_groups_file=args.graph_groups_file,
testbed=args.testbed,
datasource=args.datasource)
if __name__ == "__main__":

View File

@@ -134,7 +134,7 @@ class DataplaneTest(cvtest):
raw_lines=[],
raw_lines_file="",
sets=[],
graphgroups=None
graph_groups=None
):
super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)
@@ -158,7 +158,7 @@ class DataplaneTest(cvtest):
self.raw_lines = raw_lines
self.raw_lines_file = raw_lines_file
self.sets = sets
self.graphgroups = graphgroups
self.graph_groups = graph_groups
def setup(self):
# Nothing to do at this time.
@@ -203,7 +203,7 @@ class DataplaneTest(cvtest):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets,
self.pull_report, self.lf_host, self.lf_user, self.lf_password,
cv_cmds, graphgroupsfile=self.graphgroups)
cv_cmds, graph_groups_file=self.graph_groups)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
@@ -246,7 +246,7 @@ def main():
help="Specify requested upload speed. Percentage of theoretical is also supported. Default: 0")
parser.add_argument("--duration", default="",
help="Specify duration of each traffic run")
parser.add_argument("--graphgroups", help="File to save graphgroups to", default=None)
parser.add_argument("--graph_groups", help="File to save graph_groups to", default=None)
args = parser.parse_args()
@@ -271,7 +271,7 @@ def main():
raw_lines = args.raw_line,
raw_lines_file = args.raw_lines_file,
sets = args.set,
graphgroups = args.graphgroups
graph_groups = args.graph_groups
)
CV_Test.setup()
CV_Test.run()

View File

@@ -352,7 +352,7 @@ class WiFiCapacityTest(cv_test):
influx_host="localhost",
influx_port=8086,
report_dir="",
graphgroups=None
graph_groups=None
):
super().__init__(lfclient_host=lfclient_host, lfclient_port=lf_port)
@@ -388,7 +388,7 @@ class WiFiCapacityTest(cv_test):
self.influx_host = influx_host,
self.influx_port = influx_port
self.report_dir = report_dir
self.graphgroups = graphgroups
self.graph_groups = graph_groups
def setup(self):
if self.create_stations and self.stations != "":
@@ -462,7 +462,7 @@ class WiFiCapacityTest(cv_test):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets,
self.pull_report, self.lfclient_host, self.lf_user, self.lf_password,
cv_cmds, graphgroupsfile=self.graphgroups)
cv_cmds, graph_groups_file=self.graph_groups)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
@@ -515,7 +515,7 @@ def main():
help="ssid Password")
parser.add_argument("--report_dir", default="")
parser.add_argument("--scenario", default="")
parser.add_argument("--graphgroups", help="File to save graphgroups to", default=None)
parser.add_argument("--graph_groups", help="File to save graph groups to", default=None)
args = parser.parse_args()
cv_base_adjust_parser(args)
@@ -547,7 +547,7 @@ def main():
raw_lines=args.raw_line,
raw_lines_file=args.raw_lines_file,
sets=args.set,
graphgroups=args.graphgroups
graph_groups=args.graph_groups
)
WFC_Test.setup()
WFC_Test.run()

View File

@@ -9,12 +9,15 @@ MGR=192.168.1.6
INFLUX_MGR=192.168.100.201
#INFLUXTOKEN=Tdxwq5KRbj1oNbZ_ErPL5tw_HUH2wJ1VR4dwZNugJ-APz__mEFIwnqHZdoobmQpt2fa1VdWMlHQClR8XNotwbg==
INFLUXTOKEN=31N9QDhjJHBu4eMUlMBwbK3sOjXLRAhZuCzZGeO8WVCj-xvR8gZWWvRHOcuw-5RHeB7xBFnLs7ZV023k4koR1A==
TESTBED=Stidmatt-01
TESTBED=Stidmatt-02
INFLUXBUCKET=stidmatt
#GRAFANATOKEN=eyJrIjoiZTJwZkZlemhLQVNpY3hiemRjUkNBZ3k2RWc3bWpQWEkiLCJuIjoibWFzdGVyIiwiaWQiOjF9
GRAFANATOKEN=eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ==
GROUPS=lf_cv_rpt_filelocation.txt
rm lf_cv_rpt_filelocation.txt
touch lf_cv_rpt_filelocation.txt
# Create/update new DUT.
#Replace my arguments with your setup. Separate your ssid arguments with spaces and ensure the names are lowercase
echo "Make new DUT"
@@ -34,17 +37,18 @@ echo "Build Chamber View Scenario"
echo "run wifi capacity test"
./lf_wifi_capacity_test.py --config_name Custom --create_stations --radio wiphy1 --pull_report --influx_host ${INFLUX_MGR} \
--influx_port 8086 --influx_org Candela --influx_token ${INFLUXTOKEN} --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} \
--instance_name testing --upstream eth1 --test_rig ${TESTBED} --graphgroups lf_cv_rpt_filelocation.txt --lf_password lanforgepassword
--instance_name testing --upstream eth1 --test_rig ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s
#config_name doesn't matter, change the influx_host to your LANforge device,
echo "run Dataplane test"
./lf_dataplane_test.py --mgr ${MGR} --instance_name dataplane-instance --config_name test_config --upstream 1.1.eth1 \
--station 1.1.06 --dut linksys-8450 --influx_host ${INFLUX_MGR} --influx_port 8086 --influx_org Candela --influx_token ${INFLUXTOKEN} \
--influx_bucket ${INFLUXBUCKET} --influx_tag testbed ${TESTBED} --graphgroups lf_cv_rpt_filelocation.txt
--influx_bucket ${INFLUXBUCKET} --influx_tag testbed ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s
# Build grafana dashboard and graphs view for the KPI in the capacity test.
./grafana_profile.py --create_custom --title ${TESTBED} --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} --grafana_token ${GRAFANATOKEN} \
--grafana_host ${INFLUX_MGR} --testbed ${TESTBED} --graph-groups lf_cv_rpt_filelocation.txt --scripts Dataplane --scripts 'WiFi Capacity'
./grafana_profile.py --create_custom --title 'Stidmatt-02' --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} --grafana_token \
${GRAFANATOKEN} --grafana_host ${INFLUX_MGR} --testbed ${TESTBED} --graph_groups_file lf_cv_rpt_filelocation.txt \
--scripts Dataplane --datasource 'InfluxDB stidmatt bucket'
#rm ${GROUPS}
rm lf_cv_rpt_filelocation.txt