diff --git a/py-dashboard/GhostRequest.py b/py-dashboard/GhostRequest.py
index f052351b..adfc540f 100644
--- a/py-dashboard/GhostRequest.py
+++ b/py-dashboard/GhostRequest.py
@@ -15,6 +15,7 @@ import requests
import jwt
from datetime import datetime
+from dateutil import tz
import json
import subprocess
from scp import SCPClient
@@ -239,7 +240,7 @@ class GhostRequest:
user_push=None,
password_push=None,
customer=None,
- testbed='Unknown Testbed',
+ testbed=None,
test_run=None,
target_folders=list(),
grafana_token=None,
@@ -248,6 +249,9 @@ class GhostRequest:
grafana_datasource='InfluxDB',
grafana_bucket=None):
global dut_hw, dut_sw, dut_model, dut_serial
+
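+ # capture the current time once; used below as the default test_run folder name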
+ now = datetime.now()
+
text = ''
csvreader = CSVReader()
if grafana_token is not None:
@@ -292,20 +296,19 @@ class GhostRequest:
images = list()
times = list()
test_pass_fail = list()
- duts = dict()
for target_folder in target_folders:
try:
target_file = '%s/kpi.csv' % target_folder
df = csvreader.read_csv(file=target_file, sep='\t')
- csv_testbed = csvreader.get_column(df, 'test-rig')[0]
+ test_rig = csvreader.get_column(df, 'test-rig')[0]
pass_fail = Counter(csvreader.get_column(df, 'pass/fail'))
test_pass_fail.append(pass_fail)
dut_hw = csvreader.get_column(df, 'dut-hw-version')[0]
dut_sw = csvreader.get_column(df, 'dut-sw-version')[0]
dut_model = csvreader.get_column(df, 'dut-model-num')[0]
dut_serial = csvreader.get_column(df, 'dut-serial-num')[0]
- duts[csv_testbed] = [dut_hw, dut_sw, dut_model, dut_serial]
+ duts = [dut_serial, dut_hw, dut_sw, dut_model, test_rig]
times_append = csvreader.get_column(df, 'Date')
for target_time in times_append:
times.append(float(target_time) / 1000)
@@ -323,11 +326,14 @@ class GhostRequest:
print("Failure")
target_folders.remove(target_folder)
break
- testbeds.append(csv_testbed)
- if testbed == 'Unknown Testbed':
- raise UserWarning('Please define your testbed')
+ testbeds.append(test_rig)
+ if testbed is None:
+ testbed = test_rig
- local_path = '/home/%s/%s/%s' % (user_push, customer, testbed)
+ if test_run is None:
+ test_run = now.strftime('%B-%d-%Y-%I-%M-%p-report')
+
+ local_path = '/home/%s/%s/%s/%s' % (user_push, customer, testbed, test_run)
transport = paramiko.Transport(ghost_host, port)
transport.connect(None, user_push, password_push)
@@ -336,12 +342,22 @@ class GhostRequest:
if self.debug:
print(local_path)
print(target_folder)
+
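+ # create the testbed and test-run directories on the remote Ghost host if they do not already exist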
+ try:
+ sftp.mkdir('/home/%s/%s/%s' % (user_push, customer, testbed))
+ except IOError:
+ pass
+
+ try:
+ sftp.mkdir(local_path)
+ except IOError:
+ pass
scp_push.put(target_folder, local_path, recursive=True)
files = sftp.listdir(local_path + '/' + target_folder)
for file in files:
if 'pdf' in file:
- url = 'http://%s/%s/%s/%s/%s' % (
- ghost_host, customer.strip('/'), testbed, target_folder, file)
+ url = 'http://%s/%s/%s/%s/%s/%s' % (
+ ghost_host, customer.strip('/'), testbed, test_run, target_folder, file)
pdfs.append('PDF of results: <a href="%s">%s</a>' % (url, file))
scp_push.close()
self.upload_images(target_folder)
@@ -362,13 +378,15 @@ class GhostRequest:
low_priority_list.append(low_priority)
- now = datetime.now()
test_pass_fail_results = sum((Counter(test) for test in test_pass_fail), Counter())
end_time = max(times)
start_time = '2021-07-01'
- end_time = datetime.utcfromtimestamp(end_time).strftime('%Y-%m-%d %H:%M:%S')
+ end_time = datetime.utcfromtimestamp(end_time)
+ # shift the UTC end time into local time; it is used for the report title and the Grafana time range
+ offset = datetime.now() - datetime.utcnow()
+ end_time = end_time + offset
high_priority = csvreader.concat(high_priority_list)
low_priority = csvreader.concat(low_priority_list)
@@ -381,7 +399,7 @@ class GhostRequest:
high_priority.append(['Total Failed', test_pass_fail_results['FAIL'], 'Total subtests failed during this run'])
if title is None:
- title = now.strftime('%B %d, %Y %I:%M %p report')
+ title = end_time.strftime('%B %d, %Y %I:%M %p report')
# create Grafana Dashboard
target_files = []
@@ -394,7 +412,7 @@ class GhostRequest:
datasource=grafana_datasource,
bucket=grafana_bucket,
from_date=start_time,
- to_date=end_time,
+ to_date=end_time.strftime('%Y-%m-%d %H:%M:%S'),
pass_fail='GhostRequest',
testbed=testbeds[0])
@@ -426,24 +444,23 @@ class GhostRequest:
text = 'Testbed: %s<br>' % testbeds[0]
dut_table = '<table>' \
- '<tr><th>Ghost Request requested values</th></tr>'
- for device, data in duts.items():
- dut_table = dut_table + '<tr><td>Device</td><td>' \
- '%s</td></tr>' \
- '<tr><td>DUT_HW</td><td>' \
- '%s</td></tr>' \
- '<tr><td>DUT_SW</td><td>' \
- '%s</td></tr>' \
- '<tr><td>DUT model</td><td>' \
- '%s</td></tr>' \
- '<tr><td>DUT Serial</td><td>' \
- '%s</td></tr>' \
- '<tr><td>Tests passed</td><td>' \
- '%s</td></tr>' \
- '<tr><td>Tests failed</td><td>' \
- '%s</td></tr>' % (
- device, data[0], data[1], data[2], data[3], test_pass_fail_results['PASS'],
- test_pass_fail_results['FAIL'])
+ '<tr><th>Test Information</th></tr>' \
+ '<tr><td>Testbed</td><td>' \
+ '%s</td></tr>' \
+ '<tr><td>DUT_HW</td><td>' \
+ '%s</td></tr>' \
+ '<tr><td>DUT_SW</td><td>' \
+ '%s</td></tr>' \
+ '<tr><td>DUT model</td><td>' \
+ '%s</td></tr>' \
+ '<tr><td>DUT Serial</td><td>' \
+ '%s</td></tr>' \
+ '<tr><td>Tests passed</td><td>' \
+ '%s</td></tr>' \
+ '<tr><td>Tests failed</td><td>' \
+ '%s</td></tr>' % (
+ duts[4], duts[1], duts[2], duts[3], duts[0], test_pass_fail_results['PASS'],
+ test_pass_fail_results['FAIL'])
dut_table = dut_table + '</table>'
text = text + dut_table
diff --git a/py-dashboard/GrafanaRequest.py b/py-dashboard/GrafanaRequest.py
index a831904f..a6349938 100644
--- a/py-dashboard/GrafanaRequest.py
+++ b/py-dashboard/GrafanaRequest.py
@@ -15,6 +15,7 @@ import json
import string
import random
+
class CSVReader:
def __init__(self):
self.shape = None
@@ -41,6 +42,7 @@ class CSVReader:
values.append(row[index])
return values
+
class GrafanaRequest:
def __init__(self,
_grafana_token,
@@ -63,6 +65,7 @@ class GrafanaRequest:
self.data = dict()
self.data['overwrite'] = _overwrite
self.csvreader = CSVReader()
+ self.units = dict()
def create_bucket(self,
bucket_name=None):
@@ -128,7 +131,12 @@ class GrafanaRequest:
# we need to make sure we match each Graph Group to the script it occurs in
for script in scripts:
# Unique Graph Groups for each script
- dictionary[script] = list(set(self.csvreader.get_column(csv, 'Graph-Group')))
+ graph_groups = self.csvreader.get_column(csv, 'Graph-Group')
+ dictionary[script] = list(set(graph_groups))
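+ # record the Units value for each Graph-Group so panel y-axes can be labeled later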
+ units = self.csvreader.get_column(csv, 'Units')
+ self.units[script] = dict()
+ for index in range(0, len(graph_groups)):
+ self.units[script][graph_groups[index]] = units[index]
print(dictionary)
return dictionary
@@ -174,7 +182,6 @@ class GrafanaRequest:
dic['type'] = grouptype
return dic
-
def create_custom_dashboard(self,
scripts=None,
title=None,
@@ -226,6 +233,7 @@ class GrafanaRequest:
graph_groups[pass_fail] = ['PASS', 'FAIL']
for scriptname in graph_groups.keys():
+ print(scriptname)
for graph_group in graph_groups[scriptname]:
panel = dict()
@@ -276,7 +284,10 @@ class GrafanaRequest:
yaxis = dict()
yaxis['format'] = 'short'
- #yaxis['label'] = unit_dict[graph_group]
+ try:
+ yaxis['label'] = self.units[scriptname][graph_group]
+ except KeyError:
+ pass
yaxis['logBase'] = 1
yaxis['max'] = None
yaxis['min'] = None
@@ -389,8 +400,8 @@ class GrafanaRequest:
df = self.csvreader.read_csv(csv)
units = self.csvreader.get_column(df, 'Units')
test_id = self.csvreader.get_column(df, 'test-id')
- maxunit = max(set(units), key = units.count)
- maxtest = max(set(test_id), key = test_id.count)
+ maxunit = max(set(units), key=units.count)
+ maxtest = max(set(test_id), key=test_id.count)
d = dict()
d[maxunit] = maxtest
print(maxunit, maxtest)
diff --git a/py-scripts/cv_to_grafana.py b/py-scripts/cv_to_grafana.py
index df3d2c02..3e1574c5 100755
--- a/py-scripts/cv_to_grafana.py
+++ b/py-scripts/cv_to_grafana.py
@@ -16,7 +16,6 @@ Influx from this script.
--line "Resource=1.1 Profile=default Amount=4 Uses-1=wiphy1 DUT=DUT_TO_GRAFANA_DUT Traffic=wiphy1 Freq=-1"
--line "Resource=1.1 Profile=upstream Amount=1 Uses-1=eth1 DUT=DUT_TO_GRAFANA_DUT Traffic=eth1 Freq=-1"
--dut DUT_TO_GRAFANA
---test_rig Stidmatt-01
--create_scenario DUT_TO_GRAFANA_SCENARIO
--station 1.1.sta00002
--duration 15s
@@ -103,7 +102,6 @@ def main():
--line
--line
--dut
- --test_rig
--create_scenario
--station
--influx_tag
diff --git a/py-scripts/ghost_profile.py b/py-scripts/ghost_profile.py
index 3abe00df..297b4fd6 100755
--- a/py-scripts/ghost_profile.py
+++ b/py-scripts/ghost_profile.py
@@ -166,7 +166,7 @@ def main():
optional.add_argument('--influx_mgr',
help='IP address of the server your Influx database is hosted if different from your LANforge Manager',
default=None)
- optional.add_argument('--debug')
+ optional.add_argument('--debug', help='Enable debugging', default=False, action="store_true")
args = parser.parse_args()
Ghost = UseGhost(_ghost_token=args.ghost_token,
diff --git a/py-scripts/lf_ap_auto_test.py b/py-scripts/lf_ap_auto_test.py
index d77cc3d2..d431df98 100755
--- a/py-scripts/lf_ap_auto_test.py
+++ b/py-scripts/lf_ap_auto_test.py
@@ -23,7 +23,7 @@ the options and how best to input data.
--set 'Skip 2.4Ghz Tests' 1 --set 'Skip 5Ghz Tests' 1 \
--set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Stability' 0 --set 'Band-Steering' 0 \
--set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Long-Term' 0 \
- --test_rig Testbed-01 --pull_report \
+ --pull_report \
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
--influx_bucket ben \
@@ -46,7 +46,6 @@ show_log: 0
port_sorting: 0
kpi_id: AP Auto
bg: 0xE0ECF8
-test_rig: Ferndale-01-Basic
show_scan: 1
auto_helper: 1
skip_2: 1
diff --git a/py-scripts/lf_tr398_test.py b/py-scripts/lf_tr398_test.py
index 9f5d9d9e..8cbec10a 100755
--- a/py-scripts/lf_tr398_test.py
+++ b/py-scripts/lf_tr398_test.py
@@ -28,8 +28,7 @@ the options and how best to input data.
--set 'Multiple Assoc Stability' 0 \
--set 'Downlink MU-MIMO' 0 \
--set 'AP Coexistence' 0 \
- --set 'Long Term Stability' 0 \
- --test_rig Testbed-01
+ --set 'Long Term Stability' 0
Note:
--raw_line 'line contents' will add any setting to the test config. This is
@@ -61,7 +60,6 @@ notes1: smaller CT810a chamber. CT704b and CT714 4-module attenuators are used.
notes2: mounted on the sides of the DUT chamber are used to communicate to the DUT. DUT is facing forward at
notes3: the zero-rotation angle.
bg: 0xE0ECF8
-test_rig: TR-398 test bed
show_scan: 1
auto_helper: 1
skip_2: 0
@@ -278,8 +276,7 @@ def main():
--set 'Multiple Assoc Stability' 0 \
--set 'Downlink MU-MIMO' 0 \
--set 'AP Coexistence' 0 \
- --set 'Long Term Stability' 0 \
- --test_rig Testbed-01
+ --set 'Long Term Stability' 0
"""
)
diff --git a/py-scripts/wifi_cap_to_grafana.sh b/py-scripts/wifi_cap_to_grafana.sh
index d9054061..baf918cb 100755
--- a/py-scripts/wifi_cap_to_grafana.sh
+++ b/py-scripts/wifi_cap_to_grafana.sh
@@ -37,18 +37,25 @@ echo "Build Chamber View Scenario"
echo "run wifi capacity test"
./lf_wifi_capacity_test.py --config_name Custom --create_stations --radio wiphy1 --pull_report --influx_host ${INFLUX_MGR} \
--influx_port 8086 --influx_org Candela --influx_token ${INFLUXTOKEN} --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} \
---instance_name testing --upstream eth1 --test_rig ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s
+--instance_name testing --upstream eth1 --test_rig ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s --local_lf_report_dir ${REPORT_PATH}
+
#config_name doesn't matter, change the influx_host to your LANforge device,
echo "run Dataplane test"
./lf_dataplane_test.py --mgr ${MGR} --instance_name dataplane-instance --config_name test_config --upstream 1.1.eth1 \
--station 1.1.06 --dut linksys-8450 --influx_host ${INFLUX_MGR} --influx_port 8086 --influx_org Candela --influx_token ${INFLUXTOKEN} \
---influx_bucket ${INFLUXBUCKET} --influx_tag testbed ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s
+--influx_bucket ${INFLUXBUCKET} --influx_tag testbed ${TESTBED} --graph_groups lf_cv_rpt_filelocation.txt --duration 15s --pull_report --local_lf_report_dir ${REPORT_PATH}
+
# Build grafana dashboard and graphs view for the KPI in the capacity test.
-./grafana_profile.py --create_custom --title ${TESTBED} --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} --grafana_token \
-${GRAFANATOKEN} --grafana_host ${INFLUX_MGR} --testbed ${TESTBED} --graph_groups_file lf_cv_rpt_filelocation.txt \
---scripts Dataplane --datasource 'InfluxDB stidmatt bucket'
+#./grafana_profile.py --create_custom --title ${TESTBED} --influx_bucket ${INFLUXBUCKET} --mgr ${MGR} --grafana_token \
+#${GRAFANATOKEN} --grafana_host ${INFLUX_MGR} --testbed ${TESTBED} --graph_groups_file lf_cv_rpt_filelocation.txt \
+#--scripts Dataplane --datasource 'InfluxDB stidmatt bucket'
+
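+# Push the KPI reports in ${REPORT_PATH} to the Ghost blog and build a Grafana dashboard for the run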
+./ghost_profile.py --ghost_token ${GHOST_TOKEN} --ghost_host ${GHOST_MGR} --authors ${AUTHOR} --customer ${CUSTOMER} \
+--user_push ${USER_PUSH} --password_push ${PASSWORD_PUSH} --kpi_to_ghost --grafana_token ${GRAFANATOKEN} --grafana_host ${INFLUX_MGR} \
+--grafana_bucket ${INFLUXBUCKET} --influx_host ${INFLUX_MGR} --influx_org Candela --influx_token ${INFLUXTOKEN} \
+--influx_bucket ${INFLUXBUCKET} --parent_folder ${REPORT_PATH}
rm lf_cv_rpt_filelocation.txt
diff --git a/py-scripts/wifi_capacity_dataplane_ghost.sh b/py-scripts/wifi_capacity_dataplane_ghost.sh
new file mode 100755
index 00000000..ebdf9ddb
--- /dev/null
+++ b/py-scripts/wifi_capacity_dataplane_ghost.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+INFLUX_TOKEN=31N9QDhjJHBu4eMUlMBwbK3sOjXLRAhZuCzZGeO8WVCj-xvR8gZWWvRHOcuw-5RHeB7xBFnLs7ZV023k4koR1A==
+INFLUX_HOST=c7-grafana.candelatech.com
+INFLUX_BUCKET=stidmatt
+
+GRAFANA_TOKEN=eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ==
+
+GHOST_TOKEN=60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742
+
+SSID=lanforge
+PASSWORD=password
+SECURITY=wpa2
+TEST_RIG=ct523c-ccbc
+DUT_NAME=linksys-8450
+MGR=192.168.1.7
+RADIO=wiphy1
+UPSTREAM=1.1.eth1
+
+NOW=$(date +"%Y-%m-%d-%H-%M")
+
+mkdir ${NOW}
+#./scenario.py --mgr ${MGR} --load BLANK
+#./create_l3.py --mgr ${MGR} --radio ${RADIO} --ssid ${SSID} --password ${PASSWORD} --security ${SECURITY}
+# Create/update new DUT.
+#Replace my arguments with your setup. Separate your ssid arguments with spaces and ensure the names are lowercase
+echo "Make new DUT"
+./create_chamberview_dut.py --lfmgr ${MGR} --dut_name DUT_TO_GRAFANA_DUT \
+--ssid "ssid_idx=0 ssid=lanforge security=WPA2 password=password bssid=04:f0:21:2c:41:84"
+
+# Create/update chamber view scenario and apply and build it.
+echo "Build Chamber View Scenario"
+#change the lfmgr to your system, set the radio to a working radio on your LANforge system, same with the ethernet port.
+./create_chamberview.py --lfmgr ${MGR} --create_scenario DUT_TO_GRAFANA_SCENARIO \
+--line "Resource=1.1 Profile=default Amount=4 Uses-1=wiphy1 DUT=DUT_TO_GRAFANA_DUT Traffic=wiphy1 Freq=-1" \
+--line "Resource=1.1 Profile=upstream Amount=1 Uses-1=eth1 DUT=DUT_TO_GRAFANA_DUT Traffic=eth1 Freq=-1"
+
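+# Run the WiFi Capacity test, pushing KPI data to InfluxDB and saving the report under ${NOW}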
+./lf_wifi_capacity_test.py --mgr ${MGR} --lf_user lanforge --lf_password lanforge --instance_name ${DUT_NAME} \
+--config_name wifi_config --upstream ${UPSTREAM} --radio wiphy0 --ssid ${SSID} --paswd ${PASSWORD} --security ${SECURITY} \
+--influx_host ${INFLUX_HOST} --influx_org Candela --influx_token ${INFLUX_TOKEN} --influx_bucket ${INFLUX_BUCKET} \
+--test_rig ${TEST_RIG} --influx_tag testbed ${TEST_RIG} --set DUT_NAME ${DUT_NAME} \
+--local_lf_report_dir /home/matthew/Documents/candela/lanforge-scripts/py-scripts/${NOW}
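+# Run the Dataplane test against the same DUT, writing KPI data to InfluxDB and the report to the same ${NOW} directory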
+./lf_dataplane_test.py --mgr ${MGR} --lf_user lanforge --lf_password lanforge --instance_name wct_instance \
+--config_name wifi_config --upstream ${UPSTREAM} --influx_host ${INFLUX_HOST} --influx_org Candela \
+--influx_token ${INFLUX_TOKEN} --influx_bucket ${INFLUX_BUCKET} --test_rig ${TEST_RIG} --influx_tag testbed ${TEST_RIG} \
+--station 1.1.sta00000 --raw_line 'traffic_types: UDP;TCP' --set DUT_NAME ${DUT_NAME} \
+--local_lf_report_dir /home/matthew/Documents/candela/lanforge-scripts/py-scripts/${NOW} --pull_report
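+# Upload the collected reports to Ghost and build the Grafana dashboard from the KPI data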
+./ghost_profile.py --ghost_token ${GHOST_TOKEN} --ghost_host v-centos8s.candelatech.com --authors Matthew --customer candela \
+--user_push lanforge --password_push lanforge --kpi_to_ghost --grafana_token ${GRAFANA_TOKEN} \
+--grafana_host c7-grafana.candelatech.com --grafana_bucket lanforge_qa_testing \
+--parent_folder /home/matthew/Documents/candela/lanforge-scripts/py-scripts/${NOW}
\ No newline at end of file