diff --git a/py-dashboard/GhostRequest.py b/py-dashboard/GhostRequest.py
index b10dc8c2..0abd3fc5 100644
--- a/py-dashboard/GhostRequest.py
+++ b/py-dashboard/GhostRequest.py
@@ -443,53 +443,57 @@ class GhostRequest:
if self.debug:
print('Target files: %s' % target_files)
+ text = 'Testbed: %s\n' % testbeds[0]
if self.influx_token is not None:
influxdb = RecordInflux(_influx_host=self.influx_host,
_influx_port=self.influx_port,
_influx_org=self.influx_org,
_influx_token=self.influx_token,
_influx_bucket=self.influx_bucket)
- short_description = 'Tests passed' # variable name
- numeric_score = test_pass_fail_results['PASS'] # value
- tags = dict()
- if self.debug:
- print(datetime.utcfromtimestamp(max(times)))
- tags['testbed'] = testbeds[0]
- tags['script'] = 'GhostRequest'
- tags['Graph-Group'] = 'PASS'
- date = datetime.utcfromtimestamp(max(times)).isoformat()
- influxdb.post_to_influx(short_description, numeric_score, tags, date)
+ try:
+ short_description = 'Tests passed' # variable name
+ numeric_score = test_pass_fail_results['PASS'] # value
+ tags = dict()
+ if self.debug:
+ print(datetime.utcfromtimestamp(max(times)))
+ tags['testbed'] = testbeds[0]
+ tags['script'] = 'GhostRequest'
+ tags['Graph-Group'] = 'PASS'
+ date = datetime.utcfromtimestamp(max(times)).isoformat()
+ influxdb.post_to_influx(short_description, numeric_score, tags, date)
- short_description = 'Tests failed' # variable name
- numeric_score = test_pass_fail_results['FAIL'] # value
- tags = dict()
- tags['testbed'] = testbeds[0]
- tags['script'] = 'GhostRequest'
- tags['Graph-Group'] = 'FAIL'
- date = datetime.utcfromtimestamp(max(times)).isoformat()
- influxdb.post_to_influx(short_description, numeric_score, tags, date)
+ short_description = 'Tests failed' # variable name
+ numeric_score = test_pass_fail_results['FAIL'] # value
+ tags = dict()
+ tags['testbed'] = testbeds[0]
+ tags['script'] = 'GhostRequest'
+ tags['Graph-Group'] = 'FAIL'
+ date = datetime.utcfromtimestamp(max(times)).isoformat()
+ influxdb.post_to_influx(short_description, numeric_score, tags, date)
- short_description = 'Subtests passed' # variable name
- numeric_score = subtest_pass_fail_results['PASS'] # value
- tags = dict()
- if self.debug:
- print(datetime.utcfromtimestamp(max(times)))
- tags['testbed'] = testbeds[0]
- tags['script'] = 'GhostRequest'
- tags['Graph-Group'] = 'Subtest PASS'
- date = datetime.utcfromtimestamp(max(times)).isoformat()
- influxdb.post_to_influx(short_description, numeric_score, tags, date)
+ short_description = 'Subtests passed' # variable name
+ numeric_score = subtest_pass_fail_results['PASS'] # value
+ tags = dict()
+ if self.debug:
+ print(datetime.utcfromtimestamp(max(times)))
+ tags['testbed'] = testbeds[0]
+ tags['script'] = 'GhostRequest'
+ tags['Graph-Group'] = 'Subtest PASS'
+ date = datetime.utcfromtimestamp(max(times)).isoformat()
+ influxdb.post_to_influx(short_description, numeric_score, tags, date)
- short_description = 'Subtests failed' # variable name
- numeric_score = subtest_pass_fail_results['FAIL'] # value
- tags = dict()
- tags['testbed'] = testbeds[0]
- tags['script'] = 'GhostRequest'
- tags['Graph-Group'] = 'Subtest FAIL'
- date = datetime.utcfromtimestamp(max(times)).isoformat()
- influxdb.post_to_influx(short_description, numeric_score, tags, date)
+ short_description = 'Subtests failed' # variable name
+ numeric_score = subtest_pass_fail_results['FAIL'] # value
+ tags = dict()
+ tags['testbed'] = testbeds[0]
+ tags['script'] = 'GhostRequest'
+ tags['Graph-Group'] = 'Subtest FAIL'
+ date = datetime.utcfromtimestamp(max(times)).isoformat()
+ influxdb.post_to_influx(short_description, numeric_score, tags, date)
+ except Exception as err:
+ influx_error = err
+ text += ('\nInfluxDB Error: %s\n' % influx_error)
- text = 'Testbed: %s\n' % testbeds[0]
raw_test_tags = list()
test_tag_table = ''
for tag in test_tag.values():
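A minimal sketch of the error-capture pattern this hunk introduces: each InfluxDB write is attempted inside one try block, and any exception is appended to the report text instead of aborting the run. The RecordInflux client and its post_to_influx() call are taken from the diff; the helper function and its name below are illustrative, not part of GhostRequest.

    from datetime import datetime

    def post_pass_fail(influxdb, testbed, results, times, text, debug=False):
        # Illustrative helper: push PASS/FAIL counts to InfluxDB and append
        # any error to the report text instead of raising.
        date = datetime.utcfromtimestamp(max(times)).isoformat()
        try:
            for group, label in (('PASS', 'Tests passed'), ('FAIL', 'Tests failed')):
                tags = {'testbed': testbed, 'script': 'GhostRequest', 'Graph-Group': group}
                if debug:
                    print(label, results[group], date)
                influxdb.post_to_influx(label, results[group], tags, date)
        except Exception as err:
            # Surface the failure in the Ghost post body rather than crashing the run.
            text += '\nInfluxDB Error: %s\n' % err
        return text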
@@ -526,11 +530,10 @@ class GhostRequest:
'| Subtests passed | ' \
'%s |\n' \
'| Subtests failed | ' \
- '%s |\n' % (
+ '%s | ' \
+ '' % (
dut_table_columns, test_tag_table, test_pass_fail_results['PASS'],
test_pass_fail_results['FAIL'], subtest_pass_total, subtest_fail_total)
-
- dut_table = dut_table + ''
text = text + dut_table
for dictionary in web_pages_and_pdfs:
@@ -555,23 +558,27 @@ class GhostRequest:
)
if self.debug:
print('Test Tag: %s' % test_tag)
- grafana.create_custom_dashboard(target_csvs=target_files,
- title=title,
- datasource=grafana_datasource,
- bucket=grafana_bucket,
- from_date=start_time,
- to_date=end_time.strftime('%Y-%m-%d %H:%M:%S'),
- pass_fail='GhostRequest',
- testbed=testbeds[0],
- test_tag=test_tag)
- # get the details of the dashboard through the API, and set the end date to the youngest KPI
- grafana.list_dashboards()
+ try:
+ grafana.create_custom_dashboard(target_csvs=target_files,
+ title=title,
+ datasource=grafana_datasource,
+ bucket=grafana_bucket,
+ from_date=start_time,
+ to_date=end_time.strftime('%Y-%m-%d %H:%M:%S'),
+ pass_fail='GhostRequest',
+ testbed=testbeds[0],
+ test_tag=test_tag)
+ # get the details of the dashboard through the API, and set the end date to the youngest KPI
+ grafana.list_dashboards()
- grafana.create_snapshot(title='Testbed: ' + title)
- time.sleep(3)
- snapshot = grafana.list_snapshots()[-1]
- text = text + '\n' % (
- grafana_host, snapshot['key'], '%')
+ grafana.create_snapshot(title='Testbed: ' + title)
+ time.sleep(3)
+ snapshot = grafana.list_snapshots()[-1]
+ text = text + '\n' % (
+ grafana_host, snapshot['key'], '%')
+ except Exception as err:
+ grafana_error = err
+ text = text + 'Grafana Error: %s\n' % grafana_error
text = text + 'Low priority results: %s' % csvreader.to_html(low_priority)
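The Grafana block follows the same pattern: dashboard and snapshot creation are wrapped in a single try, and a failure becomes a line in the Ghost post instead of an unhandled exception. A rough sketch, assuming a grafana object with the create_custom_dashboard/list_dashboards/create_snapshot/list_snapshots methods used above; the wrapper function itself and the text it appends are hypothetical.

    import time

    def build_grafana_section(grafana, grafana_host, title, text, **dashboard_kwargs):
        # Hypothetical wrapper: any Grafana failure is reported inline in the post text.
        try:
            grafana.create_custom_dashboard(**dashboard_kwargs)
            grafana.list_dashboards()
            grafana.create_snapshot(title='Testbed: ' + title)
            time.sleep(3)  # give Grafana a moment to register the snapshot
            snapshot = grafana.list_snapshots()[-1]
            text += '\nSnapshot key: %s (host %s)\n' % (snapshot['key'], grafana_host)
        except Exception as err:
            text += 'Grafana Error: %s\n' % err
        return text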
diff --git a/py-scripts/tools/lf_check.py b/py-scripts/tools/lf_check.py
index 89f47643..9bbc67b6 100755
--- a/py-scripts/tools/lf_check.py
+++ b/py-scripts/tools/lf_check.py
@@ -80,7 +80,7 @@ import csv
import shutil
from os import path
import shlex
-import paramiko
+import paramiko
import pandas as pd
# lf_report is from the parent of the current file
@@ -157,8 +157,8 @@ class lf_check():
self.email_txt = ""
# lanforge configuration
- self.lf_mgr_ip = "192.168.0.102"
- self.lf_mgr_port = ""
+ self.lf_mgr_ip = "192.168.0.102"
+ self.lf_mgr_port = ""
self.lf_mgr_user = "lanforge"
self.lf_mgr_pass = "lanforge"
@@ -168,9 +168,9 @@ class lf_check():
self.dut_sw = "DUT_SW_NOT_SET"
self.dut_model = "DUT_MODEL_NOT_SET"
self.dut_serial = "DUT_SERIAL_NOT_SET"
- self.dut_bssid_2g = "BSSID_2G_NOT_SET" #"3c:7c:3f:55:4d:64" - this is the mac for the 2.4G radio this may be seen with a scan
- self.dut_bssid_5g = "BSSID_5G_NOT_SET" #"3c:7c:3f:55:4d:64" - this is the mac for the 5G radio this may be seen with a scan
- self.dut_bssid_6g = "BSSID_6G_NOT_SET" #"3c:7c:3f:55:4d:64" - this is the mac for the 6G radio this may be seen with a scan
+ self.dut_bssid_2g = "BSSID_2G_NOT_SET" #"3c:7c:3f:55:4d:64" - this is the mac for the 2.4G radio this may be seen with a scan
+ self.dut_bssid_5g = "BSSID_5G_NOT_SET" #"3c:7c:3f:55:4d:64" - this is the mac for the 5G radio this may be seen with a scan
+ self.dut_bssid_6g = "BSSID_6G_NOT_SET" #"3c:7c:3f:55:4d:64" - this is the mac for the 6G radio this may be seen with a scan
#NOTE: My influx token is unlucky and starts with a '-', but using the syntax below
# with '=' right after the argument keyword works as hoped.
# --influx_token=
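The note above is about argparse behavior: a token value beginning with '-' would normally be mistaken for another option, but attaching it with '=' keeps it bound to --influx_token. A quick, self-contained illustration (the token value is made up):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--influx_token', help='InfluxDB token')

    # '--influx_token -abc123' would fail because '-abc123' looks like a flag,
    # but the '=' form binds the value to the option unambiguously:
    args = parser.parse_args(['--influx_token=-abc123'])
    print(args.influx_token)   # -> -abc123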
@@ -220,7 +220,7 @@ class lf_check():
ssh = paramiko.SSHClient() # creating shh client object we use this object to connect to router
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # automatically adds the missing host key
#ssh.connect(self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, banner_timeout=600)
- ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, banner_timeout=600)
+ ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, allow_agent=False, look_for_keys=False, banner_timeout=600)
stdin, stdout, stderr = ssh.exec_command('uname -n')
lanforge_node_version = stdout.readlines()
# print('\n'.join(output))
@@ -233,7 +233,7 @@ class lf_check():
ssh = paramiko.SSHClient() # creating shh client object we use this object to connect to router
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # automatically adds the missing host key
#ssh.connect(self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, banner_timeout=600)
- ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, banner_timeout=600)
+ ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, allow_agent=False, look_for_keys=False, banner_timeout=600)
stdin, stdout, stderr = ssh.exec_command('uname -r')
lanforge_kernel_version = stdout.readlines()
# print('\n'.join(output))
@@ -246,7 +246,7 @@ class lf_check():
output = ""
ssh = paramiko.SSHClient() # creating shh client object we use this object to connect to router
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # automatically adds the missing host key
- ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, banner_timeout=600)
+ ssh.connect(hostname=self.lf_mgr_ip, port=22, username=self.lf_mgr_user, password=self.lf_mgr_pass, allow_agent=False, look_for_keys=False, banner_timeout=600)
stdin, stdout, stderr = ssh.exec_command('./btserver --version | grep Version')
lanforge_gui_version = stdout.readlines()
# print('\n'.join(output))
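The ssh.connect() changes above force password authentication: allow_agent=False and look_for_keys=False stop Paramiko from trying SSH-agent identities or local key files, so only the supplied password is offered. A minimal sketch of the same call pattern, using the default host and credentials from this file:

    import paramiko

    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())   # accept unknown host keys
    ssh.connect(hostname='192.168.0.102', port=22,
                username='lanforge', password='lanforge',
                allow_agent=False, look_for_keys=False,          # password auth only
                banner_timeout=600)
    stdin, stdout, stderr = ssh.exec_command('uname -r')
    print(''.join(stdout.readlines()).strip())
    ssh.close()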
@@ -254,7 +254,7 @@ class lf_check():
ssh.close()
time.sleep(1)
return lanforge_gui_version
-
+
# NOT complete : will send the email results
def send_results_email(self, report_file=None):
@@ -315,7 +315,7 @@ blog: http://{blog}:2368
except subprocess.TimeoutExpired:
print("send email timed out")
process.terminate()
-
+
def get_csv_results(self):
return self.csv_file.name
@@ -323,7 +323,7 @@ blog: http://{blog}:2368
print("self.csv_results")
self.csv_results_file = open(self.csv_results, "w")
self.csv_results_writer = csv.writer(self.csv_results_file, delimiter=",")
- self.csv_results_column_headers = ['Test','Command','Result','STDOUT','STDERR']
+ self.csv_results_column_headers = ['Test','Command','Result','STDOUT','STDERR']
self.csv_results_writer.writerow(self.csv_results_column_headers)
self.csv_results_file.flush()
@@ -387,7 +387,7 @@ blog: http://{blog}:2368
self.read_test_database()
else:
self.logger.info("NOTE: test_database not found in json")
-
+
if "test_dashboard" in self.json_data:
self.logger.info("json: read test_dashboard")
#self.logger.info("test_dashboard {}".format(self.json_data["test_dashboard"]))
@@ -742,13 +742,13 @@ blog: http://{blog}:2368
if self.test_suite in config_file.sections():
section = config_file[self.test_suite]
# for json replace the \n and \r they are invalid json characters, allows for multiple line args
- try:
+ try:
self.test_dict = json.loads(section.get('TEST_DICT', self.test_dict).replace('\n',' ').replace('\r',' '))
self.logger.info("{}: {}".format(self.test_suite,self.test_dict))
except:
- self.logger.info("Excpetion loading {}, is there comma after the last entry? Check syntax".format(self.test_suite))
+ self.logger.info("Exception loading {}, is there a comma after the last entry? Check syntax".format(self.test_suite))
else:
- self.logger.info("EXITING... NOT FOUND Test Suite with name : {}".format(self.test_suite))
+ self.logger.info("EXITING... NOT FOUND Test Suite with name : {}".format(self.test_suite))
exit(1)
def load_factory_default_db(self):
@@ -791,7 +791,7 @@ blog: http://{blog}:2368
errcode = process.returncode
def run_script_test(self):
- self.start_html_results()
+ self.start_html_results()
self.start_csv_results()
for test in self.test_dict:
@@ -912,7 +912,7 @@ blog: http://{blog}:2368
self.logger.info("timeout : {}".format(self.test_dict[test]['timeout']))
self.test_timeout = int(self.test_dict[test]['timeout'])
else:
- self.test_timeout = self.test_timeout_default
+ self.test_timeout = self.test_timeout_default
if 'load_db' in self.test_dict[test]:
self.logger.info("load_db : {}".format(self.test_dict[test]['load_db']))
@@ -921,7 +921,7 @@ blog: http://{blog}:2368
self.load_custom_db(self.test_dict[test]['load_db'])
except:
self.logger.info("custom database failed to load check existance and location: {}".format(self.test_dict[test]['load_db']))
- else:
+ else:
self.logger.info("no load_db present in dictionary, load db normally")
if self.use_factory_default_db == "TRUE":
self.load_factory_default_db()
@@ -958,7 +958,7 @@ blog: http://{blog}:2368
#self.logger.info("stdout_log_txt: {}".format(stdout_log_txt))
stdout_log = open(stdout_log_txt, 'a')
stderr_log_txt = self.outfile
- stderr_log_txt = stderr_log_txt + "-{}-stderr.txt".format(test)
+ stderr_log_txt = stderr_log_txt + "-{}-stderr.txt".format(test)
#self.logger.info("stderr_log_txt: {}".format(stderr_log_txt))
stderr_log = open(stderr_log_txt, 'a')
@@ -999,9 +999,15 @@ blog: http://{blog}:2368
# Ghost will put data in stderr
if('ghost' in command):
if(self.test_result != "TIMEOUT"):
- self.test_result = "Success"
- background = self.background_blue
-
+ text = open(stderr_log_txt).read()
+ if 'Error' in text:
+ self.test_result = "Failure"
+ background = self.background_red
+ else:
+ self.test_result = "Success"
+ background = self.background_blue
+
+
# stdout_log_link is used for the email reporting to have the corrected path
stdout_log_link = str(stdout_log_txt).replace('/home/lanforge','')
stderr_log_link = str(stderr_log_txt).replace('/home/lanforge','')
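The ghost branch now inspects the captured stderr instead of assuming success: if it contains 'Error' (which is what the GhostRequest changes above write on InfluxDB or Grafana failures), the test is marked as a failure. A small sketch of that check; the helper, return values, and file name are made up for illustration.

    def ghost_result(stderr_log_txt):
        # Hypothetical helper mirroring the check above: the Ghost posting script
        # writes its own errors to stderr, so scan that log for the 'Error' marker.
        with open(stderr_log_txt) as fh:
            text = fh.read()
        if 'Error' in text:
            return 'Failure', 'red'
        return 'Success', 'blue'

    result, background = ghost_result('lf_check-example_test-stderr.txt')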
@@ -1015,7 +1021,7 @@ blog: http://{blog}:2368
self.html_results += """STDERR | """
else:
self.html_results += """ | """
- self.html_results += """"""
+ self.html_results += """"""
row = [test,command,self.test_result,stdout_log_txt,stderr_log_txt]
self.csv_results_writer.writerow(row)
@@ -1025,7 +1031,7 @@ blog: http://{blog}:2368
else:
self.logger.info("enable value {} invalid for test: {}, test skipped".format(self.test_dict[test]['enabled'],test))
- self.finish_html_results()
+ self.finish_html_results()
def main():
# arguments
@@ -1057,7 +1063,7 @@ Example :
parser.add_argument('--outfile', help="--outfile