diff --git a/ap_ctl.py b/ap_ctl.py
index 0a67ea04..c562b302 100755
--- a/ap_ctl.py
+++ b/ap_ctl.py
@@ -79,7 +79,7 @@ def usage():
print("-s|--scheme (serial|telnet|ssh): connect to controller via serial, ssh or telnet")
print("--tty Serial port for accessing AP")
print("-l|--log file: log messages here")
- print("-b|--band: a (5Ghz) or b (2.4Ghz) or abgn for dual-band 2.4Ghz AP")
+ print("-b|--baud: serial baud rate")
print("-z|--action: action")
print("-h|--help")
diff --git a/influxgrafanaghost_fedora_install.sh b/influxgrafanaghost_fedora_install.sh
new file mode 100755
index 00000000..62a9def3
--- /dev/null
+++ b/influxgrafanaghost_fedora_install.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+# This bash script installs InfluxDB, Grafana, and Ghost on Fedora or CentOS.
+# Run this script as a normal user with sudo access.
+# You need to provide your username at the beginning of the script.
+# The script will prompt you for a few fields while it installs Ghost.
+# Many scripts in this repository are built around InfluxDB, Grafana, and Ghost: InfluxDB is a time-series database,
+# Grafana provides dashboards that display the data stored in InfluxDB,
+# and Ghost is a blogging platform that gives users an easy way to view the automated reports built by LANforge scripts.
+# Once this script has run, those features can be used with the credentials for the system it sets up.
+
+# After running this script, Grafana is at port 3000, Influx is at port 8086, and Ghost is at port 2368.
+# The user will need to log in to each of these through a web browser to create login credentials and find API tokens.
+# These API tokens are needed to run many of the LANforge scripts that work with these three programs.
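+# For example (assuming the default ports above), you can verify that each service
+# is responding before looking for the API tokens:
+#   curl -I http://localhost:3000   # Grafana
+#   curl -I http://localhost:8086   # InfluxDB
+#   curl -I http://localhost:2368   # Ghost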
+
+echo "Type in your username here"
+read -r USER
+
+#Influx installation
+wget https://dl.influxdata.com/influxdb/releases/influxdb2-2.0.4.x86_64.rpm
+sudo yum localinstall influxdb2-2.0.4.x86_64.rpm -y
+sudo systemctl start influxdb
+sudo systemctl enable influxdb
+
+#Grafana installation
+wget https://dl.grafana.com/oss/release/grafana-7.5.3-1.x86_64.rpm
+sudo yum localinstall grafana-7.5.3-1.x86_64.rpm -y
+sudo systemctl start grafana-server
+sudo systemctl enable grafana-server
+
+#Ghost installation
+sudo adduser ghost
+# Fedora and CentOS use the wheel group for sudo access (there is no sudo group)
+sudo usermod -aG wheel ghost
+# ufw is not available on Fedora/CentOS; open the web ports with firewalld instead
+sudo firewall-cmd --permanent --add-service=http --add-service=https
+sudo firewall-cmd --reload
+# Use the RPM NodeSource setup script on Fedora/CentOS (deb.nodesource.com is for Debian/Ubuntu)
+curl -sL https://rpm.nodesource.com/setup_14.x | sudo -E bash
+sudo yum install -y nodejs
+sudo npm install ghost-cli@latest -g
+# Ensure that NPM is up to date
+npm cache verify
+sudo npm install -g n
+sudo n stable
+# Reinstall ghost-cli under the updated Node.js
+sudo npm install ghost-cli@latest -g
+sudo mkdir -p /var/www/ghostsite
+sudo chown ${USER}:${USER} /var/www/ghostsite
+sudo chmod 775 /var/www/ghostsite
+cd /var/www/ghostsite
+ghost install local
\ No newline at end of file
diff --git a/influxgrafanaghost_ubuntu_install.sh b/influxgrafanaghost_ubuntu_install.sh
new file mode 100755
index 00000000..5220db80
--- /dev/null
+++ b/influxgrafanaghost_ubuntu_install.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+#This script installs InfluxDB, Grafana, and Ghost on Ubuntu.
+#Run this script as a normal user with sudo access.
+#You need to provide your username at the beginning of the script.
+#The script will prompt you for a few fields while it installs Ghost.
+#LANforge scripts are built around InfluxDB, Grafana, and Ghost: InfluxDB is a time-series database,
+#Grafana has dashboards that display the data stored in InfluxDB,
+#and Ghost is a blogging platform that gives users an easy way to view the automated reports built by LANforge scripts.
+#Once this script has run, those features can be used with the credentials for the system it sets up.
+
+#After running this script, Grafana is accessible through port 3000, Influx is at port 8086, and Ghost is accessible at port 2368.
+#The user will need to log in to each of these through a web browser to create login credentials and find API tokens.
+#These API tokens are needed to run many of the LANforge scripts that use the functionality these three programs provide.
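+#For example (assuming the systemd unit names used below), you can confirm both
+#backing services came up before looking for the tokens:
+#  systemctl status influxdb grafana-server --no-pager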
+
+#Update necessary parts of system
+echo "Type in your username here"
+read -r USER
+
+sudo apt-get update && sudo apt-get upgrade -y
+sudo apt-get install nginx mysql-server nodejs npm -y
+
+#Influx installation
+wget https://dl.influxdata.com/influxdb/releases/influxdb2-2.0.7-amd64.deb
+sudo dpkg -i influxdb2-2.0.7-amd64.deb
+sudo systemctl unmask influxdb
+sudo systemctl start influxdb
+sudo systemctl enable influxdb
+
+#Grafana installation
+sudo apt-get install -y adduser libfontconfig1
+wget https://dl.grafana.com/oss/release/grafana_8.0.5_amd64.deb
+sudo dpkg -i grafana_8.0.5_amd64.deb
+sudo systemctl start grafana-server
+sudo systemctl enable grafana-server
+
+#Ghost installation
+sudo adduser ghost
+sudo usermod -aG sudo ghost
+sudo ufw allow 'Nginx Full'
+curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash
+sudo npm install ghost-cli@latest -g
+# Ensure that NPM is up to date
+npm cache verify
+sudo npm install -g n
+sudo n stable
+# Reinstall ghost-cli under the updated Node.js
+curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash
+sudo npm install ghost-cli@latest -g
+sudo mkdir -p /var/www/ghostsite
+sudo chown ${USER}:${USER} /var/www/ghostsite
+sudo chmod 775 /var/www/ghostsite
+cd /var/www/ghostsite
+ghost install local
\ No newline at end of file
diff --git a/py-dashboard/GhostRequest.py b/py-dashboard/GhostRequest.py
index aa593cdc..23f08522 100644
--- a/py-dashboard/GhostRequest.py
+++ b/py-dashboard/GhostRequest.py
@@ -3,7 +3,7 @@
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Class holds default settings for json requests to Ghost -
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-import ast
+
import os
import sys
@@ -14,18 +14,23 @@ if sys.version_info[0] != 3:
import requests
import jwt
-from datetime import datetime as date
+from datetime import datetime
+from dateutil import tz
import json
import subprocess
from scp import SCPClient
import paramiko
from GrafanaRequest import GrafanaRequest
+from influx2 import RecordInflux
+import time
+from collections import Counter
+import shutil
class CSVReader:
def read_csv(self,
file,
- sep=','):
+ sep='\t'):
df = open(file).read().split('\n')
rows = list()
for x in df:
@@ -44,6 +49,70 @@ class CSVReader:
values.append(row[index])
return values
+ def get_columns(self, df, targets):
+ target_index = []
+ for item in targets:
+ target_index.append(df[0].index(item))
+ results = []
+ for row in df:
+ row_data = []
+ for x in target_index:
+ row_data.append(row[x])
+ results.append(row_data)
+ return results
+
+ def to_html(self, df):
+ html = ''
+        html = html + ('<table width="700px" border="1" cellpadding="2" cellspacing="0" '
+                       'style="border-color: gray; '
+                       'border-style: solid; '
+                       'border-width: 1px;">'
+                       '<tbody>')
+        for row in df:
+            html = html + ('<tr>')
+            for item in row:
+                html = html + ('<td>%s</td>' % item)
+            html = html + ('</tr>\n')
+        html = html + ('</tbody>'
+                       '</table>')
+        return html
+
+    def filter_df(self, df, column, expression, target):
+        # keep the header row (index 0) plus every data row whose value in
+        # the given column satisfies the expression against the target
+        target_index = df[0].index(column)
+        targets = [0]
+        for counter, row in enumerate(df):
+            try:
+                value = float(row[target_index])
+            except (ValueError, IndexError):
+                continue
+            if expression == 'less than' and value < target:
+                targets.append(counter)
+            elif expression == 'greater than' and value > target:
+                targets.append(counter)
+            elif expression == 'greater than or equal to' and value >= target:
+                targets.append(counter)
+        return list(map(df.__getitem__, targets))
+
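+    # For example, filter_df(df, 'test-priority', 'greater than or equal to', 95)
+    # returns the header row plus every row whose test-priority is at least 95.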
+ def concat(self, dfs):
+ final_df = dfs[0]
+ for df in dfs[1:]:
+ final_df = final_df + df[1:]
+ return final_df
+
class GhostRequest:
def __init__(self,
@@ -52,7 +121,12 @@ class GhostRequest:
_api_token=None,
_overwrite='false',
debug_=False,
- die_on_error_=False):
+ die_on_error_=False,
+ influx_host=None,
+ influx_port=8086,
+ influx_org=None,
+ influx_token=None,
+ influx_bucket=None):
self.debug = debug_
self.die_on_error = die_on_error_
self.ghost_json_host = _ghost_json_host
@@ -64,6 +138,11 @@ class GhostRequest:
self.api_token = _api_token
self.images = list()
self.pdfs = list()
+ self.influx_host = influx_host
+ self.influx_port = influx_port
+ self.influx_org = influx_org
+ self.influx_token = influx_token
+ self.influx_bucket = influx_bucket
def encode_token(self):
@@ -71,7 +150,7 @@ class GhostRequest:
key_id, secret = self.api_token.split(':')
# Prepare header and payload
- iat = int(date.now().timestamp())
+ iat = int(datetime.now().timestamp())
header = {'alg': 'HS256', 'typ': 'JWT', 'kid': key_id}
payload = {
@@ -147,111 +226,265 @@ class GhostRequest:
tags='custom',
authors=authors)
- def wifi_capacity_to_ghost(self,
- authors,
- folders,
- title=None,
- server_pull=None,
- ghost_host=None,
- port='22',
- user_pull='lanforge',
- password_pull='lanforge',
- user_push=None,
- password_push=None,
- customer=None,
- testbed='Unknown Testbed',
- test_run=None,
- target_folders=list(),
- grafana_dashboard=None,
- grafana_token=None,
- grafana_host=None,
- grafana_port=3000):
+ def list_append(self, list_1, value):
+ list_1.append(value)
+
+ def kpi_to_ghost(self,
+ authors,
+ folders,
+ parent_folder=None,
+ title=None,
+ server_pull=None,
+ ghost_host=None,
+ port=22,
+ user_push=None,
+ password_push=None,
+ customer=None,
+ testbed=None,
+ test_run=None,
+ target_folders=list(),
+ grafana_token=None,
+ grafana_host=None,
+ grafana_port=3000,
+ grafana_datasource='InfluxDB',
+ grafana_bucket=None):
+ global dut_hw, dut_sw, dut_model, dut_serial
+
+ now = datetime.now()
+
text = ''
csvreader = CSVReader()
- if test_run is None:
- test_run = sorted(folders)[0].split('/')[-1].strip('/')
- for folder in folders:
- print(folder)
- ssh_pull = paramiko.SSHClient()
- ssh_pull.set_missing_host_key_policy(paramiko.client.AutoAddPolicy)
- ssh_pull.connect(server_pull,
- port,
- username=user_pull,
- password=password_pull,
- allow_agent=False,
- look_for_keys=False)
- scp_pull = SCPClient(ssh_pull.get_transport())
- scp_pull.get(folder, recursive=True)
- target_folder = str(folder).rstrip('/').split('/')[-1]
- target_folders.append(target_folder)
- print(target_folder)
+ if grafana_token is not None:
+ grafana = GrafanaRequest(grafana_token,
+ grafana_host,
+ grafanajson_port=grafana_port
+ )
+ if self.debug:
+ print('Folders: %s' % folders)
+
+ ssh_push = paramiko.SSHClient()
+ ssh_push.set_missing_host_key_policy(paramiko.client.AutoAddPolicy)
+ ssh_push.connect(ghost_host,
+ port,
+ username=user_push,
+ password=password_push,
+ allow_agent=False,
+ look_for_keys=False)
+ scp_push = SCPClient(ssh_push.get_transport())
+
+ if parent_folder is not None:
+ print("parent_folder %s" % parent_folder)
+ files = os.listdir(parent_folder)
+ print(files)
+ for file in files:
+ if os.path.isdir(parent_folder + '/' + file) is True:
+ if os.path.exists(file):
+ shutil.rmtree(file)
+ shutil.copytree(parent_folder + '/' + file, file)
+ target_folders.append(file)
+ print('Target folders: %s' % target_folders)
+ else:
+ for folder in folders:
+ if self.debug:
+ print(folder)
+ target_folders.append(folder)
+
+ testbeds = list()
+ pdfs = list()
+ high_priority_list = list()
+ low_priority_list = list()
+ images = list()
+ times = list()
+ test_pass_fail = list()
+
+ for target_folder in target_folders:
try:
target_file = '%s/kpi.csv' % target_folder
- print('target file %s' % target_file)
df = csvreader.read_csv(file=target_file, sep='\t')
- csv_testbed = csvreader.get_column(df, 'test-rig')[0]
- print(csv_testbed)
- except:
- pass
- if len(csv_testbed) > 2:
- testbed = csv_testbed
- text = text + 'Testbed: %s ' % testbed
- if testbed == 'Unknown Testbed':
- raise UserWarning('Please define your testbed')
- print('testbed %s' % testbed)
+ test_rig = csvreader.get_column(df, 'test-rig')[0]
+ pass_fail = Counter(csvreader.get_column(df, 'pass/fail'))
+ test_pass_fail.append(pass_fail)
+ dut_hw = csvreader.get_column(df, 'dut-hw-version')[0]
+ dut_sw = csvreader.get_column(df, 'dut-sw-version')[0]
+ dut_model = csvreader.get_column(df, 'dut-model-num')[0]
+ dut_serial = csvreader.get_column(df, 'dut-serial-num')[0]
+ duts = [dut_serial, dut_hw, dut_sw, dut_model, test_rig]
+ times_append = csvreader.get_column(df, 'Date')
+ for target_time in times_append:
+ times.append(float(target_time) / 1000)
+            if pass_fail['PASS'] + pass_fail['FAIL'] > 0:
+                text = text + 'Tests passed: %s<br>' % pass_fail['PASS']
+                text = text + 'Tests failed: %s<br>' % pass_fail['FAIL']
+                text = text + 'Percentage of tests passed: %s<br>' % (
+                        pass_fail['PASS'] / (pass_fail['PASS'] + pass_fail['FAIL']))
+            else:
+                text = text + 'Tests passed: 0<br>' \
+                              'Tests failed: 0<br>' \
+                              'Percentage of tests passed: Not Applicable<br>'
+
+        except:
+            print("Failed to parse kpi.csv in %s" % target_folder)
+ target_folders.remove(target_folder)
+ break
+ testbeds.append(test_rig)
+ if testbed is None:
+ testbed = test_rig
+
+ if test_run is None:
+ test_run = now.strftime('%B-%d-%Y-%I-%M-%p-report')
- ssh_push = paramiko.SSHClient()
- ssh_push.set_missing_host_key_policy(paramiko.client.AutoAddPolicy)
- ssh_push.connect(ghost_host,
- port,
- username=user_push,
- password=password_push,
- allow_agent=False,
- look_for_keys=False)
- scp_push = SCPClient(ssh_push.get_transport())
local_path = '/home/%s/%s/%s/%s' % (user_push, customer, testbed, test_run)
- transport = paramiko.Transport((ghost_host, port))
+
+        transport = paramiko.Transport((ghost_host, port))  # Transport expects a (host, port) tuple
transport.connect(None, user_push, password_push)
sftp = paramiko.sftp_client.SFTPClient.from_transport(transport)
- print(local_path)
+
+ if self.debug:
+ print(local_path)
+ print(target_folder)
+
+ try:
+ sftp.mkdir('/home/%s/%s/%s' % (user_push, customer, testbed))
+ except:
+ pass
+
try:
sftp.mkdir(local_path)
except:
- print('folder %s already exists' % local_path)
- scp_push.put(target_folder, recursive=True, remote_path=local_path)
+ pass
+ scp_push.put(target_folder, local_path, recursive=True)
files = sftp.listdir(local_path + '/' + target_folder)
- # print('Files: %s' % files)
for file in files:
if 'pdf' in file:
url = 'http://%s/%s/%s/%s/%s/%s' % (
ghost_host, customer.strip('/'), testbed, test_run, target_folder, file)
-                    text = text + 'PDF of results: <a href="%s">%s</a><br>' % (url, file)
- print(url)
- scp_pull.close()
+                    pdfs.append('PDF of results: <a href="%s">%s</a><br>' % (url, file))
scp_push.close()
self.upload_images(target_folder)
for image in self.images:
if 'kpi-' in image:
if '-print' not in image:
-                    text = text + '<img src="%s">' % image
+                    images.append('<img src="%s">' % image)
self.images = []
- if grafana_token is not None:
- GR = GrafanaRequest(grafana_token,
- grafana_host,
- grafanajson_port=grafana_port
- )
- GR.create_snapshot(title=grafana_dashboard)
- snapshot = GR.list_snapshots()[-1]
-            text = text + '<iframe src="%s" width="100%s" height="500"></iframe>' % (snapshot['externalUrl'], '%')
+ results = csvreader.get_columns(df, ['short-description', 'numeric-score', 'test details', 'pass/fail',
+ 'test-priority'])
- now = date.now()
+ results[0] = ['Short Description', 'Score', 'Test Details', 'Pass or Fail', 'test-priority']
+
+ low_priority = csvreader.filter_df(results, 'test-priority', 'less than', 94)
+ high_priority = csvreader.filter_df(results, 'test-priority', 'greater than or equal to', 95)
+ high_priority_list.append(high_priority)
+
+ low_priority_list.append(low_priority)
+
+
+ test_pass_fail_results = sum((Counter(test) for test in test_pass_fail), Counter())
+
+ end_time = max(times)
+ start_time = '2021-07-01'
+ end_time = datetime.utcfromtimestamp(end_time)#.strftime('%Y-%m-%d %H:%M:%S')
+ now = time.time()
+ offset = datetime.fromtimestamp(now) - datetime.utcfromtimestamp(now)
+ end_time = end_time + offset
+
+ high_priority = csvreader.concat(high_priority_list)
+ low_priority = csvreader.concat(low_priority_list)
+
+ high_priority = csvreader.get_columns(high_priority,
+ ['Short Description', 'Score', 'Test Details'])
+ low_priority = csvreader.get_columns(low_priority,
+ ['Short Description', 'Score', 'Test Details'])
+ high_priority.append(['Total Passed', test_pass_fail_results['PASS'], 'Total subtests passed during this run'])
+ high_priority.append(['Total Failed', test_pass_fail_results['FAIL'], 'Total subtests failed during this run'])
if title is None:
- title = "%s %s %s %s:%s report" % (now.day, now.month, now.year, now.hour, now.minute)
+ title = end_time.strftime('%B %d, %Y %I:%M %p report')
- if grafana_dashboard is not None:
- pass
+    # create the Grafana dashboard (assumes grafana_token was provided above,
+    # since the grafana client is only constructed when it is not None)
+ target_files = []
+ for folder in target_folders:
+ target_files.append(folder.split('/')[-1] + '/kpi.csv')
+ if self.debug:
+ print('Target files: %s' % target_files)
+ grafana.create_custom_dashboard(target_csvs=target_files,
+ title=title,
+ datasource=grafana_datasource,
+ bucket=grafana_bucket,
+ from_date=start_time,
+ to_date=end_time.strftime('%Y-%m-%d %H:%M:%S'),
+ pass_fail='GhostRequest',
+ testbed=testbeds[0])
+
+ if self.influx_token is not None:
+ influxdb = RecordInflux(_influx_host=self.influx_host,
+ _influx_port=self.influx_port,
+ _influx_org=self.influx_org,
+ _influx_token=self.influx_token,
+ _influx_bucket=self.influx_bucket)
+ short_description = 'Ghost Post Tests passed' # variable name
+ numeric_score = test_pass_fail_results['PASS'] # value
+ tags = dict()
+ print(datetime.utcfromtimestamp(max(times)))
+ tags['testbed'] = testbeds[0]
+ tags['script'] = 'GhostRequest'
+ tags['Graph-Group'] = 'PASS'
+ date = datetime.utcfromtimestamp(max(times)).isoformat()
+ influxdb.post_to_influx(short_description, numeric_score, tags, date)
+
+ short_description = 'Ghost Post Tests failed' # variable name
+ numeric_score = test_pass_fail_results['FAIL'] # value
+ tags = dict()
+ tags['testbed'] = testbeds[0]
+ tags['script'] = 'GhostRequest'
+ tags['Graph-Group'] = 'FAIL'
+ date = datetime.utcfromtimestamp(max(times)).isoformat()
+ influxdb.post_to_influx(short_description, numeric_score, tags, date)
+
+        text = 'Testbed: %s<br>' % testbeds[0]
+        dut_table = '<table width="700px" border="1" cellpadding="2" cellspacing="0" ' \
+                    'style="border-color: gray; border-style: solid; border-width: 1px;">' \
+                    '<tr><th colspan="2">Test Information</th></tr>' \
+                    '<tr><td>Testbed</td>' \
+                    '<td>%s</td></tr>' \
+                    '<tr><td>DUT_HW</td>' \
+                    '<td>%s</td></tr>' \
+                    '<tr><td>DUT_SW</td>' \
+                    '<td>%s</td></tr>' \
+                    '<tr><td>DUT model</td>' \
+                    '<td>%s</td></tr>' \
+                    '<tr><td>DUT Serial</td>' \
+                    '<td>%s</td></tr>' \
+                    '<tr><td>Tests passed</td>' \
+                    '<td>%s</td></tr>' \
+                    '<tr><td>Tests failed</td>' \
+                    '<td>%s</td></tr>' % (
+                           duts[4], duts[1], duts[2], duts[3], duts[0], test_pass_fail_results['PASS'],
+                           test_pass_fail_results['FAIL'])
+
+        dut_table = dut_table + '</table>'
+        text = text + dut_table
+
+ for pdf in pdfs:
+ print(pdf)
+ text = text + pdf
+
+ for image in images:
+ text = text + image
+
+ text = text + 'High priority results: %s' % csvreader.to_html(high_priority)
+
+ if grafana_token is not None:
+ # get the details of the dashboard through the API, and set the end date to the youngest KPI
+ grafana.list_dashboards()
+
+ grafana.create_snapshot(title='Testbed: ' + title)
+ time.sleep(3)
+ snapshot = grafana.list_snapshots()[-1]
+        text = text + '<iframe src="http://%s:3000/dashboard/snapshot/%s" width="100%s" height="500"></iframe>' % (
+            grafana_host, snapshot['key'], '%')
+
+ text = text + 'Low priority results: %s' % csvreader.to_html(low_priority)
self.create_post(title=title,
text=text,
diff --git a/py-dashboard/GrafanaRequest.py b/py-dashboard/GrafanaRequest.py
index fc585ab6..a6349938 100644
--- a/py-dashboard/GrafanaRequest.py
+++ b/py-dashboard/GrafanaRequest.py
@@ -12,6 +12,35 @@ if sys.version_info[0] != 3:
import requests
import json
+import string
+import random
+
+
+class CSVReader:
+ def __init__(self):
+ self.shape = None
+
+ def read_csv(self,
+ file,
+ sep='\t'):
+ df = open(file).read().split('\n')
+ rows = list()
+ for x in df:
+ if len(x) > 0:
+ rows.append(x.split(sep))
+ length = list(range(0, len(df[0])))
+ columns = dict(zip(df[0], length))
+ self.shape = (length, columns)
+ return rows
+
+ def get_column(self,
+ df,
+ value):
+ index = df[0].index(value)
+ values = []
+ for row in df[1:]:
+ values.append(row[index])
+ return values
class GrafanaRequest:
@@ -35,6 +64,8 @@ class GrafanaRequest:
self.grafanajson_url = "http://%s:%s" % (_grafanajson_host, grafanajson_port)
self.data = dict()
self.data['overwrite'] = _overwrite
+ self.csvreader = CSVReader()
+ self.units = dict()
def create_bucket(self,
bucket_name=None):
@@ -45,7 +76,7 @@ class GrafanaRequest:
def list_dashboards(self):
url = self.grafanajson_url + '/api/search'
print(url)
- return json.loads(requests.get(url,headers=self.headers).text)
+ return json.loads(requests.get(url, headers=self.headers).text)
def create_dashboard(self,
dashboard_name=None,
@@ -77,32 +108,277 @@ class GrafanaRequest:
datastore['dashboard'] = dashboard
datastore['overwrite'] = False
data = json.dumps(datastore, indent=4)
- #return print(data)
return requests.post(grafanajson_url, headers=self.headers, data=data, verify=False)
def create_dashboard_from_dict(self,
- dictionary=None):
+ dictionary=None,
+ overwrite=False):
grafanajson_url = self.grafanajson_url + '/api/dashboards/db'
datastore = dict()
dashboard = dict(json.loads(dictionary))
datastore['dashboard'] = dashboard
- datastore['overwrite'] = False
+ datastore['overwrite'] = overwrite
data = json.dumps(datastore, indent=4)
- #return print(data)
return requests.post(grafanajson_url, headers=self.headers, data=data, verify=False)
+ def get_graph_groups(self, target_csvs): # Get the unique values in the Graph-Group column
+ dictionary = dict()
+ for target_csv in target_csvs:
+ if len(target_csv) > 1:
+ csv = self.csvreader.read_csv(target_csv)
+ # Unique values in the test-id column
+ scripts = list(set(self.csvreader.get_column(csv, 'test-id')))
+ # we need to make sure we match each Graph Group to the script it occurs in
+ for script in scripts:
+ # Unique Graph Groups for each script
+ graph_groups = self.csvreader.get_column(csv, 'Graph-Group')
+ dictionary[script] = list(set(graph_groups))
+ units = self.csvreader.get_column(csv, 'Units')
+ self.units[script] = dict()
+ for index in range(0, len(graph_groups)):
+ self.units[script][graph_groups[index]] = units[index]
+ print(dictionary)
+ return dictionary
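+    # The returned mapping is keyed by script name, e.g. (hypothetical values)
+    # {'WiFi Capacity': ['Total Mbps']}, and self.units keeps the Units string
+    # for every script/Graph-Group pair for later use as a y-axis label.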
+
+ def maketargets(self,
+ bucket,
+ scriptname,
+ groupBy,
+ index,
+ graph_group,
+ testbed):
+ query = (
+ 'from(bucket: "%s")\n '
+ '|> range(start: v.timeRangeStart, stop: v.timeRangeStop)\n '
+ '|> filter(fn: (r) => r["script"] == "%s")\n '
+ '|> group(columns: ["_measurement"])\n '
+ % (bucket, scriptname))
+ queryend = ('|> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)\n '
+ '|> yield(name: "mean")\n ')
+ if graph_group is not None:
+ graphgroup = ('|> filter(fn: (r) => r["Graph-Group"] == "%s")\n' % graph_group)
+ query += graphgroup
+ if testbed is not None:
+ query += ('|> filter(fn: (r) => r["testbed"] == "%s")\n' % testbed)
+ targets = dict()
+ targets['delimiter'] = ','
+ targets['groupBy'] = groupBy
+ targets['header'] = True
+ targets['ignoreUnknown'] = False
+ targets['orderByTime'] = 'ASC'
+ targets['policy'] = 'default'
+ targets['query'] = query + queryend
+ targets['refId'] = dict(enumerate(string.ascii_uppercase, 1))[index + 1]
+ targets['resultFormat'] = "time_series"
+ targets['schema'] = list()
+ targets['skipRows'] = 0
+ targets['tags'] = list()
+ return targets
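+    # A generated Flux query (assuming bucket="lanforge", scriptname="Dataplane",
+    # graph_group="Throughput" and testbed=None) would read:
+    #   from(bucket: "lanforge")
+    #    |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
+    #    |> filter(fn: (r) => r["script"] == "Dataplane")
+    #    |> group(columns: ["_measurement"])
+    #    |> filter(fn: (r) => r["Graph-Group"] == "Throughput")
+    #    |> aggregateWindow(every: v.windowPeriod, fn: mean, createEmpty: false)
+    #    |> yield(name: "mean")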
+
+ def groupby(self, params, grouptype):
+ dic = dict()
+ dic['params'] = list()
+ dic['params'].append(params)
+ dic['type'] = grouptype
+ return dic
def create_custom_dashboard(self,
- datastore=None):
- data = json.dumps(datastore, indent=4)
- return requests.post(self.grafanajson_url, headers=self.headers, data=data, verify=False)
+ scripts=None,
+ title=None,
+ bucket=None,
+ graph_groups=None,
+ graph_groups_file=None,
+ target_csvs=None,
+ testbed=None,
+ datasource='InfluxDB',
+ from_date='now-1y',
+ to_date='now',
+ graph_height=8,
+                                graph_width=12,
+ pass_fail=None):
+ options = string.ascii_lowercase + string.ascii_uppercase + string.digits
+ uid = ''.join(random.choice(options) for i in range(9))
+ input1 = dict()
+ annotations = dict()
+ annotations['builtIn'] = 1
+ annotations['datasource'] = '-- Grafana --'
+ annotations['enable'] = True
+ annotations['hide'] = True
+ annotations['iconColor'] = 'rgba(0, 211, 255, 1)'
+ annotations['name'] = 'Annotations & Alerts'
+ annotations['type'] = 'dashboard'
+ annot = dict()
+ annot['list'] = list()
+ annot['list'].append(annotations)
+
+ templating = dict()
+ templating['list'] = list()
+
+ timedict = dict()
+ timedict['from'] = from_date
+ timedict['to'] = to_date
+
+ panels = list()
+ index = 1
+ if graph_groups_file:
+ print("graph_groups_file: %s" % graph_groups_file)
+ target_csvs = open(graph_groups_file).read().split('\n')
+ graph_groups = self.get_graph_groups(
+ target_csvs) # Get the list of graph groups which are in the tests we ran
+ if target_csvs:
+ print('Target CSVs: %s' % target_csvs)
+ graph_groups = self.get_graph_groups(
+ target_csvs) # Get the list of graph groups which are in the tests we ran
+ if pass_fail is not None:
+ graph_groups[pass_fail] = ['PASS', 'FAIL']
+
+ for scriptname in graph_groups.keys():
+ print(scriptname)
+ for graph_group in graph_groups[scriptname]:
+ panel = dict()
+
+ gridpos = dict()
+ gridpos['h'] = graph_height
+                gridpos['w'] = graph_width
+ gridpos['x'] = 0
+ gridpos['y'] = 0
+
+ legend = dict()
+ legend['avg'] = False
+ legend['current'] = False
+ legend['max'] = False
+ legend['min'] = False
+ legend['show'] = True
+ legend['total'] = False
+ legend['values'] = False
+
+ options = dict()
+ options['alertThreshold'] = True
+
+ groupBy = list()
+ groupBy.append(self.groupby('$__interval', 'time'))
+ groupBy.append(self.groupby('null', 'fill'))
+
+ targets = list()
+ counter = 0
+ new_target = self.maketargets(bucket, scriptname, groupBy, counter, graph_group, testbed)
+ targets.append(new_target)
+
+ fieldConfig = dict()
+ fieldConfig['defaults'] = dict()
+ fieldConfig['overrides'] = list()
+
+ transformation = dict()
+ transformation['id'] = "renameByRegex"
+ transformation_options = dict()
+ transformation_options['regex'] = "(.*) value.*"
+ transformation_options['renamePattern'] = "$1"
+ transformation['options'] = transformation_options
+
+ xaxis = dict()
+ xaxis['buckets'] = None
+ xaxis['mode'] = "time"
+ xaxis['name'] = None
+ xaxis['show'] = True
+ xaxis['values'] = list()
+
+ yaxis = dict()
+ yaxis['format'] = 'short'
+ try:
+ yaxis['label'] = self.units[scriptname][graph_group]
+ except:
+ pass
+ yaxis['logBase'] = 1
+ yaxis['max'] = None
+ yaxis['min'] = None
+ yaxis['show'] = True
+
+ yaxis1 = dict()
+ yaxis1['align'] = False
+ yaxis1['alignLevel'] = None
+
+ panel['aliasColors'] = dict()
+ panel['bars'] = False
+ panel['dashes'] = False
+ panel['dashLength'] = 10
+ panel['datasource'] = datasource
+ panel['fieldConfig'] = fieldConfig
+ panel['fill'] = 0
+ panel['fillGradient'] = 0
+ panel['gridPos'] = gridpos
+ panel['hiddenSeries'] = False
+ panel['id'] = index
+ panel['legend'] = legend
+ panel['lines'] = True
+ panel['linewidth'] = 1
+ panel['nullPointMode'] = 'null'
+ panel['options'] = options
+ panel['percentage'] = False
+ panel['pluginVersion'] = '7.5.4'
+ panel['pointradius'] = 2
+ panel['points'] = True
+ panel['renderer'] = 'flot'
+ panel['seriesOverrides'] = list()
+ panel['spaceLength'] = 10
+ panel['stack'] = False
+ panel['steppedLine'] = False
+ panel['targets'] = targets
+ panel['thresholds'] = list()
+ panel['timeFrom'] = None
+ panel['timeRegions'] = list()
+ panel['timeShift'] = None
+ if graph_group is not None:
+ panel['title'] = scriptname + ' ' + graph_group
+ else:
+ panel['title'] = scriptname
+ if 'PASS' in panel['title']:
+ panel['title'] = 'Total Passed'
+ if 'FAIL' in panel['title']:
+ panel['title'] = 'Total Failed'
+ panel['transformations'] = list()
+ panel['transformations'].append(transformation)
+ panel['type'] = "graph"
+ panel['xaxis'] = xaxis
+ panel['yaxes'] = list()
+ panel['yaxes'].append(yaxis)
+ panel['yaxes'].append(yaxis)
+ panel['yaxis'] = yaxis1
+
+ panels.append(panel)
+ index = index + 1
+ input1['annotations'] = annot
+ input1['editable'] = True
+ input1['gnetId'] = None
+ input1['graphTooltip'] = 0
+ input1['links'] = list()
+ input1['panels'] = panels
+ input1['refresh'] = False
+ input1['schemaVersion'] = 27
+ input1['style'] = 'dark'
+ input1['tags'] = list()
+ input1['templating'] = templating
+ input1['time'] = timedict
+ input1['timepicker'] = dict()
+ input1['timezone'] = ''
+ input1['title'] = ("Testbed: %s" % title)
+ input1['uid'] = uid
+ input1['version'] = 11
+ return self.create_dashboard_from_dict(dictionary=json.dumps(input1))
+
+ # def create_custom_dashboard(self,
+ # datastore=None):
+ # data = json.dumps(datastore, indent=4)
+ # return requests.post(self.grafanajson_url, headers=self.headers, data=data, verify=False)
def create_snapshot(self, title):
+ print('create snapshot')
grafanajson_url = self.grafanajson_url + '/api/snapshots'
- data=self.get_dashboard(title)
- data['expires'] = 3600
- data['external'] = True
- print(data)
+ data = self.get_dashboard(title)
+ data['expires'] = 360000
+ data['external'] = False
+ data['timeout'] = 15
+ if self.debug:
+ print(data)
return requests.post(grafanajson_url, headers=self.headers, json=data, verify=False).text
def list_snapshots(self):
@@ -112,9 +388,21 @@ class GrafanaRequest:
def get_dashboard(self, target):
dashboards = self.list_dashboards()
+ print(target)
for dashboard in dashboards:
if dashboard['title'] == target:
uid = dashboard['uid']
grafanajson_url = self.grafanajson_url + '/api/dashboards/uid/' + uid
print(grafanajson_url)
- return json.loads(requests.get(grafanajson_url, headers=self.headers, verify=False).text)
\ No newline at end of file
+ return json.loads(requests.get(grafanajson_url, headers=self.headers, verify=False).text)
+
+ def get_units(self, csv):
+ df = self.csvreader.read_csv(csv)
+ units = self.csvreader.get_column(df, 'Units')
+ test_id = self.csvreader.get_column(df, 'test-id')
+ maxunit = max(set(units), key=units.count)
+ maxtest = max(set(test_id), key=test_id.count)
+ d = dict()
+ d[maxunit] = maxtest
+ print(maxunit, maxtest)
+ return d
diff --git a/py-json/LANforge/LFUtils.py b/py-json/LANforge/LFUtils.py
index 464c1e21..7005425e 100644
--- a/py-json/LANforge/LFUtils.py
+++ b/py-json/LANforge/LFUtils.py
@@ -746,4 +746,58 @@ def exec_wrap(cmd):
print("\nError with '" + cmd + "', bye\n")
exit(1)
+
+def expand_endp_histogram(distribution_payload=None):
+ """
+ Layer 3 endpoints can contain DistributionPayloads that appear like
+ "rx-silence-5m" : {
+ # "histo_category_width" : 1,
+ # "histogram" : [
+ # 221,
+ # 113,
+ # 266,
+ # 615,
+ # 16309,
+ # 56853,
+ # 7954,
+ # 1894,
+ # 29246,
+ # 118,
+ # 12,
+ # 2,
+ # 0,
+ # 0,
+ # 0,
+ # 0
+ # ],
+ # "time window ms" : 300000,
+ # "window avg" : 210.285,
+ # "window max" : 228,
+ # "window min" : 193
+
+    These histograms are a set of roughly power-of-two categories whose linear width is scaled by histo_category_width.
+ :param distribution_payload: dictionary requiring histo_category_width and histogram
+ :return: dictionary containing expanded category ranges and values for categories
+ """
+ if distribution_payload is None:
+ return None
+ if ("histogram" not in distribution_payload) \
+ or ("histo_category_width" not in distribution_payload):
+ raise ValueError("Unexpected histogram format.")
+ multiplier = int(distribution_payload["histo_category_width"])
+ formatted_dict = {
+ #"00000 <= x <= 00001" : "0"
+ }
+ for bucket_index in range(len(distribution_payload["histogram"]) - 1):
+ pow1 = (2**bucket_index) * multiplier
+ pow2 = (2**(bucket_index+1)) * multiplier
+ if bucket_index == 0:
+ category_name = "00000 <= x <= {:-05.0f}".format(pow2)
+ else:
+ category_name = "{:-05.0f} < x <= {:-05.0f}".format(pow1, pow2)
+ formatted_dict[category_name] = distribution_payload["histogram"][bucket_index]
+
+ pprint.pprint([("historgram", distribution_payload["histogram"]),
+ ("formatted", formatted_dict)])
+ return formatted_dict
###
diff --git a/py-json/create_wanlink.py b/py-json/create_wanlink.py
index 94a16ef5..fe9839df 100755
--- a/py-json/create_wanlink.py
+++ b/py-json/create_wanlink.py
@@ -1,17 +1,12 @@
#!/usr/bin/python3
-
# Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python
-
# Written by Candela Technologies Inc.
-# Updated by:
-
+# Updated by: Erin Grimes
import sys
import urllib
-
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
-
import time
from time import sleep
from urllib import error
@@ -22,19 +17,22 @@ from LANforge import LFUtils
from LANforge.LFUtils import NA
j_printer = pprint.PrettyPrinter(indent=2)
-# typically you're using resource 1 in stand alone realm
+# todo: this needs to change
resource_id = 1
-def main(base_url="http://localhost:8080"):
+def main(base_url="http://localhost:8080", args={}):
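+    # args is expected to supply the endpoint settings posted below, e.g.
+    # {'latency_A': '75', 'latency_B': '95', 'rate_A': '128000', 'rate_B': '256000'};
+    # note the mutable default {} is shared between calls, so always pass your own dict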
json_post = ""
json_response = ""
num_wanlinks = -1
+
# see if there are old wanlinks to remove
lf_r = LFRequest.LFRequest(base_url+"/wl/list")
print(lf_r.get_as_json())
+ # ports to set as endpoints
port_a ="rd0a"
port_b ="rd1a"
+
try:
json_response = lf_r.getAsJson()
LFUtils.debug_printer.pprint(json_response)
@@ -71,13 +69,12 @@ def main(base_url="http://localhost:8080"):
# create wanlink 1a
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp")
lf_r.addPostData({
- 'alias': 'wl_eg1-A',
- 'shelf': 1,
- 'resource': '1',
- 'port': port_a,
- 'latency': '75',
- 'max_rate': '128000',
- 'description': 'cookbook-example'
+ 'alias': 'wl_eg1-A',
+ 'shelf': 1,
+ 'resource': '1',
+ 'port': port_a,
+ 'latency': args['latency_A'],
+ 'max_rate': args['rate_A']
})
lf_r.jsonPost()
sleep(0.05)
@@ -85,13 +82,12 @@ def main(base_url="http://localhost:8080"):
# create wanlink 1b
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp")
lf_r.addPostData({
- 'alias': 'wl_eg1-B',
- 'shelf': 1,
- 'resource': '1',
- 'port': port_b,
- 'latency': '95',
- 'max_rate': '256000',
- 'description': 'cookbook-example'
+ 'alias': 'wl_eg1-B',
+ 'shelf': 1,
+ 'resource': '1',
+ 'port': port_b,
+ 'latency': args['latency_B'],
+ 'max_rate': args['rate_B']
})
lf_r.jsonPost()
sleep(0.05)
@@ -134,6 +130,7 @@ def main(base_url="http://localhost:8080"):
continue
print("starting wanlink:")
+ # print("the latency is {laten}".format(laten=latency))
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
@@ -163,25 +160,7 @@ def main(base_url="http://localhost:8080"):
print("Error code "+error.code)
continue
- print("Wanlink is running, wait one sec...")
- sleep(1)
- # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- # Now we can alter the delay and speed of the wanlink by
- # updating its endpoints see https://www.candelatech.com/lfcli_ug.php#set_wanlink_info
- # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
- print("Updating Wanlink...")
- lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info")
- lf_r.addPostData({
- 'name': 'wl_eg1-A',
- 'speed': 265333,
- 'latency': 30,
- 'reorder_freq': 3200, # thats 3200/1000000
- 'drop_freq': 2000, # 2000/1000000
- 'dup_freq': 1325, # 1325/1000000
- 'jitter_freq': 25125, # 25125/1000000
- })
- lf_r.jsonPost()
- sleep(1)
+ print("Wanlink is running")
# stop wanlink
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
@@ -214,22 +193,19 @@ def main(base_url="http://localhost:8080"):
print("Wanlink is stopped.")
- print("Wanlink info:")
- lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
- json_response = lf_r.getAsJson()
- LFUtils.debug_printer.pprint(json_response)
+ # print("Wanlink info:")
+ # lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
+ # json_response = lf_r.getAsJson()
+ # LFUtils.debug_printer.pprint(json_response)
- lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-A")
- json_response = lf_r.getAsJson()
- LFUtils.debug_printer.pprint(json_response)
+ # lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-A")
+ # json_response = lf_r.getAsJson()
+ # LFUtils.debug_printer.pprint(json_response)
- lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-B")
- json_response = lf_r.getAsJson()
- LFUtils.debug_printer.pprint(json_response)
+ # lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-B")
+ # json_response = lf_r.getAsJson()
+ # LFUtils.debug_printer.pprint(json_response)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__':
main()
-
-###
-###
\ No newline at end of file
diff --git a/py-json/cv_test_manager.py b/py-json/cv_test_manager.py
index f681d5a5..0354839e 100644
--- a/py-json/cv_test_manager.py
+++ b/py-json/cv_test_manager.py
@@ -5,13 +5,12 @@ Note: This script is working as library for chamberview tests.
import time
-from LANforge.lfcli_base import LFCliBase
from realm import Realm
import json
from pprint import pprint
-import argparse
from cv_test_reports import lanforge_reports as lf_rpt
from csv_to_influx import *
+import os.path
def cv_base_adjust_parser(args):
@@ -67,12 +66,14 @@ class cv_test(Realm):
def __init__(self,
lfclient_host="localhost",
lfclient_port=8080,
- report_dir=""
+ lf_report_dir="",
+ debug=False
):
super().__init__(lfclient_host=lfclient_host,
lfclient_port=lfclient_port)
- self.report_dir = report_dir
+ self.lf_report_dir = lf_report_dir
self.report_name = None
+ self.debug = debug
# Add a config line to a text blob. Will create new text blob
# if none exists already.
@@ -127,7 +128,7 @@ class cv_test(Realm):
"cmd": command
}
debug_par = ""
- rsp = self.json_post("/gui-json/cmd%s" % debug_par, data, debug_=False, response_json_list_=response_json)
+ rsp = self.json_post("/gui-json/cmd%s" % debug_par, data, debug_=self.debug, response_json_list_=response_json)
try:
if response_json[0]["LAST"]["warnings"].startswith("Unknown"):
print("Unknown command?\n");
@@ -286,7 +287,7 @@ class cv_test(Realm):
# cv_cmds: Array of raw chamber-view commands, such as "cv click 'button-name'"
# These (and the sets) are applied after the test is created and before it is started.
def create_and_run_test(self, load_old_cfg, test_name, instance_name, config_name, sets,
- pull_report, lf_host, lf_user, lf_password, cv_cmds, local_path="", ssh_port=22,
+ pull_report, lf_host, lf_user, lf_password, cv_cmds, local_lf_report_dir="", ssh_port=22,
graph_groups_file=None):
load_old = "false"
if load_old_cfg:
@@ -349,12 +350,12 @@ class cv_test(Realm):
filelocation.write(location + '/kpi.csv\n')
filelocation.close()
print(location)
- self.report_dir = location
+ self.lf_report_dir = location
if pull_report:
try:
print(lf_host)
report.pull_reports(hostname=lf_host, username=lf_user, password=lf_password,
- port=ssh_port, local_path=local_path,
+ port=ssh_port, report_dir=local_lf_report_dir,
report_location=location)
except Exception as e:
print("SCP failed, user %s, password %s, dest %s", (lf_user, lf_password, lf_host))
@@ -385,7 +386,7 @@ class cv_test(Realm):
# Takes cmd-line args struct or something that looks like it.
# See csv_to_influx.py::influx_add_parser_args for options, or --help.
def check_influx_kpi(self, args):
- if self.report_dir == "":
+ if self.lf_report_dir == "":
# Nothing to report on.
print("Not submitting to influx, no report-dir.\n")
return
@@ -399,16 +400,21 @@ class cv_test(Realm):
(args.influx_host, args.influx_port, args.influx_org, args.influx_token, args.influx_bucket))
# lfjson_host would be if we are reading out of LANforge or some other REST
# source, which we are not. So dummy those out.
- influxdb = RecordInflux(_lfjson_host="",
- _lfjson_port="",
- _influx_host=args.influx_host,
+ influxdb = RecordInflux(_influx_host=args.influx_host,
_influx_port=args.influx_port,
_influx_org=args.influx_org,
_influx_token=args.influx_token,
_influx_bucket=args.influx_bucket)
- path = "%s/kpi.csv" % (self.report_dir)
-
+    # lf_wifi_capacity_test.py may be run / initiated by a remote system against a LANforge;
+    # in that case local_lf_report_dir is where the data is stored. If local_lf_report_dir
+    # is empty, the test is running directly on the LANforge system.
+ if self.local_lf_report_dir == "":
+ path = "%s/kpi.csv" % (self.lf_report_dir)
+ else:
+ kpi_location = self.local_lf_report_dir + "/" + os.path.basename(self.lf_report_dir)
+ # the local_lf_report_dir is the parent directory, need to get the directory name
+ path = "%s/kpi.csv" % (kpi_location)
+
print("Attempt to submit kpi: ", path)
csvtoinflux = CSVtoInflux(influxdb=influxdb,
target_csv=path,
diff --git a/py-json/cv_test_reports.py b/py-json/cv_test_reports.py
index e385057d..325bb9d9 100644
--- a/py-json/cv_test_reports.py
+++ b/py-json/cv_test_reports.py
@@ -5,13 +5,13 @@ class lanforge_reports:
def pull_reports(self, hostname="localhost", port=22, username="lanforge", password="lanforge",
report_location="/home/lanforge/html-reports/",
- local_path="../../../reports/"):
+ report_dir="../../../reports/"):
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
- ssh.connect(hostname=hostname, username=username, password=password, port=port)
+ ssh.connect(hostname=hostname, username=username, password=password, port=port, allow_agent=False, look_for_keys=False)
with SCPClient(ssh.get_transport()) as scp:
- scp.get(remote_path=report_location, local_path=local_path, recursive=True)
+ scp.get(remote_path=report_location, local_path=report_dir, recursive=True)
scp.close()
diff --git a/py-json/gen_cxprofile.py b/py-json/gen_cxprofile.py
index 811eccd6..0bea33a6 100644
--- a/py-json/gen_cxprofile.py
+++ b/py-json/gen_cxprofile.py
@@ -152,8 +152,8 @@ class GenCXProfile(LFCliBase):
count = 40
for i in range(0, count):
port_info = self.local_realm.name_to_eid(sta_port)
- resource = port_info[0]
- shelf = port_info[1]
+ resource = port_info[1]
+ shelf = port_info[0]
name = port_info[2]
gen_name_a = "%s-%s" % (self.name_prefix, name) + "_" + str(i) + add
@@ -167,8 +167,8 @@ class GenCXProfile(LFCliBase):
for i in range(0, 5):
port_info = self.local_realm.name_to_eid(port_name)
try:
- resource = port_info[0]
- shelf = port_info[1]
+ resource = port_info[1]
+ shelf = port_info[0]
name = port_info[2]
except:
raise ValueError("Unexpected name for port_name %s" % port_name)
@@ -279,8 +279,8 @@ class GenCXProfile(LFCliBase):
endp_tpls = []
for port_name in ports:
port_info = self.local_realm.name_to_eid(port_name)
- resource = port_info[0]
- shelf = port_info[1]
+ resource = port_info[1]
+ shelf = port_info[0]
name = port_info[2]
# this naming convention follows what you see when you use
diff --git a/py-json/l4_cxprofile.py b/py-json/l4_cxprofile.py
index 0fccce11..87e327ca 100644
--- a/py-json/l4_cxprofile.py
+++ b/py-json/l4_cxprofile.py
@@ -21,6 +21,7 @@ class L4CXProfile(LFCliBase):
self.local_realm = local_realm
self.created_cx = {}
self.created_endp = []
+ self.test_type = "urls"
self.lfclient_port = lfclient_port
self.lfclient_host = lfclient_host
@@ -69,6 +70,34 @@ class L4CXProfile(LFCliBase):
print(".", end='')
print("")
+    def compare_vals(self, old_list, new_list):
+        # True only when every endpoint in old_list shows a strictly larger
+        # value in new_list; any stalled endpoint makes the whole check fail
+        if len(old_list) != len(new_list):
+            return False
+        for item in old_list:
+            if new_list[item] <= old_list[item]:
+                return False
+        return True
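+    # For example, compare_vals({'cx1': 1000}, {'cx1': 1500}) is True because every
+    # tracked endpoint increased, while compare_vals({'cx1': 1000}, {'cx1': 1000}) is False.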
+
+ def get_bytes(self):
+ time.sleep(1)
+ cx_list = self.json_get("layer4/list?fields=name,%s" % self.test_type, debug_=self.debug)
+ # print("==============\n", cx_list, "\n==============")
+ cx_map = {}
+ for cx_name in cx_list['endpoint']:
+ if cx_name != 'uri' and cx_name != 'handler':
+ for item, value in cx_name.items():
+ for value_name, value_rx in value.items():
+ if item in self.created_cx.keys() and value_name == self.test_type:
+ cx_map[item] = value_rx
+ return cx_map
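+    # get_bytes returns a map such as {'my-l4-endp': 123456} (hypothetical name),
+    # keyed by the endpoints this profile created, where each value is the current
+    # reading of the column named by self.test_type ('urls' by default).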
+
def check_request_rate(self):
endp_list = self.json_get("layer4/list?fields=urls/s")
expected_passes = 0
@@ -83,12 +112,11 @@ class L4CXProfile(LFCliBase):
if name in self.created_cx.keys():
expected_passes += 1
if info['urls/s'] * self.requests_per_ten >= self.target_requests_per_ten * .9:
- print(name, info['urls/s'], info['urls/s'] * self.requests_per_ten, self.target_requests_per_ten * .9)
+ # print(name, info['urls/s'], info['urls/s'] * self.requests_per_ten, self.target_requests_per_ten * .9)
passes += 1
return passes == expected_passes
-
def cleanup(self):
print("Cleaning up cxs and endpoints")
if len(self.created_cx) != 0:
@@ -110,7 +138,7 @@ class L4CXProfile(LFCliBase):
def create(self, ports=[], sleep_time=.5, debug_=False, suppress_related_commands_=None):
cx_post_data = []
for port_name in ports:
- print("port_name: {} len: {} self.local_realm.name_to_eid(port_name): {}".format(port_name,len(self.local_realm.name_to_eid(port_name)),self.local_realm.name_to_eid(port_name),))
+ print("port_name: {} len: {} self.local_realm.name_to_eid(port_name): {}".format(port_name, len(self.local_realm.name_to_eid(port_name)), self.local_realm.name_to_eid(port_name)))
shelf = self.local_realm.name_to_eid(port_name)[0]
resource = self.local_realm.name_to_eid(port_name)[1]
name = self.local_realm.name_to_eid(port_name)[2]
@@ -186,7 +214,6 @@ class L4CXProfile(LFCliBase):
print(header_row)
# Step 2 - Monitor columns
-
start_time = datetime.datetime.now()
end_time = start_time + datetime.timedelta(seconds=duration_sec)
sleep_interval = round(duration_sec // 5)
@@ -198,6 +225,9 @@ class L4CXProfile(LFCliBase):
passes = 0
expected_passes = 0
timestamps = []
+ if self.test_type != 'urls':
+ old_rx_values = self.get_bytes()
+
for test in range(1+iterations):
while datetime.datetime.now() < end_time:
if col_names is None:
@@ -219,16 +249,27 @@ class L4CXProfile(LFCliBase):
timestamps.append(t)
value_map[t] = response
expected_passes += 1
- if self.check_errors(debug):
- if self.check_request_rate():
+ if self.test_type == 'urls':
+ if self.check_errors(self.debug):
+ if self.check_request_rate():
+ passes += 1
+ else:
+ self._fail("FAIL: Request rate did not exceed target rate")
+ break
+ else:
+ self._fail("FAIL: Errors found getting to %s " % self.url)
+ break
+
+ else:
+ new_rx_values = self.get_bytes()
+ if self.compare_vals(old_rx_values, new_rx_values):
passes += 1
else:
- self._fail("FAIL: Request rate did not exceed 90% target rate")
- self.exit_fail()
- else:
- self._fail("FAIL: Errors found getting to %s " % self.url)
- self.exit_fail()
+ self._fail("FAIL: Not all stations increased traffic")
+
+ # self.exit_fail()
time.sleep(monitor_interval)
+
print(value_map)
#[further] post-processing data, after test completion
diff --git a/py-json/lf_attenmod.py b/py-json/lf_attenmod.py
index ce1a33b3..33b7f2b3 100644
--- a/py-json/lf_attenmod.py
+++ b/py-json/lf_attenmod.py
@@ -57,7 +57,7 @@ class ATTENUATORProfile(LFCliBase):
def create(self, debug=False):
if len(self.atten_serno) == 0 or len(self.atten_idx) == 0 or len(self.atten_val) == 0:
print("ERROR: Must specify atten_serno, atten_idx, and atten_val when setting attenuator.\n")
- print("Creating Attenuator...")
+ print("Setting Attenuator...")
self.set_command_param("set_attenuator", "serno", self.atten_serno)
self.set_command_param("set_attenuator", "atten_idx", self.atten_idx)
self.set_command_param("set_attenuator", "val", self.atten_val)
diff --git a/py-json/station_profile.py b/py-json/station_profile.py
index 6561e10f..74c649b1 100644
--- a/py-json/station_profile.py
+++ b/py-json/station_profile.py
@@ -193,6 +193,10 @@ class StationProfile:
self.set_command_param("add_sta", "ieee80211w", 2)
# self.add_sta_data["key"] = passwd
+    def station_mode_to_number(self, mode):
+ modes = ['a', 'b', 'g', 'abg', 'an', 'abgn', 'bgn', 'bg', 'abgn-AC', 'bgn-AC', 'an-AC']
+ return modes.index(mode) + 1
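+    # e.g. station_mode_to_number('g') returns 3 and station_mode_to_number('an-AC')
+    # returns 11, assuming the add_sta mode enumeration is 1-based in this order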
+
def add_security_extra(self, security):
types = {"wep": "wep_enable", "wpa": "wpa_enable", "wpa2": "wpa2_enable", "wpa3": "use-wpa3", "open": "[BLANK]"}
if self.desired_add_sta_flags.__contains__(types[security]) and \
diff --git a/py-json/test_histogram.py b/py-json/test_histogram.py
new file mode 100755
index 00000000..b6bf3cd0
--- /dev/null
+++ b/py-json/test_histogram.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+""" ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
+ internal test driving LFUtils.expand_endp_histogram
+----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- """
+import LANforge
+from LANforge import LFUtils
+import pprint
+
+distrib_load = {
+ "histo_category_width" : 3,
+ "histogram" : [
+ 221,
+ 113,
+ 266,
+ 615,
+ 16309,
+ 56853,
+ 7954,
+ 1894,
+ 29246,
+ 118,
+ 12,
+ 2,
+ 0,
+ 0,
+ 0,
+ 0
+ ],
+ "time window ms" : 300000,
+ "window avg" : 210.285,
+ "window max" : 228,
+ "window min" : 193
+}
+
+if __name__ == '__main__':
+ LFUtils.expand_endp_histogram(distrib_load)
+
+
diff --git a/py-scripts/artifacts/candela_swirl_small-72h.png b/py-scripts/artifacts/candela_swirl_small-72h.png
new file mode 100644
index 00000000..e288f8c9
Binary files /dev/null and b/py-scripts/artifacts/candela_swirl_small-72h.png differ
diff --git a/py-scripts/artifacts/report.css b/py-scripts/artifacts/report.css
new file mode 100644
index 00000000..420a5265
--- /dev/null
+++ b/py-scripts/artifacts/report.css
@@ -0,0 +1,306 @@
+html, body,div {
+ margin: 0;
+ padding:0;
+ font-size: 14px;
+}
+h1,h2,h3,h4 {
+ padding: 0em;
+ line-height: 1.5;
+ text-align: left;
+ color: rgb(42,91,41);
+}
+@font-face {
+ font-family: CenturyGothic;
+ src: url("CenturyGothic.woff"),
+ url("images/CenturyGothic.woff"),
+ url("/images/CenturyGothic.woff"),
+ url("http://www.candelatech.com/images/CenturyGothic.woff");
+}
+body,h1,h2,h3,h4 {
+ font-family: CenturyGothic, "Century Gothic", Arial, Helvetica, sans-serif;
+}
+h1 { font-size: 30px;}
+h2 { font-size: 24px;}
+h3 { font-size: 18px;}
+h4 { font-size: 14px;}
+li,pre,tt {
+ text-align: left;
+}
+pre {
+ font-size: 10px;
+}
+table {
+ border-collapse: collapse;
+ background: #e0e0e0;
+}
+table, td, th {
+ border: 1px solid gray;
+    padding: 4px;
+}
+table.noborder, table.noborder td, table.noborder th {
+ border: 0 none;
+}
+td {
+ background: white;
+}
+td.ar {
+ text-align: right;
+}
+th {
+ color: rgb(42,91,41);
+ text-align: center;
+}
+#lf_title {
+ text-align: center;
+ background-image: url(candela_swirl_small-72h.png);
+ background-position: right;
+ background-repeat: no-repeat;
+ height: 90px;
+}
+#new_chart {
+ display: block;
+ height: 250px;
+ min-width: 200px;
+ width: 80%;
+ border: 1px solid black;
+ margin: 14px auto;
+ padding: 14px;
+ vertical-align: bottom;
+ text-align: center;
+}
+.lf_chart {
+ margin: 1em;
+ padding: 5px;
+}
+#error_types ul {
+ background: #f0f0f0;
+ font-size: 12px;
+ line-height: 1.5;
+ margin: 1em;
+ padding: 0.25em inherit 0.25em inherit;
+ max-height: 8em;
+ overflow: auto;
+}
+li {
+ line-height: 1.5;
+}
+.contentDiv {
+ min-width: 800px;
+ max-width: 8in;
+ margin: 1em auto;
+ padding: 0;
+}
+.ct-point {
+    stroke-width: 6px;
+}
+
+.o_el {
+ display: inline-block;
+ width: 100px;
+ height: 230px;
+ border: none;
+ margin: 1px 1px 16px 1px;
+ padding: 10px 10px 0 10px;
+ background: #eee;
+ text-align: center;
+ vertical-align: bottom;
+}
+.bar_el {
+ display: block;
+ background: green;
+ border: none;
+ min-height: 1px;
+
+ margin: 0 0 5px 0;
+ padding: 0;
+ text-align: center;
+}
+.label_el {
+ color: black;
+ display: block;
+ font-size: 14px;
+ font-family: Arial,Helvetica,sans-serif,mono;
+ margin: 1px;
+ text-align: center;
+ vertical-align: bottom;
+ width: inherit;
+}
+.value_el {
+ font-family: Arial,Helvetica,sans-serif,mono;
+ color: black;
+ display: block;
+ font-size: 14px;
+ margin: 0 auto;
+ padding: none;
+ border: none;
+ background: white;
+ text-align: center;
+ vertical-align: bottom;
+ width: auto;
+}
+.value_el>span {
+ background: #f0f0f0a0;
+ border: 1px solid #f0f0f0a0;
+ border-radius: 5px;
+ padding: 1px;
+ min-width: 2em;
+}
+.error {
+ color: red;
+}
+
+@media only screen {
+.hideFromPrint { }
+.hideFromScreen { display:none; }
+}
+@media only print {
+.hideFromScreen { }
+.hideFromPrint { display:none; }
+}
+
+/* these styles will get overridden by custom.css */
+#BannerBack {
+ background-color: #e68b15;
+ height: 205px;
+ max-height: 205px;
+ border: 0 none;
+ margin: 0;
+ padding: 0;
+ top: 0;
+ left: 0;
+ width: 100%;
+}
+#Banner {
+ background-image:url("banner.png");
+ background-repeat:no-repeat;
+ padding: 0;
+ margin: 0 auto;
+ min-width: 1000px;
+ min-height: 205px;
+ width: 1000px;
+ height: 205px;
+ max-width: 1000px;
+ max-height: 205px;
+}
+#BannerLogo {
+ text-align: right;
+ padding: 25px;
+ margin: 5px;
+ width: 200px;
+ border: none;
+}
+#BannerLogoFooter {
+ text-align: right;
+ padding: 1px;
+ margin: 1px;
+ width: 200px;
+ border: none;
+}
+.TitleFontScreen {
+ margin-left: auto;
+ margin-right: auto;
+ margin-top: 1em;
+ margin-bottom: 0.2em;
+ font-size: 50px;
+ padding-top: 1em;
+}
+
+.TitleFontPrint {
+ line-height: 1;
+ margin-left: 0px;
+ margin-right: auto;
+ margin-top: 0.5em;
+ margin-bottom: 0.2em;
+ padding-top: 20px;
+ padding-left: 20px;
+ color: darkgreen;
+}
+
+.TitleFontPrintSub {
+ line-height: 1;
+ margin-left: 0px;
+ margin-right: auto;
+ margin-top: 0;
+ margin-bottom: 0;
+ /*font-size: 20px; Let 'h3', etc control this */
+ padding-top: 0px;
+ padding-left: 20px;
+}
+
+.HeaderFont {}
+.TableFont {}
+.TableBorder {}
+.ImgStyle {}
+div.Section h1, div.Section h2 {
+ margin: 0 0 0 0em;
+}
+div.HeaderStyle h1, div.HeaderStyle h2 {
+ text-align: left;
+ margin: 0 0 0 0;
+ max-width: 8in;
+ min-width: 800px;
+}
+div.Section {
+    padding: 5px;
+ position: relative;
+}
+div.Section img {
+ margin: 0;
+ padding: 0;
+ position: relative;
+ top: 50%;
+ transform: translateY(-50%);
+}
+div.FooterStyle {
+ width: 100%;
+ vertical-align: middle;
+ border: 0 none;
+ border-top: 2px solid #2A5B29;
+ color: #2A5B29;
+ font-size: 12px;
+ margin-top: 2em;
+}
+div.FooterStyle img {
+ width: auto;
+ height: auto;
+ text-align: right;
+}
+div.FooterStyle span.Gradient {
+ background: white;
+ color: #2A5B29;
+ display: inline-block;
+ height: 30px;
+ line-height: 1;
+ padding-top: 22px;
+ padding-bottom: 20px;
+ padding-left: 2em;
+ vertical-align: middle;
+ max-width:80%;
+ float:left;
+ width:50%;
+}
+.FooterStyle a, .FooterStyle a:visited {
+ color: #2A5B29;
+ font-size: 12px;
+ line-height: 1;
+ height: 30px;
+ margin: 0;
+ padding: 0;
+ vertical-align: middle;
+}
+div.FooterStyle a.LogoImgLink {
+ display: inline-block;
+ text-align: right;
+ float: right;
+}
+a .LogoImgLink {
+}
+a.LogoImgLink img {
+}
+
+table.dataframe {
+ margin: 1em;
+ padding: 0;
+}
+table.dataframe tr th {
+ padding: 0.5em;
+}
\ No newline at end of file
diff --git a/py-scripts/cicd_TipIntegration.py b/py-scripts/cicd_TipIntegration.py
index 02651f2c..b246d522 100755
--- a/py-scripts/cicd_TipIntegration.py
+++ b/py-scripts/cicd_TipIntegration.py
@@ -1,543 +1,543 @@
-
-import base64
-import urllib.request
-from bs4 import BeautifulSoup
-import ssl
-import subprocess, os
-from artifactory import ArtifactoryPath
-import tarfile
-import paramiko
-from paramiko import SSHClient
-from scp import SCPClient
-import os
-import pexpect
-from pexpect import pxssh
-import sys
-import paramiko
-from scp import SCPClient
-import pprint
-from pprint import pprint
-from os import listdir
-import re
-
-# For finding files
-# https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
-import glob
-external_results_dir=/var/tmp/lanforge
-
-local_dir=os.getenv('LOG_DIR')
-print("Local Directory where all files will be copied and logged", local_dir)
-cicd_user=os.getenv('CICD_USER')
-print("cicd_user = ", cicd_user)
-cicd_pw=os.getenv('CICD_PW')
-print("cicd pw =",cicd_pw)
-ap_pw=os.getenv('AP_PW')
-ap_user=os.getenv('AP_USER')
-tr_user=os.getenv('TR_USER')
-print("Testrail user id = ", tr_user)
-tr_pw=os.getenv('TR_PW')
-print ("Testrail password =", tr_pw)
-aws_host='3.96.56.0'
-aws_user='ubuntu'
-
-
-
-
-if sys.version_info[0] != 3:
- print("This script requires Python 3")
- exit(1)
-if 'py-json' not in sys.path:
- sys.path.append('../py-json')
-
-from LANforge.LFUtils import *
-# if you lack __init__.py in this directory you will not find sta_connect module#
-import sta_connect
-import testrail_api
-from sta_connect import StaConnect
-from testrail_api import APIClient
-
-client: APIClient = APIClient('https://telecominfraproject.testrail.com')
-client.user = tr_user
-client.password = tr_pw
-
-
-print('Beginning file download with requests')
-
-class GetBuild:
- def __init__(self):
- self.user = cicd_user
- self.password = cicd_pw
- ssl._create_default_https_context = ssl._create_unverified_context
-
- def get_latest_image(self,url):
-
- auth = str(
- base64.b64encode(
- bytes('%s:%s' % (cicd_user,cicd_pw ), 'utf-8')
- ),
- 'ascii'
- ).strip()
- headers = {'Authorization': 'Basic ' + auth}
-
- ''' FIND THE LATEST FILE NAME'''
- print(url)
- req = urllib.request.Request(url, headers=headers)
- response = urllib.request.urlopen(req)
- html = response.read()
- soup = BeautifulSoup(html, features="html.parser")
- last_link = soup.find_all('a', href=True)[-1]
- latest_file=last_link['href']
-
- filepath = local_dir
- os.chdir(filepath)
- #file_url = url + latest_file
-
- ''' Download the binary file from Jfrog'''
- path = ArtifactoryPath(url,auth=(cicd_user, cicd_pw))
- path.touch()
- for file in path:
- print('File =', file)
-
- path = ArtifactoryPath(file, auth=(cicd_user, cicd_pw))
- print("file to be downloaded :" ,latest_file)
- print("File Path:",file)
- with path.open() as des:
- with open(latest_file, "wb") as out:
- out.write(des.read())
- des.close()
- print("Extract the tar.gz file and upgrade the AP ")
- housing_tgz = tarfile.open(latest_file)
- housing_tgz.extractall()
- housing_tgz.close()
- return "pass"
- print("Extract the tar file, and copying the file to Linksys AP directory")
- #with open("/Users/syamadevi/Desktop/syama/ea8300/ap_sysupgrade_output.log", "a") as output:
- # subprocess.call("scp /Users/syamadevi/Desktop/syama/ea8300/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin root@192.100.1.1:/tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin",shell=True, stdout=output,
- # stderr=output)
-
- print('SSH to Linksys and upgrade the file')
-
- '''
-
- ssh = SSHClient()
- ssh.load_system_host_keys()
- ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
- ssh.connect(hostname='192.100.1.1',
- port='22',
- username='root',
- password='Dadun123$',
- look_for_keys=False,
- pkey='load_key_if_relevant')
-
- # SCPCLient takes a paramiko transport as its only argument
- scp = SCPClient(ssh.get_transport())
-
- scp.put('test.txt', 'testD.txt')
- scp.close()
-
-
-
- # client = paramiko.SSHClient()
- #client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
- #client.connect('192.100.1.1', username='syama', password='Dadun123$')
-
- stdin, stdout, stderr = ssh.exec_command('sysupgrade /tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin')
-
- for line in stdout:
- print (line.strip('\n'))
- client.close()
- '''
-
- def run_opensyncgw_in_docker(self):
- #user_password = 'fepv6nj9guCPeEHC'
- #my_env = os.environ.copy()
- #my_env["userpass"] = user_password
- #my_command = 'python --version'
- #subprocess.Popen('echo', env=my_env)
- with open(local_dir +"docker_jfrog_login.log", "a") as output:
- subprocess.call("docker login --username" + cicd_user + "--password" + cicd_pw + " https://tip-tip-wlan-cloud-docker-repo.jfrog.io", shell=True, stdout=output,
- stderr=output)
- with open(local_dir +"opensyncgw_upgrade.log", "a") as output:
- subprocess.call("docker pull tip-tip-wlan-cloud-docker-repo.jfrog.io/opensync-gateway-and-mqtt:0.0.1-SNAPSHOT", shell=True, stdout=output,
- stderr=output)
- with open(local_dir+"opensyncgw.log", "a") as output:
- subprocess.call("docker run --rm -i -p 1883:1883 -p 6640:6640 -p 6643:6643 -p 4043:4043 \
- -v ~/mosquitto/data:/mosquitto/data \
- -v ~/mosquitto/log:/mosquitto/log \
- -v ~/wlan-pki-cert-scripts:/opt/tip-wlan/certs \
- -v ~/app/log:/app/logs \
- -v ~//app/config:/app/config \
- -e OVSDB_CONFIG_FILE='/app/config/config_2_ssids.json' \
- tip-tip-wlan-cloud-docker-repo.jfrog.io/opensync-gateway-and-mqtt:0.0.1-SNAPSHOT",shell=True, stdout=output,
- stderr=output)
- print("opensync Gateway is running")
- return "pass"
-
- def run_opensyncgw_in_aws(self):
- try:
- s = pxssh.pxssh()
-
- os.chdir(local_dir)
- print("AWS OPENSYNC GW UPGRADE VIA HELM")
- print(
- 'Helm upgrades the latest image in the GW if a new image is found from jfrog and the AWS gateway is not upto date ')
- # makesure the client key file is in the fame directory to login to AWS VM
- s.login(aws_host, aws_user, ssh_key='id_key.pem')
- s.sendline('kubectl get pods')
-
- # run a command
- s.prompt() # match the prompt
- print(s.before) # print everything before the prompt.
- s.sendline(
- 'helm upgrade tip-wlan wlan-cloud-helm/tip-wlan/ -n default -f wlan-cloud-helm/tip-wlan/resources/environments/dev-amazon.yaml')
- s.prompt() # match the prompt
- print(s.before) # print everything before the prompt.
- s.sendline('kubectl get pods')
-
- # run a command
- s.prompt() # match the prompt
- print(s.before) # print everything before the prompt.
- s.logout()
- return "pass"
-
- except pxssh.ExceptionPxssh as e:
- print("ALERT !!!!!! pxssh failed on login.")
- print(e)
-
-
-class openwrt_ap:
-
- def ap_upgrade(src,user2,host2,tgt,pwd,opts='', timeout=60):
- ''' Performs the scp command. Transfers file(s) from local host to remote host '''
- print("AP Model getting upgarded is :", apModel)
- if apModel == "ecw5410":
- ap_firmware = 'openwrt-ipq806x-generic-edgecore_ecw5410-squashfs-nand-sysupgrade.bin'
- AP_IP = '10.10.10.207'
- else:
- if apModel == "ea8300":
- ap_firmware = 'openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin'
- AP_IP = '10.10.10.208'
- host2 = AP_IP
- src = src+ ap_firmware
- print("src =", src)
- print("AP IP ", AP_IP)
- print("AP USER =", ap_user)
- print("AP PASSWORD =", ap_pw)
- cmd = f'''/bin/bash -c "scp {opts} {src} {user2}@{AP_IP}:{tgt}"'''
- print("Executing the following cmd:",cmd,sep='\n')
-
- tmpFl = '/tmp/scp.log'
- fp = open(tmpFl,'wb')
- print(tmpFl)
- childP = pexpect.spawn(cmd,timeout=timeout)
- try:
- childP.sendline(cmd)
- childP.expect([f"{user2}@{host2}'s password:"])
- childP.sendline(pwd)
- childP.logfile = fp
- childP.expect(pexpect.EOF)
- childP.close()
- fp.close()
- fp = open(tmpFl,'r')
- stdout = fp.read()
- fp.close()
-
- if childP.exitstatus != 0:
- raise Exception(stdout)
- except KeyboardInterrupt:
- childP.close()
- fp.close()
- return
- print(stdout)
-
- try:
- s = pxssh.pxssh()
- s.login(host2, user2, pwd)
- #s.sendline('sysupgrade /tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin&')
- s.sendline('sysupgrade /tmp/openwrt-ipq806x-generic-edgecore_ecw5410-squashfs-nand-sysupgrade.bin&')
- #s.logout()
- #s.prompt() # match the prompt
- print(s.before) # print everything before the prompt.
- sleep(100)
- #s.login(host2, user2, pwd)
- s.prompt()
- #os.system(f"scp {local_dir}/cacert.pem root@10.10.10.207:/usr/plume/certs/ca.pem")
- #os.system(f"scp {local_dir}/clientcert.pem root@10.10.10.207:/usr/plume/certs/client.pem")
- #os.system(f"scp {local_dir}/clientkey_dec.pem root@10.10.10.207:/usr/plume/certs/client_dec.key")
- #s.sendline('service opensync restart')
- #s.prompt() # match the prompt
- #print(s.before) # print everything before the prompt.
- s.logout()
- return "pass"
- except pxssh.ExceptionPxssh as e:
- print("ALERT !!!!!! pxssh failed on login.")
- print(e)
- def apCopyCert(src,user2,host2,tgt,pwd,opts='', timeout=60):
-
- print("Copying the AP Certs")
- '''
- s = pxssh.pxssh()
- print(src, users2,pwd)
- s.login(host2, user2, pwd)
- s.prompt() # match the prompt
- print("Copying ca.pem")
- os.system(f"scp {src}/cacert.pem root@10.10.10.207:/usr/plume/certs/ca.pem")
- print("Copying the client.pem")
- os.system(f"scp {src}/clientcert.pem root@10.10.10.207:/usr/plume/certs/client.pem")
- print("Copying the client_dec.key")
- os.system(f"scp {src}/clientkey_dec.pem root@10.10.10.207:/usr/plume/certs/client_dec.key")
- s.sendline('service opensync restart')
- s.prompt() # match the prompt
- print(s.before) # print everything before the prompt.
- s.logout()
- '''
- cacert=src+"ca.pem"
- clientcert = src+"client.pem"
- clientkey=src+"client_dec.key"
- tgt ="/usr/plume/certs"
- ap_pw
-
- print("src =", src)
- print("AP IP ", host2)
- print("AP USER =", ap_user)
- print("AP PASSWORD =", ap_pw)
- #cmd = f'''/bin/bash -c "scp {opts} {src} {user2}@{AP_IP}:{tgt}"'''
- #cmd = f'''/bin/bash -c "scp {opts} {cacert} {user2}@{AP_IP}:{tgt}"'''
- #cmd = f'''/bin/bash -c "scp {opts} {clientcert} {user2}@{AP_IP}:{tgt}"'''
- cmd = f'''/bin/bash -c "scp {opts} {cacert} {clientcert} {clientkey} {user2}@{host2}:{tgt}"'''
- print("Executing the following cmd:", cmd, sep='\n')
- tmpFl = '/tmp/cert.log'
- fp = open(tmpFl, 'wb')
- print(tmpFl)
- childP = pexpect.spawn(cmd, timeout=timeout)
- try:
- childP.sendline(cmd)
- childP.expect([f"{user2}@{host2}'s password:"])
- childP.sendline(ap_pw)
- childP.logfile = fp
- childP.expect(pexpect.EOF)
- fp.close()
- fp = open(tmpFl,'r')
- stdout = fp.read()
- fp.close()
-
- if childP.exitstatus != 0:
- #raise Exception(stdout)
- print("there is an excess status non 0")
- except KeyboardInterrupt:
- childP.close()
- fp.close()
- return
- print(stdout)
- def restartGw(src,user2,host2,tgt,pwd,opts='', timeout=60):
- print("Restarting opensync GW")
- s = pxssh.pxssh()
- s.login(host2, user2, pwd)
- # s.sendline('sysupgrade /tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin&')
- s.sendline('service opensync restart')
- # s.logout()
- # s.prompt() # match the prompt
- print(s.before) # print everything before the prompt.
- s.prompt()
- s.logout()
-
-
-class RunTest:
- def TestCase_938(self, rid):
- '''SINGLE CLIENT CONNECTIVITY'''
- staConnect = StaConnect("10.10.10.201", 8080, _debugOn=False)
- staConnect.sta_mode = 0
- staConnect.upstream_resource = 1
- staConnect.upstream_port = "eth2"
- staConnect.radio = "wiphy1"
- staConnect.resource = 1
- staConnect.dut_ssid = "autoProvisionedSsid-5u"
- #staConnect.dut_passwd = "4C0nnectUS!"
- staConnect.dut_passwd = "12345678"
- staConnect.dut_security = "wpa2"
- staConnect.station_names = ["sta01010"]
- staConnect.runtime_secs = 30
- staConnect.cleanup_on_exit = True
- staConnect.run()
- run_results = staConnect.get_result_list()
- for result in run_results:
- print("test result: " + result)
- #result = 'pass'
- print("Single Client Connectivity :",staConnect.passes)
- if staConnect.passes() == True:
- client.update_testrail(case_id=938, run_id=rid, status_id=1, msg='client Connectivity to 5GHZ Open SSID is Passed ')
- else:
- client.update_testrail(case_id=938, run_id=rid, status_id=5, msg='client connectivity to 5GHZ OPEN SSID is Failed')
-
- def TestCase_941(self, rid):
- #MULTI CLIENT CONNECTIVITY
- staConnect = StaConnect("10.10.10.201", 8080, _debugOn=False)
- staConnect.sta_mode = 0
- staConnect.upstream_resource = 1
- staConnect.upstream_port = "eth2"
- staConnect.radio = "wiphy1"
- staConnect.resource = 1
- staConnect.dut_ssid = "autoProvisionedSsid-5u"
- # staConnect.dut_passwd = "4C0nnectUS!"
- staConnect.dut_passwd = "12345678"
- staConnect.dut_security = "wpa2"
- staConnect.station_names = ["sta0020", 'sta0021', 'sta0022', 'sta0023']
- staConnect.runtime_secs = 20
- staConnect.cleanup_on_exit = True
- staConnect.run()
- run_results = staConnect.get_result_list()
- for result in run_results:
- print("test result: " + result)
- if staConnect.passes() == True:
- client.update_testrail(case_id=941, run_id=rid, status_id=1,
- msg='client Connectivity to 5GHZ Open SSID is Passed ')
- else:
- client.update_testrail(case_id=941, run_id=rid, status_id=5,
- msg='client connectivity to 5GHZ OPEN SSID is Failed')
-
- # Check for externally run test case results.
- def TestCase_LF_External(self, rid):
- #https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
- results = glob.glob("%s/*_CICD_RESULTS.txt"%external_results_dir)
- for r in results:
- rfile = open(r, 'r')
- lines = rfile.readlines()
-
- # File contents looks something like:
- #CASE_ID 9999
- #RUN_ID 15
- #STATUS 1
- #MSG Test passed nicely
- #MSG Build ID: deadbeef
- #MSG Results: http://cicd.telecominfraproject.com
-
- _case_id = -1
- _status_id = 1 # Default to pass
- _msg = ""
- _rid = rid
-
- for line in Lines:
- m = re.search(r'(\S+) (.*)', line)
- k = m.group(0);
- v = m.group(1);
-
- if k == "CASE_ID":
- _case_id = v
- if k == "RUN_ID":
- _rid = v
- if k == "STATUS":
- _status_id = v
- if k == "MSG":
- if _msg == "":
- _msg == v
- else:
- _msg += "\n"
- _msg += v
- if _case_id != -1:
- client.update_testrail(case_id=_case_id, run_id=_rid, status_id=_status_id, msg=_msg)
- os.unlink(r)
-
- def TestCase_939(self, rid):
- ''' Client Count in MQTT Log'''
- try:
- print("Counting clients in MQTT")
- s = pxssh.pxssh()
- #aws_host = os.getenv(AWS_HOST)
- #aws_user=os.getenv(AWS_USER)
- os.chdir(local_dir)
- # makesure the client key file is in the fame directory to login to AWS VM
- s.login(aws_host,aws_user,ssh_key='id_key.pem')
- s.sendline('kubectl cp tip-wlan-opensync-gw-static-f795d45-ctb5z:/app/logs/mqttData.log mqttData.log')
- # run a command
- s.prompt() # match the prompt
- print(s.before) # print everything before the prompt.
- s.sendline()
- s.logout()
- #return "pass"
- print(aws_host, aws_user)
- ssh = paramiko.SSHClient()
- ssh.load_system_host_keys()
- ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
- k = paramiko.RSAKey.from_private_key_file('id_key.pem')
- ssh.connect(aws_host, username=aws_user, pkey=k)
- print("Connected")
- scp = SCPClient(ssh.get_transport())
- scp.get("mqttData.log")
- scp.close()
- # Get the client Count
- ClientCount = subprocess.getoutput(
- 'grep \'{\"nodeID\"\' mqttData.log | grep clientList | tail -1 |cut -d \'=\' -f 3 | json_pp | grep macAddres | grep \'04:F0:21:55\' | tr -d , | sort | uniq | wc -l ')
- print("client count =", ClientCount)
- if (int(ClientCount) >= 1):
- client.update_testrail(case_id=939, run_id=rid, status_id=1,
- msg=ClientCount + ' Client/Clients Connected ')
- else:
- client.update_testrail(case_id=939, run_id=rid, status_id=5,
- msg='No Client Connected')
- except pxssh.ExceptionPxssh as e:
- print("ALERT !!!!!! pxssh failed on login.")
- print(e)
-
-
-params = {
- 'src': local_dir,
- 'user2': ap_user,
- 'host2': '10.10.10.207',
- 'tgt': '/tmp/',
- 'pwd': ap_pw,
- 'opts': ''
-}
-apModel= "ecw5410"
-
-
-url = 'https://tip.jfrog.io/artifactory/tip-wlan-ap-firmware/'
-url = url + apModel
-projId = client.get_project_id(project_name= 'WLAN')
-print("TIP WLAN Project ID Is :", projId)
-
-rid = client.get_run_id(test_run_name= 'TIP-DEMO4')
-print(rid)
-Test: RunTest = RunTest()
-Build: GetBuild = GetBuild()
-'''
-binary_fetch_result = Build.get_latest_image(url)
-print("UPDATING TEST RAIL WITH TEST RESULT FOR CASE_ID 940: Download latest openwrt image from Jfrog")
-
-if binary_fetch_result == 'pass':
- client.update_testrail(case_id=940, run_id=rid, status_id=1, msg='latest firmware downloaded')
-else:
- client.update_testrail(case_id=940, run_id=rid, status_id=5, msg='Firmware Download failed')
-
-sleep(10)
-print("Upgrading AP with latest image downloaded")
-ap_upgrade_result = openwrt_ap.ap_upgrade(**params)
-sleep(10)
-print("UPDATING TEST RAIL WITH TEST RESULT FOR CASE_ID 937")
-sleep(10)
-if ap_upgrade_result == 'pass':
- client.update_testrail(case_id=937, run_id=rid, status_id=1, msg='AP upgraded with latest Firmware')
-else:
- client.update_testrail(case_id=937, run_id=rid, status_id=5, msg='Firmware upgrade failed in AP ')
-print("Upgrading AWS Opensync gateway with latest docker image from Jfrog")
-OpensyncGw_UpgResult = Build.run_opensyncgw_in_aws()
-if OpensyncGw_UpgResult == 'pass':
- client.update_testrail(case_id=936, run_id=rid, status_id=1, msg='Opensync GW upgraded with latest Firmware')
-else:
- client.update_testrail(case_id=936, run_id=rid, status_id=5, msg='Firmware upgrade failed in Opensync Gateway')
-sleep(10)
-'''
-pprint.pprint(params)
-ap_cert_result = openwrt_ap.apCopyCert(**params)
-print("Executing TestCase 938: single Client Connectivity test")
-openwrt_ap.restartGw(**params)
-Test.TestCase_938(rid)
-
-print("Executing TestCase 941: Multi Client Connectivity test")
-Test.TestCase_941(rid)
-sleep(100)
-print("Executing TestCase 939:Counting The number of Clients Connected from MQTT")
-Test.TestCase_939(rid)
-
-
-
-
+#
+# import base64
+# import urllib.request
+# from bs4 import BeautifulSoup
+# import ssl
+# import subprocess, os
+# from artifactory import ArtifactoryPath
+# import tarfile
+# import paramiko
+# from paramiko import SSHClient
+# from scp import SCPClient
+# import os
+# import pexpect
+# from pexpect import pxssh
+# import sys
+# import paramiko
+# from scp import SCPClient
+# import pprint
+# from pprint import pprint
+# from os import listdir
+# import re
+#
+# # For finding files
+# # https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
+# import glob
+# external_results_dir='/var/tmp/lanforge'
+#
+# local_dir=os.getenv('LOG_DIR')
+# print("Local Directory where all files will be copied and logged", local_dir)
+# cicd_user=os.getenv('CICD_USER')
+# print("cicd_user = ", cicd_user)
+# cicd_pw=os.getenv('CICD_PW')
+# print("cicd pw =",cicd_pw)
+# ap_pw=os.getenv('AP_PW')
+# ap_user=os.getenv('AP_USER')
+# tr_user=os.getenv('TR_USER')
+# print("Testrail user id = ", tr_user)
+# tr_pw=os.getenv('TR_PW')
+# print ("Testrail password =", tr_pw)
+# aws_host='3.96.56.0'
+# aws_user='ubuntu'
+#
+#
+#
+#
+# if sys.version_info[0] != 3:
+# print("This script requires Python 3")
+# exit(1)
+# if 'py-json' not in sys.path:
+# sys.path.append('../py-json')
+#
+# from LANforge.LFUtils import *
+# # if you lack __init__.py in this directory you will not find sta_connect module#
+# import sta_connect
+# import testrail_api
+# from sta_connect import StaConnect
+# from testrail_api import APIClient
+#
+# client: APIClient = APIClient('https://telecominfraproject.testrail.com')
+# client.user = tr_user
+# client.password = tr_pw
+#
+#
+# print('Beginning file download with requests')
+#
+# class GetBuild:
+# def __init__(self):
+# self.user = cicd_user
+# self.password = cicd_pw
+# ssl._create_default_https_context = ssl._create_unverified_context
+#
+# def get_latest_image(self,url):
+#
+# auth = str(
+# base64.b64encode(
+# bytes('%s:%s' % (cicd_user,cicd_pw ), 'utf-8')
+# ),
+# 'ascii'
+# ).strip()
+# headers = {'Authorization': 'Basic ' + auth}
+#
+# ''' FIND THE LATEST FILE NAME'''
+# print(url)
+# req = urllib.request.Request(url, headers=headers)
+# response = urllib.request.urlopen(req)
+# html = response.read()
+# soup = BeautifulSoup(html, features="html.parser")
+# last_link = soup.find_all('a', href=True)[-1]
+# latest_file=last_link['href']
+#
+# filepath = local_dir
+# os.chdir(filepath)
+# #file_url = url + latest_file
+#
+# ''' Download the binary file from Jfrog'''
+# path = ArtifactoryPath(url,auth=(cicd_user, cicd_pw))
+# path.touch()
+# for file in path:
+# print('File =', file)
+#
+# path = ArtifactoryPath(file, auth=(cicd_user, cicd_pw))
+# print("file to be downloaded :" ,latest_file)
+# print("File Path:",file)
+# with path.open() as des:
+# with open(latest_file, "wb") as out:
+# out.write(des.read())
+# des.close()
+# print("Extract the tar.gz file and upgrade the AP ")
+# housing_tgz = tarfile.open(latest_file)
+# housing_tgz.extractall()
+# housing_tgz.close()
+# return "pass"
+# print("Extract the tar file, and copying the file to Linksys AP directory")
+# #with open("/Users/syamadevi/Desktop/syama/ea8300/ap_sysupgrade_output.log", "a") as output:
+# # subprocess.call("scp /Users/syamadevi/Desktop/syama/ea8300/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin root@192.100.1.1:/tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin",shell=True, stdout=output,
+# # stderr=output)
+#
+# print('SSH to Linksys and upgrade the file')
+#
+# '''
+#
+# ssh = SSHClient()
+# ssh.load_system_host_keys()
+# ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+# ssh.connect(hostname='192.100.1.1',
+# port='22',
+# username='root',
+# password='Dadun123$',
+# look_for_keys=False,
+# pkey='load_key_if_relevant')
+#
+# # SCPCLient takes a paramiko transport as its only argument
+# scp = SCPClient(ssh.get_transport())
+#
+# scp.put('test.txt', 'testD.txt')
+# scp.close()
+#
+#
+#
+# # client = paramiko.SSHClient()
+# #client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+# #client.connect('192.100.1.1', username='syama', password='Dadun123$')
+#
+# stdin, stdout, stderr = ssh.exec_command('sysupgrade /tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin')
+#
+# for line in stdout:
+# print (line.strip('\n'))
+# client.close()
+# '''
+#
+# def run_opensyncgw_in_docker(self):
+# #user_password = 'fepv6nj9guCPeEHC'
+# #my_env = os.environ.copy()
+# #my_env["userpass"] = user_password
+# #my_command = 'python --version'
+# #subprocess.Popen('echo', env=my_env)
+# with open(local_dir +"docker_jfrog_login.log", "a") as output:
+# subprocess.call("docker login --username" + cicd_user + "--password" + cicd_pw + " https://tip-tip-wlan-cloud-docker-repo.jfrog.io", shell=True, stdout=output,
+# stderr=output)
+# with open(local_dir +"opensyncgw_upgrade.log", "a") as output:
+# subprocess.call("docker pull tip-tip-wlan-cloud-docker-repo.jfrog.io/opensync-gateway-and-mqtt:0.0.1-SNAPSHOT", shell=True, stdout=output,
+# stderr=output)
+# with open(local_dir+"opensyncgw.log", "a") as output:
+# subprocess.call("docker run --rm -i -p 1883:1883 -p 6640:6640 -p 6643:6643 -p 4043:4043 \
+# -v ~/mosquitto/data:/mosquitto/data \
+# -v ~/mosquitto/log:/mosquitto/log \
+# -v ~/wlan-pki-cert-scripts:/opt/tip-wlan/certs \
+# -v ~/app/log:/app/logs \
+# -v ~//app/config:/app/config \
+# -e OVSDB_CONFIG_FILE='/app/config/config_2_ssids.json' \
+# tip-tip-wlan-cloud-docker-repo.jfrog.io/opensync-gateway-and-mqtt:0.0.1-SNAPSHOT",shell=True, stdout=output,
+# stderr=output)
+# print("opensync Gateway is running")
+# return "pass"
+#
+# def run_opensyncgw_in_aws(self):
+# try:
+# s = pxssh.pxssh()
+#
+# os.chdir(local_dir)
+# print("AWS OPENSYNC GW UPGRADE VIA HELM")
+# print(
+# 'Helm upgrades the latest image in the GW if a new image is found from jfrog and the AWS gateway is not up to date')
+# # make sure the client key file is in the same directory to log in to the AWS VM
+# s.login(aws_host, aws_user, ssh_key='id_key.pem')
+# s.sendline('kubectl get pods')
+#
+# # run a command
+# s.prompt() # match the prompt
+# print(s.before) # print everything before the prompt.
+# s.sendline(
+# 'helm upgrade tip-wlan wlan-cloud-helm/tip-wlan/ -n default -f wlan-cloud-helm/tip-wlan/resources/environments/dev-amazon.yaml')
+# s.prompt() # match the prompt
+# print(s.before) # print everything before the prompt.
+# s.sendline('kubectl get pods')
+#
+# # run a command
+# s.prompt() # match the prompt
+# print(s.before) # print everything before the prompt.
+# s.logout()
+# return "pass"
+#
+# except pxssh.ExceptionPxssh as e:
+# print("ALERT !!!!!! pxssh failed on login.")
+# print(e)
+#
+#
+# class openwrt_ap:
+#
+# def ap_upgrade(src,user2,host2,tgt,pwd,opts='', timeout=60):
+# ''' Performs the scp command. Transfers file(s) from local host to remote host '''
+# print("AP Model getting upgarded is :", apModel)
+# if apModel == "ecw5410":
+# ap_firmware = 'openwrt-ipq806x-generic-edgecore_ecw5410-squashfs-nand-sysupgrade.bin'
+# AP_IP = '10.10.10.207'
+# else:
+# if apModel == "ea8300":
+# ap_firmware = 'openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin'
+# AP_IP = '10.10.10.208'
+# host2 = AP_IP
+# src = src+ ap_firmware
+# print("src =", src)
+# print("AP IP ", AP_IP)
+# print("AP USER =", ap_user)
+# print("AP PASSWORD =", ap_pw)
+# cmd = f'''/bin/bash -c "scp {opts} {src} {user2}@{AP_IP}:{tgt}"'''
+# print("Executing the following cmd:",cmd,sep='\n')
+#
+# tmpFl = '/tmp/scp.log'
+# fp = open(tmpFl,'wb')
+# print(tmpFl)
+# childP = pexpect.spawn(cmd,timeout=timeout)
+# try:
+# childP.sendline(cmd)
+# childP.expect([f"{user2}@{host2}'s password:"])
+# childP.sendline(pwd)
+# childP.logfile = fp
+# childP.expect(pexpect.EOF)
+# childP.close()
+# fp.close()
+# fp = open(tmpFl,'r')
+# stdout = fp.read()
+# fp.close()
+#
+# if childP.exitstatus != 0:
+# raise Exception(stdout)
+# except KeyboardInterrupt:
+# childP.close()
+# fp.close()
+# return
+# print(stdout)
+#
+# try:
+# s = pxssh.pxssh()
+# s.login(host2, user2, pwd)
+# #s.sendline('sysupgrade /tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin&')
+# s.sendline('sysupgrade /tmp/openwrt-ipq806x-generic-edgecore_ecw5410-squashfs-nand-sysupgrade.bin&')
+# #s.logout()
+# #s.prompt() # match the prompt
+# print(s.before) # print everything before the prompt.
+# sleep(100)
+# #s.login(host2, user2, pwd)
+# s.prompt()
+# #os.system(f"scp {local_dir}/cacert.pem root@10.10.10.207:/usr/plume/certs/ca.pem")
+# #os.system(f"scp {local_dir}/clientcert.pem root@10.10.10.207:/usr/plume/certs/client.pem")
+# #os.system(f"scp {local_dir}/clientkey_dec.pem root@10.10.10.207:/usr/plume/certs/client_dec.key")
+# #s.sendline('service opensync restart')
+# #s.prompt() # match the prompt
+# #print(s.before) # print everything before the prompt.
+# s.logout()
+# return "pass"
+# except pxssh.ExceptionPxssh as e:
+# print("ALERT !!!!!! pxssh failed on login.")
+# print(e)
+# def apCopyCert(src,user2,host2,tgt,pwd,opts='', timeout=60):
+#
+# print("Copying the AP Certs")
+# '''
+# s = pxssh.pxssh()
+# print(src, users2,pwd)
+# s.login(host2, user2, pwd)
+# s.prompt() # match the prompt
+# print("Copying ca.pem")
+# os.system(f"scp {src}/cacert.pem root@10.10.10.207:/usr/plume/certs/ca.pem")
+# print("Copying the client.pem")
+# os.system(f"scp {src}/clientcert.pem root@10.10.10.207:/usr/plume/certs/client.pem")
+# print("Copying the client_dec.key")
+# os.system(f"scp {src}/clientkey_dec.pem root@10.10.10.207:/usr/plume/certs/client_dec.key")
+# s.sendline('service opensync restart')
+# s.prompt() # match the prompt
+# print(s.before) # print everything before the prompt.
+# s.logout()
+# '''
+# cacert=src+"ca.pem"
+# clientcert = src+"client.pem"
+# clientkey=src+"client_dec.key"
+# tgt ="/usr/plume/certs"
+# ap_pw
+#
+# print("src =", src)
+# print("AP IP ", host2)
+# print("AP USER =", ap_user)
+# print("AP PASSWORD =", ap_pw)
+# #cmd = f'''/bin/bash -c "scp {opts} {src} {user2}@{AP_IP}:{tgt}"'''
+# #cmd = f'''/bin/bash -c "scp {opts} {cacert} {user2}@{AP_IP}:{tgt}"'''
+# #cmd = f'''/bin/bash -c "scp {opts} {clientcert} {user2}@{AP_IP}:{tgt}"'''
+# cmd = f'''/bin/bash -c "scp {opts} {cacert} {clientcert} {clientkey} {user2}@{host2}:{tgt}"'''
+# print("Executing the following cmd:", cmd, sep='\n')
+# tmpFl = '/tmp/cert.log'
+# fp = open(tmpFl, 'wb')
+# print(tmpFl)
+# childP = pexpect.spawn(cmd, timeout=timeout)
+# try:
+# childP.sendline(cmd)
+# childP.expect([f"{user2}@{host2}'s password:"])
+# childP.sendline(ap_pw)
+# childP.logfile = fp
+# childP.expect(pexpect.EOF)
+# fp.close()
+# fp = open(tmpFl,'r')
+# stdout = fp.read()
+# fp.close()
+#
+# if childP.exitstatus != 0:
+# #raise Exception(stdout)
+# print("there is an excess status non 0")
+# except KeyboardInterrupt:
+# childP.close()
+# fp.close()
+# return
+# print(stdout)
+# def restartGw(src,user2,host2,tgt,pwd,opts='', timeout=60):
+# print("Restarting opensync GW")
+# s = pxssh.pxssh()
+# s.login(host2, user2, pwd)
+# # s.sendline('sysupgrade /tmp/openwrt-ipq40xx-generic-linksys_ea8300-squashfs-sysupgrade.bin&')
+# s.sendline('service opensync restart')
+# # s.logout()
+# # s.prompt() # match the prompt
+# print(s.before) # print everything before the prompt.
+# s.prompt()
+# s.logout()
+#
+#
+# class RunTest:
+# def TestCase_938(self, rid):
+# '''SINGLE CLIENT CONNECTIVITY'''
+# staConnect = StaConnect("10.10.10.201", 8080, _debugOn=False)
+# staConnect.sta_mode = 0
+# staConnect.upstream_resource = 1
+# staConnect.upstream_port = "eth2"
+# staConnect.radio = "wiphy1"
+# staConnect.resource = 1
+# staConnect.dut_ssid = "autoProvisionedSsid-5u"
+# #staConnect.dut_passwd = "4C0nnectUS!"
+# staConnect.dut_passwd = "12345678"
+# staConnect.dut_security = "wpa2"
+# staConnect.station_names = ["sta01010"]
+# staConnect.runtime_secs = 30
+# staConnect.cleanup_on_exit = True
+# staConnect.run()
+# run_results = staConnect.get_result_list()
+# for result in run_results:
+# print("test result: " + result)
+# #result = 'pass'
+# print("Single Client Connectivity :",staConnect.passes)
+# if staConnect.passes() == True:
+# client.update_testrail(case_id=938, run_id=rid, status_id=1, msg='client Connectivity to 5GHZ Open SSID is Passed ')
+# else:
+# client.update_testrail(case_id=938, run_id=rid, status_id=5, msg='client connectivity to 5GHZ OPEN SSID is Failed')
+#
+# def TestCase_941(self, rid):
+# #MULTI CLIENT CONNECTIVITY
+# staConnect = StaConnect("10.10.10.201", 8080, _debugOn=False)
+# staConnect.sta_mode = 0
+# staConnect.upstream_resource = 1
+# staConnect.upstream_port = "eth2"
+# staConnect.radio = "wiphy1"
+# staConnect.resource = 1
+# staConnect.dut_ssid = "autoProvisionedSsid-5u"
+# # staConnect.dut_passwd = "4C0nnectUS!"
+# staConnect.dut_passwd = "12345678"
+# staConnect.dut_security = "wpa2"
+# staConnect.station_names = ["sta0020", 'sta0021', 'sta0022', 'sta0023']
+# staConnect.runtime_secs = 20
+# staConnect.cleanup_on_exit = True
+# staConnect.run()
+# run_results = staConnect.get_result_list()
+# for result in run_results:
+# print("test result: " + result)
+# if staConnect.passes() == True:
+# client.update_testrail(case_id=941, run_id=rid, status_id=1,
+# msg='client Connectivity to 5GHZ Open SSID is Passed ')
+# else:
+# client.update_testrail(case_id=941, run_id=rid, status_id=5,
+# msg='client connectivity to 5GHZ OPEN SSID is Failed')
+#
+# # Check for externally run test case results.
+# def TestCase_LF_External(self, rid):
+# #https://stackoverflow.com/questions/3207219/how-do-i-list-all-files-of-a-directory
+# results = glob.glob("%s/*_CICD_RESULTS.txt"%external_results_dir)
+# for r in results:
+# rfile = open(r, 'r')
+# lines = rfile.readlines()
+#
+# # File contents look something like:
+# #CASE_ID 9999
+# #RUN_ID 15
+# #STATUS 1
+# #MSG Test passed nicely
+# #MSG Build ID: deadbeef
+# #MSG Results: http://cicd.telecominfraproject.com
+#
+# _case_id = -1
+# _status_id = 1 # Default to pass
+# _msg = ""
+# _rid = rid
+#
+# for line in lines:
+# m = re.search(r'(\S+) (.*)', line)
+# k = m.group(1)
+# v = m.group(2)
+#
+# if k == "CASE_ID":
+# _case_id = v
+# if k == "RUN_ID":
+# _rid = v
+# if k == "STATUS":
+# _status_id = v
+# if k == "MSG":
+# if _msg == "":
+# _msg = v
+# else:
+# _msg += "\n"
+# _msg += v
+# if _case_id != -1:
+# client.update_testrail(case_id=_case_id, run_id=_rid, status_id=_status_id, msg=_msg)
+# os.unlink(r)
+#
+# def TestCase_939(self, rid):
+# ''' Client Count in MQTT Log'''
+# try:
+# print("Counting clients in MQTT")
+# s = pxssh.pxssh()
+# #aws_host = os.getenv(AWS_HOST)
+# #aws_user=os.getenv(AWS_USER)
+# os.chdir(local_dir)
+# # make sure the client key file is in the same directory to log in to the AWS VM
+# s.login(aws_host,aws_user,ssh_key='id_key.pem')
+# s.sendline('kubectl cp tip-wlan-opensync-gw-static-f795d45-ctb5z:/app/logs/mqttData.log mqttData.log')
+# # run a command
+# s.prompt() # match the prompt
+# print(s.before) # print everything before the prompt.
+# s.sendline()
+# s.logout()
+# #return "pass"
+# print(aws_host, aws_user)
+# ssh = paramiko.SSHClient()
+# ssh.load_system_host_keys()
+# ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+# k = paramiko.RSAKey.from_private_key_file('id_key.pem')
+# ssh.connect(aws_host, username=aws_user, pkey=k)
+# print("Connected")
+# scp = SCPClient(ssh.get_transport())
+# scp.get("mqttData.log")
+# scp.close()
+# # Get the client Count
+# ClientCount = subprocess.getoutput(
+# 'grep \'{\"nodeID\"\' mqttData.log | grep clientList | tail -1 |cut -d \'=\' -f 3 | json_pp | grep macAddres | grep \'04:F0:21:55\' | tr -d , | sort | uniq | wc -l ')
+# print("client count =", ClientCount)
+# if (int(ClientCount) >= 1):
+# client.update_testrail(case_id=939, run_id=rid, status_id=1,
+# msg=ClientCount + ' Client/Clients Connected ')
+# else:
+# client.update_testrail(case_id=939, run_id=rid, status_id=5,
+# msg='No Client Connected')
+# except pxssh.ExceptionPxssh as e:
+# print("ALERT !!!!!! pxssh failed on login.")
+# print(e)
+#
+#
+# params = {
+# 'src': local_dir,
+# 'user2': ap_user,
+# 'host2': '10.10.10.207',
+# 'tgt': '/tmp/',
+# 'pwd': ap_pw,
+# 'opts': ''
+# }
+# apModel= "ecw5410"
+#
+#
+# url = 'https://tip.jfrog.io/artifactory/tip-wlan-ap-firmware/'
+# url = url + apModel
+# projId = client.get_project_id(project_name= 'WLAN')
+# print("TIP WLAN Project ID Is :", projId)
+#
+# rid = client.get_run_id(test_run_name= 'TIP-DEMO4')
+# print(rid)
+# Test: RunTest = RunTest()
+# Build: GetBuild = GetBuild()
+# '''
+# binary_fetch_result = Build.get_latest_image(url)
+# print("UPDATING TEST RAIL WITH TEST RESULT FOR CASE_ID 940: Download latest openwrt image from Jfrog")
+#
+# if binary_fetch_result == 'pass':
+# client.update_testrail(case_id=940, run_id=rid, status_id=1, msg='latest firmware downloaded')
+# else:
+# client.update_testrail(case_id=940, run_id=rid, status_id=5, msg='Firmware Download failed')
+#
+# sleep(10)
+# print("Upgrading AP with latest image downloaded")
+# ap_upgrade_result = openwrt_ap.ap_upgrade(**params)
+# sleep(10)
+# print("UPDATING TEST RAIL WITH TEST RESULT FOR CASE_ID 937")
+# sleep(10)
+# if ap_upgrade_result == 'pass':
+# client.update_testrail(case_id=937, run_id=rid, status_id=1, msg='AP upgraded with latest Firmware')
+# else:
+# client.update_testrail(case_id=937, run_id=rid, status_id=5, msg='Firmware upgrade failed in AP ')
+# print("Upgrading AWS Opensync gateway with latest docker image from Jfrog")
+# OpensyncGw_UpgResult = Build.run_opensyncgw_in_aws()
+# if OpensyncGw_UpgResult == 'pass':
+# client.update_testrail(case_id=936, run_id=rid, status_id=1, msg='Opensync GW upgraded with latest Firmware')
+# else:
+# client.update_testrail(case_id=936, run_id=rid, status_id=5, msg='Firmware upgrade failed in Opensync Gateway')
+# sleep(10)
+# '''
+# pprint.pprint(params)
+# ap_cert_result = openwrt_ap.apCopyCert(**params)
+# print("Executing TestCase 938: single Client Connectivity test")
+# openwrt_ap.restartGw(**params)
+# Test.TestCase_938(rid)
+#
+# print("Executing TestCase 941: Multi Client Connectivity test")
+# Test.TestCase_941(rid)
+# sleep(100)
+# print("Executing TestCase 939:Counting The number of Clients Connected from MQTT")
+# Test.TestCase_939(rid)
+#
+#
+#
+#
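The *_CICD_RESULTS.txt files consumed by TestCase_LF_External above follow a
simple "KEY value" line format. A minimal standalone parsing sketch (the
TestRail update call is left out; accumulating MSG lines follows the comments
above):

    import re

    def parse_cicd_results(path, default_run_id):
        # Returns (case_id, run_id, status_id, msg) parsed from one results file.
        case_id, run_id, status_id, msg = -1, default_run_id, 1, ""
        with open(path) as rfile:
            for line in rfile:
                m = re.search(r'(\S+) (.*)', line)
                if m is None:
                    continue
                k, v = m.group(1), m.group(2)  # group(1) is the key, group(2) the value
                if k == "CASE_ID":
                    case_id = v
                elif k == "RUN_ID":
                    run_id = v
                elif k == "STATUS":
                    status_id = v
                elif k == "MSG":
                    msg = v if msg == "" else msg + "\n" + v
        return case_id, run_id, status_id, msg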
diff --git a/py-scripts/create_l3.py b/py-scripts/create_l3.py
index 98602f34..3b51dbe8 100755
--- a/py-scripts/create_l3.py
+++ b/py-scripts/create_l3.py
@@ -3,6 +3,8 @@
"""
This script will create a variable number of layer3 stations each with their own set of cross-connects and endpoints.
+ If you want the station IDs to start above zero, set --number_template to the first ID to use.
+
Use './create_l3.py --help' to see command line usage and options
"""
@@ -164,7 +166,7 @@ python3 ./test_ipv4_variable_time.py
if (args.num_stations is not None) and (int(args.num_stations) > 0):
num_sta = int(args.num_stations)
- station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta - 1, padding_number_=10000,
+ station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=int(args.number_template), end_id_=num_sta+int(args.number_template) - 1, padding_number_=10000,
radio=args.radio)
ip_var_test = CreateL3(host=args.mgr,
port=args.mgr_port,
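With this change the station numbering starts at --number_template rather than
zero. A rough standalone equivalent of the resulting names (the real logic
lives in LFUtils.portNameSeries; deriving the zero-padding width from
padding_number_ is an assumption):

    def port_name_series(prefix="sta", start_id=0, end_id=1, padding_number=10000):
        # 10000 implies 4-digit zero-padded suffixes: sta0000, sta0001, ...
        width = len(str(padding_number)) - 1
        return ["%s%0*d" % (prefix, width, i) for i in range(start_id, end_id + 1)]

    print(port_name_series(start_id=2, end_id=4))  # ['sta0002', 'sta0003', 'sta0004']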
diff --git a/py-scripts/create_station.py b/py-scripts/create_station.py
index df685f58..f35aa101 100755
--- a/py-scripts/create_station.py
+++ b/py-scripts/create_station.py
@@ -27,12 +27,14 @@ class CreateStation(Realm):
_password=None,
_host=None,
_port=None,
+ _mode=0,
_sta_list=None,
_number_template="00000",
_radio="wiphy0",
_proxy_str=None,
_debug_on=False,
_up=True,
+ _set_txo_data=None,
_exit_on_error=False,
_exit_on_fail=False):
super().__init__(_host,
@@ -42,25 +44,26 @@ class CreateStation(Realm):
self.ssid = _ssid
self.security = _security
self.password = _password
+ self.mode = _mode
self.sta_list = _sta_list
self.radio = _radio
self.timeout = 120
self.number_template = _number_template
self.debug = _debug_on
self.up = _up
+ self.set_txo_data = _set_txo_data
self.station_profile = self.new_station_profile()
self.station_profile.lfclient_url = self.lfclient_url
self.station_profile.ssid = self.ssid
self.station_profile.ssid_pass = self.password,
self.station_profile.security = self.security
self.station_profile.number_template_ = self.number_template
- self.station_profile.mode = 0
+ self.station_profile.mode = self.mode
if self.debug:
print("----- Station List ----- ----- ----- ----- ----- ----- \n")
pprint.pprint(self.sta_list)
print("---- ~Station List ----- ----- ----- ----- ----- ----- \n")
-
def build(self):
# Build stations
self.station_profile.use_security(self.security, self.ssid, self.password)
@@ -70,6 +73,15 @@ class CreateStation(Realm):
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
+ if self.set_txo_data is not None:
+ self.station_profile.set_wifi_txo(txo_ena=self.set_txo_data["txo_enable"],
+ tx_power=self.set_txo_data["txpower"],
+ pream=self.set_txo_data["pream"],
+ mcs=self.set_txo_data["mcs"],
+ nss=self.set_txo_data["nss"],
+ bw=self.set_txo_data["bw"],
+ retries=self.set_txo_data["retries"],
+ sgi=self.set_txo_data["sgi"], )
self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
if self.up:
self.station_profile.admin_up()
@@ -78,7 +90,7 @@ class CreateStation(Realm):
def main():
- parser = LFCliBase.create_basic_argparse(
+ parser = LFCliBase.create_basic_argparse( # see create_basic_argparse in ../py-json/LANforge/lfcli_base.py
prog='create_station.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
@@ -91,6 +103,7 @@ def main():
Command example:
./create_station.py
--radio wiphy0
+ --start_id 2
--num_stations 3
--security open
--ssid netgear
@@ -98,14 +111,21 @@ Command example:
--debug
''')
required = parser.add_argument_group('required arguments')
- #required.add_argument('--security', help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >', required=True)
+ required.add_argument('--start_id', help='Station ID to start the series at (default 0)', default=0)
+
+ optional = parser.add_argument_group('Optional arguments')
+ optional.add_argument('--mode', help='Mode for your station (as a number)',default=0)
args = parser.parse_args()
- #if args.debug:
+ # if args.debug:
# pprint.pprint(args)
# time.sleep(5)
if (args.radio is None):
- raise ValueError("--radio required")
+ raise ValueError("--radio required")
+
+ start_id = 0
+ if (args.start_id != 0):
+ start_id = int(args.start_id)
num_sta = 2
if (args.num_stations is not None) and (int(args.num_stations) > 0):
@@ -113,20 +133,34 @@ Command example:
num_sta = num_stations_converted
station_list = LFUtils.port_name_series(prefix="sta",
- start_id=0,
- end_id=num_sta-1,
- padding_number=10000,
- radio=args.radio)
+ start_id=start_id,
+ end_id=start_id + num_sta - 1,
+ padding_number=10000,
+ radio=args.radio)
+
+ print("station_list {}".format(station_list))
+ set_txo_data={
+ "txo_enable": 1,
+ "txpower": 255,
+ "pream": 0,
+ "mcs": 0,
+ "nss": 0,
+ "bw": 3,
+ "retries": 1,
+ "sgi": 0
+ }
create_station = CreateStation(_host=args.mgr,
- _port=args.mgr_port,
- _ssid=args.ssid,
- _password=args.passwd,
- _security=args.security,
- _sta_list=station_list,
- _radio=args.radio,
- _proxy_str=args.proxy,
- _debug_on=args.debug)
+ _port=args.mgr_port,
+ _ssid=args.ssid,
+ _password=args.passwd,
+ _security=args.security,
+ _sta_list=station_list,
+ _mode=args.mode,
+ _radio=args.radio,
+ _set_txo_data=None,
+ _proxy_str=args.proxy,
+ _debug_on=args.debug)
create_station.build()
print('Created %s stations' % num_sta)
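Note that main() builds set_txo_data but then passes _set_txo_data=None, so
the TX overrides above are not applied. A sketch of wiring them through (same
constructor, same field names as above):

    create_station = CreateStation(_host=args.mgr,
                                   _port=args.mgr_port,
                                   _ssid=args.ssid,
                                   _password=args.passwd,
                                   _security=args.security,
                                   _sta_list=station_list,
                                   _mode=args.mode,
                                   _radio=args.radio,
                                   _set_txo_data=set_txo_data,  # apply the overrides
                                   _proxy_str=args.proxy,
                                   _debug_on=args.debug)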
diff --git a/py-scripts/csv_to_influx.py b/py-scripts/csv_to_influx.py
index f710356f..8c126b0d 100755
--- a/py-scripts/csv_to_influx.py
+++ b/py-scripts/csv_to_influx.py
@@ -42,7 +42,7 @@ class CSVtoInflux():
target_csv=None,
sep='\t'):
self.influxdb = influxdb
- self.target_csv = target_csv.replace('/home/lanforge/html-reports/', '')
+ self.target_csv = target_csv
self.influx_tag = _influx_tag
self.sep = sep
@@ -69,7 +69,10 @@ class CSVtoInflux():
tags = dict()
print("row: %s" % row)
short_description = row[columns['short-description']]
- numeric_score = float(row[columns['numeric-score']])
+ if row[columns['numeric-score']] == 'NaN':
+ numeric_score = 0.0  # Influx needs a numeric field; treat NaN as zero
+ else:
+ numeric_score = float(row[columns['numeric-score']])
date = row[columns['Date']]
date = datetime.datetime.utcfromtimestamp(int(date) / 1000).isoformat() #convert to datetime so influx can read it, this is required
for variable in csv_variables:
@@ -146,9 +149,7 @@ python3 csv_to_influx.py --influx_host localhost --influx_org Candela --influx_t
args = parser.parse_args()
- influxdb = RecordInflux(_lfjson_host=lfjson_host,
- _lfjson_port=lfjson_port,
- _influx_host=args.influx_host,
+ influxdb = RecordInflux(_influx_host=args.influx_host,
_influx_port=args.influx_port,
_influx_org=args.influx_org,
_influx_token=args.influx_token,
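The NaN guard above can be made more general; a small hedged helper (mapping
any unparsable score to 0.0 is an assumption about the intended behavior):

    def parse_numeric_score(raw):
        # Influx numeric fields cannot take 'NaN'; fall back to 0.0
        try:
            score = float(raw)
        except (TypeError, ValueError):
            return 0.0
        return 0.0 if score != score else score  # NaN != NaN

    parse_numeric_score('NaN')   # 0.0
    parse_numeric_score('42.5')  # 42.5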
diff --git a/py-scripts/cv_to_grafana.py b/py-scripts/cv_to_grafana.py
index df3d2c02..3e1574c5 100755
--- a/py-scripts/cv_to_grafana.py
+++ b/py-scripts/cv_to_grafana.py
@@ -16,7 +16,6 @@ Influx from this script.
--line "Resource=1.1 Profile=default Amount=4 Uses-1=wiphy1 DUT=DUT_TO_GRAFANA_DUT Traffic=wiphy1 Freq=-1"
--line "Resource=1.1 Profile=upstream Amount=1 Uses-1=eth1 DUT=DUT_TO_GRAFANA_DUT Traffic=eth1 Freq=-1"
--dut DUT_TO_GRAFANA
---test_rig Stidmatt-01
--create_scenario DUT_TO_GRAFANA_SCENARIO
--station 1.1.sta00002
--duration 15s
@@ -103,7 +102,6 @@ def main():
--line
--line
--dut
- --test_rig
--create_scenario
--station
--influx_tag
diff --git a/py-scripts/ghost_profile.py b/py-scripts/ghost_profile.py
new file mode 100755
index 00000000..297b4fd6
--- /dev/null
+++ b/py-scripts/ghost_profile.py
@@ -0,0 +1,220 @@
+#!/usr/bin/env python3
+
+"""
+NAME: ghost_profile.py
+PURPOSE: modify ghost database from the command line.
+SETUP: A Ghost installation which the user has admin access to.
+EXAMPLE: ./ghost_profile.py --article_text_file text.txt --title Test --authors Matthew --ghost_token SECRET_KEY --ghost_host 192.168.1.1
+
+There is a specific method for uploading kpi graphs, kpi_to_ghost, exposed via the --kpi_to_ghost flag.
+
+EXAMPLE: ./ghost_profile.py --ghost_token TOKEN --ghost_host 192.168.100.147
+--folders /home/lanforge/html-reports/wifi-capacity-2021-06-04-02-51-07
+--kpi_to_ghost appl --authors Matthew --title 'wifi capacity 2021 06 04 02 51 07' --server 192.168.93.51
+--user_pull lanforge --password_pull lanforge --customer candela --testbed heather --test_run test-run-6
+--user_push matt --password_push PASSWORD
+
+EXAMPLE 2: ./ghost_profile.py --ghost_token TOKEN
+--ghost_host 192.168.100.147 --server 192.168.93.51 --customer candela
+--testbed heather --user_push matt --password_push "amount%coverage;Online" --kpi_to_ghost app
+--folders /home/lanforge/html-reports/wifi-capacity-2021-06-14-10-42-29 --grafana_token TOKEN
+--grafana_host 192.168.100.201
+
+This script uses pyjwt. If you hit the error "module 'jwt' has no attribute 'encode'", run: pip3 uninstall jwt pyjwt && pip3 install pyjwt
+ Matthew Stidham
+ Copyright 2021 Candela Technologies Inc
+ License: Free to distribute and modify. LANforge systems must be licensed.
+"""
+import sys
+import os
+import argparse
+
+if sys.version_info[0] != 3:
+ print("This script requires Python 3")
+ exit(1)
+
+if 'py-json' not in sys.path:
+ sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
+ sys.path.append(os.path.join(os.path.abspath('..'), 'py-dashboard'))
+
+from GhostRequest import GhostRequest
+
+
+class UseGhost(GhostRequest):
+ def __init__(self,
+ _ghost_token=None,
+ host="localhost",
+ port=8080,
+ _debug_on=False,
+ _exit_on_fail=False,
+ _ghost_host="localhost",
+ _ghost_port=2368,
+ influx_host=None,
+ influx_port=None,
+ influx_org=None,
+ influx_token=None,
+ influx_bucket=None):
+ super().__init__(_ghost_host,
+ str(_ghost_port),
+ _api_token=_ghost_token,
+ influx_host=influx_host,
+ influx_port=influx_port,
+ influx_org=influx_org,
+ influx_token=influx_token,
+ influx_bucket=influx_bucket,
+ debug_=_debug_on)
+ self.ghost_host = _ghost_host
+ self.ghost_port = _ghost_port
+ self.ghost_token = _ghost_token
+ self.influx_host = influx_host
+ self.influx_port = influx_port
+ self.influx_org = influx_org
+ self.influx_token = influx_token
+ self.influx_bucket = influx_bucket
+
+ def create_post_from_file(self, title, file, tags, authors):
+ text = open(file).read()
+ return self.create_post(title=title, text=text, tags=tags, authors=authors)
+
+ def kpi(self,
+ authors,
+ folders,
+ parent_folder,
+ title,
+ server_pull,
+ ghost_host,
+ port,
+ user_push,
+ password_push,
+ customer,
+ testbed,
+ test_run,
+ grafana_token,
+ grafana_host,
+ grafana_port,
+ datasource,
+ grafana_bucket):
+ target_folders = list()
+ return self.kpi_to_ghost(authors,
+ folders,
+ parent_folder,
+ title,
+ server_pull,
+ ghost_host,
+ port,
+ user_push,
+ password_push,
+ customer,
+ testbed,
+ test_run,
+ target_folders,
+ grafana_token,
+ grafana_host,
+ grafana_port,
+ datasource,
+ grafana_bucket)
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ prog='ghost_profile.py',
+ formatter_class=argparse.RawTextHelpFormatter,
+ epilog='''Manage Ghost Website''',
+ description='''
+ ghost_profile.py
+ ----------------
+ Command example:
+ ./ghost_profile.py
+ --ghost_token'''
+ )
+ optional = parser.add_argument_group('optional arguments')
+ optional.add_argument('--ghost_token', default=None)
+ optional.add_argument('--create_post', default=None)
+ optional.add_argument('--article_text_file', default=None)
+
+ optional.add_argument('--ghost_port', help='Ghost port if different from 2368', default=2368)
+ optional.add_argument('--ghost_host', help='Ghost host if different from localhost', default='localhost')
+ optional.add_argument('--article_text')
+ optional.add_argument('--article_tags', action='append')
+ optional.add_argument('--authors', action='append')
+ optional.add_argument('--title', default=None)
+ optional.add_argument('--image', default=None)
+ optional.add_argument('--folder', default=None)
+ optional.add_argument('--custom_post', default=None)
+ optional.add_argument('--kpi_to_ghost', help='Generate a Ghost report from KPI spreadsheets', action="store_true")
+ optional.add_argument('--folders', action='append', default=None)
+ optional.add_argument('--server_pull')
+ optional.add_argument('--port', default=22)
+ optional.add_argument('--user_push')
+ optional.add_argument('--password_push')
+ optional.add_argument('--customer')
+ optional.add_argument('--testbed')
+ optional.add_argument('--test_run', default=None)
+ optional.add_argument('--grafana_token', default=None)
+ optional.add_argument('--grafana_host', default=None)
+ optional.add_argument('--grafana_port', default=3000)
+ optional.add_argument('--parent_folder', default=None)
+ optional.add_argument('--datasource', default='InfluxDB')
+ optional.add_argument('--grafana_bucket', default=None)
+ optional.add_argument('--influx_host')
+ optional.add_argument('--influx_token', help='Token for your Influx database')
+ optional.add_argument('--influx_bucket', help='Bucket for your Influx database')
+ optional.add_argument('--influx_org', help='Organization for your Influx database')
+ optional.add_argument('--influx_port', help='Port where your influx database is located', default=8086)
+ optional.add_argument('--influx_tag', action='append', nargs=2,
+ help='--influx_tag Can add more than one of these.')
+ optional.add_argument('--influx_mgr',
+ help='IP address of the server your Influx database is hosted if different from your LANforge Manager',
+ default=None)
+ optional.add_argument('--debug', help='Enable debugging', default=False, action="store_true")
+ args = parser.parse_args()
+
+ Ghost = UseGhost(_ghost_token=args.ghost_token,
+ _ghost_port=args.ghost_port,
+ _ghost_host=args.ghost_host,
+ influx_host=args.influx_host,
+ influx_port=args.influx_port,
+ influx_org=args.influx_org,
+ influx_token=args.influx_token,
+ influx_bucket=args.influx_bucket,
+ _debug_on=args.debug)
+
+ if args.create_post is not None:
+ Ghost.create_post(args.title, args.article_text, args.article_tags, args.authors)
+ if args.article_text_file is not None:
+ Ghost.create_post_from_file(args.title, args.article_text_file, args.article_tags, args.authors)
+
+ if args.image is not None:
+ Ghost.upload_image(args.image)
+
+ if args.custom_post is not None:
+ if args.folders is not None:
+ Ghost.custom_post(args.folders, args.authors)
+ else:
+ Ghost.custom_post(args.folder, args.authors)
+ else:
+ if args.folder is not None:
+ Ghost.upload_images(args.folder)
+
+ if args.kpi_to_ghost is True:
+ Ghost.kpi(args.authors,
+ args.folders,
+ args.parent_folder,
+ args.title,
+ args.server_pull,
+ args.ghost_host,
+ args.port,
+ args.user_push,
+ args.password_push,
+ args.customer,
+ args.testbed,
+ args.test_run,
+ args.grafana_token,
+ args.grafana_host,
+ args.grafana_port,
+ args.datasource,
+ args.grafana_bucket)
+
+
+if __name__ == "__main__":
+ main()
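Beyond the CLI, UseGhost can be driven directly from Python; a minimal sketch
(the token, host, and file name are placeholder values):

    from ghost_profile import UseGhost

    ghost = UseGhost(_ghost_token='SECRET_KEY',      # placeholder admin API key
                     _ghost_host='192.168.100.147',  # placeholder Ghost host
                     _ghost_port=2368)
    # Publish a post whose body comes from a local text file
    ghost.create_post_from_file(title='Test', file='text.txt',
                                tags=None, authors=['Matthew'])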
diff --git a/py-scripts/grafana_profile.py b/py-scripts/grafana_profile.py
index 06c15a07..73ace537 100755
--- a/py-scripts/grafana_profile.py
+++ b/py-scripts/grafana_profile.py
@@ -19,8 +19,8 @@ if 'py-json' not in sys.path:
from GrafanaRequest import GrafanaRequest
from LANforge.lfcli_base import LFCliBase
-import json
import string
+<<<<<<< HEAD
import random
@@ -161,109 +161,11 @@ class UseGrafana(LFCliBase):
options = dict()
options['alertThreshold'] = True
- groupBy = list()
- groupBy.append(self.groupby('$__interval', 'time'))
- groupBy.append(self.groupby('null', 'fill'))
+class UseGrafana(GrafanaRequest):
+>>>>>>> 0ef021e1165cbaa612e5128bc48d6abfbb7b887b
- targets = list()
- counter = 0
- new_target = self.maketargets(bucket, scriptname, groupBy, counter, graph_group,testbed)
- targets.append(new_target)
-
- fieldConfig = dict()
- fieldConfig['defaults'] = dict()
- fieldConfig['overrides'] = list()
-
- transformation = dict()
- transformation['id'] = "renameByRegex"
- transformation_options = dict()
- transformation_options['regex'] = "(.*) value.*"
- transformation_options['renamePattern'] = "$1"
- transformation['options'] = transformation_options
-
- xaxis = dict()
- xaxis['buckets'] = None
- xaxis['mode'] = "time"
- xaxis['name'] = None
- xaxis['show'] = True
- xaxis['values'] = list()
-
- yaxis = dict()
- yaxis['format'] = 'short'
- yaxis['label'] = unit_dict[graph_group]
- yaxis['logBase'] = 1
- yaxis['max'] = None
- yaxis['min'] = None
- yaxis['show'] = True
-
- yaxis1 = dict()
- yaxis1['align'] = False
- yaxis1['alignLevel'] = None
-
- panel['aliasColors'] = dict()
- panel['bars'] = False
- panel['dashes'] = False
- panel['dashLength'] = 10
- panel['datasource'] = datasource
- panel['fieldConfig'] = fieldConfig
- panel['fill'] = 0
- panel['fillGradient'] = 0
- panel['gridPos'] = gridpos
- panel['hiddenSeries'] = False
- panel['id'] = index
- panel['legend'] = legend
- panel['lines'] = True
- panel['linewidth'] = 1
- panel['nullPointMode'] = 'null'
- panel['options'] = options
- panel['percentage'] = False
- panel['pluginVersion'] = '7.5.4'
- panel['pointradius'] = 2
- panel['points'] = True
- panel['renderer'] = 'flot'
- panel['seriesOverrides'] = list()
- panel['spaceLength'] = 10
- panel['stack'] = False
- panel['steppedLine'] = False
- panel['targets'] = targets
- panel['thresholds'] = list()
- panel['timeFrom'] = None
- panel['timeRegions'] = list()
- panel['timeShift'] = None
- if graph_group is not None:
- panel['title'] = scriptname + ' ' + graph_group
- else:
- panel['title'] = scriptname
- panel['transformations'] = list()
- panel['transformations'].append(transformation)
- panel['type'] = "graph"
- panel['xaxis'] = xaxis
- panel['yaxes'] = list()
- panel['yaxes'].append(yaxis)
- panel['yaxes'].append(yaxis)
- panel['yaxis'] = yaxis1
-
- panels.append(panel)
- index = index + 1
- input1['annotations'] = annot
- input1['editable'] = True
- input1['gnetId'] = None
- input1['graphTooltip'] = 0
- input1['links'] = list()
- input1['panels'] = panels
- input1['refresh'] = False
- input1['schemaVersion'] = 27
- input1['style'] = 'dark'
- input1['tags'] = list()
- input1['templating'] = templating
- input1['time'] = timedict
- input1['timepicker'] = dict()
- input1['timezone'] = ''
- input1['title'] = ("Testbed: %s" % title)
- input1['uid'] = uid
- input1['version'] = 11
- return self.GR.create_dashboard_from_dict(dictionary=json.dumps(input1))
def read_csv(self, file):
csv = open(file).read().split('\n')
@@ -280,19 +182,6 @@ class UseGrafana(LFCliBase):
results.append(row[value])
return results
- def get_graph_groups(self,target_csvs): # Get the unique values in the Graph-Group column
- dictionary = dict()
- for target_csv in target_csvs:
- if len(target_csv) > 1:
- csv = self.read_csv(target_csv)
- # Unique values in the test-id column
- scripts = list(set(self.get_values(csv,'test-id')))
- # we need to make sure we match each Graph Group to the script it occurs in
- for script in scripts:
- # Unique Graph Groups for each script
- dictionary[script] = list(set(self.get_values(csv,'Graph-Group')))
- print(dictionary)
- return dictionary
def get_units(self, target_csv):
csv = self.read_csv(target_csv)
@@ -324,6 +213,12 @@ def main():
--graph_groups 'Per Stations Rate DL'
--graph_groups 'Per Stations Rate UL'
--graph_groups 'Per Stations Rate UL+DL'
+
+ Create a snapshot of a dashboard:
+ ./grafana_profile.py --grafana_token TOKEN
+ --grafana_host HOST
+ --create_snapshot
+ --title TITLE_OF_DASHBOARD
''')
required = parser.add_argument_group('required arguments')
required.add_argument('--grafana_token', help='token to access your Grafana database', required=True)
diff --git a/py-scripts/influx2.py b/py-scripts/influx2.py
index 0e561945..48ead535 100755
--- a/py-scripts/influx2.py
+++ b/py-scripts/influx2.py
@@ -21,13 +21,11 @@ import json
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS
import datetime
-from LANforge.lfcli_base import LFCliBase
+#from LANforge.lfcli_base import LFCliBase
import time
-class RecordInflux(LFCliBase):
+class RecordInflux:
def __init__(self,
- _lfjson_host="lanforge",
- _lfjson_port=8080,
_influx_host="localhost",
_influx_port=8086,
_influx_org=None,
@@ -35,9 +33,6 @@ class RecordInflux(LFCliBase):
_influx_bucket=None,
_debug_on=False,
_exit_on_fail=False):
- super().__init__(_lfjson_host, _lfjson_port,
- _debug=_debug_on,
- _exit_on_fail=_exit_on_fail)
self.influx_host = _influx_host
self.influx_port = _influx_port
self.influx_org = _influx_org
@@ -49,10 +44,6 @@ class RecordInflux(LFCliBase):
org=self.influx_org,
debug=_debug_on)
self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
- #print("org: ", self.influx_org)
- #print("token: ", self.influx_token)
- #print("bucket: ", self.influx_bucket)
- #exit(0)
def post_to_influx(self, key, value, tags, time):
p = influxdb_client.Point(key)
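With the LFCliBase dependency removed, RecordInflux is a plain class that can
be used on its own; a minimal sketch (org, token, and bucket are placeholders,
and tags is assumed to be a dict of tag names to values):

    import datetime
    from influx2 import RecordInflux

    influxdb = RecordInflux(_influx_host='localhost',
                            _influx_port=8086,
                            _influx_org='Candela',   # placeholder org
                            _influx_token='TOKEN',   # placeholder API token
                            _influx_bucket='ben')    # placeholder bucket
    now = datetime.datetime.utcnow().isoformat() + 'Z'
    influxdb.post_to_influx('rx-rate', 1000000.0, {'station': 'sta0000'}, now)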
diff --git a/py-scripts/lf_ap_auto_test.py b/py-scripts/lf_ap_auto_test.py
index d77cc3d2..573e44a0 100755
--- a/py-scripts/lf_ap_auto_test.py
+++ b/py-scripts/lf_ap_auto_test.py
@@ -23,7 +23,7 @@ the options and how best to input data.
--set 'Skip 2.4Ghz Tests' 1 --set 'Skip 5Ghz Tests' 1 \
--set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Stability' 0 --set 'Band-Steering' 0 \
--set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Long-Term' 0 \
- --test_rig Testbed-01 --pull_report \
+ --pull_report \
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
--influx_bucket ben \
@@ -46,7 +46,6 @@ show_log: 0
port_sorting: 0
kpi_id: AP Auto
bg: 0xE0ECF8
-test_rig: Ferndale-01-Basic
show_scan: 1
auto_helper: 1
skip_2: 1
@@ -187,6 +186,7 @@ class ApAutoTest(cvtest):
lf_port=8080,
lf_user="lanforge",
lf_password="lanforge",
+ local_lf_report_dir="",
instance_name="ap_auto_instance",
config_name="ap_auto_config",
upstream="1.1.eth1",
@@ -231,6 +231,7 @@ class ApAutoTest(cvtest):
self.raw_lines_file = raw_lines_file
self.sets = sets
self.graph_groups = graph_groups
+ self.local_lf_report_dir = local_lf_report_dir
def setup(self):
# Nothing to do at this time.
@@ -283,7 +284,7 @@ class ApAutoTest(cvtest):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets,
self.pull_report, self.lf_host, self.lf_user, self.lf_password,
- cv_cmds, graph_groups_file=self.graph_groups)
+ cv_cmds, graph_groups_file=self.graph_groups, local_lf_report_dir=self.local_lf_report_dir)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
@@ -333,6 +334,7 @@ def main():
help="Specify 2.4Ghz radio. May be specified multiple times.")
parser.add_argument("--radio5", action='append', nargs=1, default=[],
help="Specify 5Ghz radio. May be specified multiple times.")
+ parser.add_argument("--local_lf_report_dir", help="--local_lf_report_dir default '' put where dataplane script run from",default="")
args = parser.parse_args()
@@ -346,6 +348,7 @@ def main():
config_name = args.config_name,
upstream = args.upstream,
pull_report = args.pull_report,
+ local_lf_report_dir = args.local_lf_report_dir,
dut5_0 = args.dut5_0,
dut2_0 = args.dut2_0,
load_old_cfg = args.load_old_cfg,
diff --git a/py-scripts/lf_csv.py b/py-scripts/lf_csv.py
new file mode 100644
index 00000000..a87fb038
--- /dev/null
+++ b/py-scripts/lf_csv.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+'''
+NAME: lf_csv.py
+
+PURPOSE:
+Common Library for generating csv for LANforge output
+
+SETUP:
+The /lanforge/html-reports directory needs to be present; otherwise the output is generated in a local file.
+
+EXAMPLE:
+see: /py-scripts/lf_report_test.py for an example
+
+COPYRIGHT
+ Copyright 2021 Candela Technologies Inc
+ License: Free to distribute and modify. LANforge systems must be licensed.
+
+INCLUDE_IN_README
+'''
+
+import pandas as pd
+
+
+class LfCSV:
+ def __init__(self,
+ _columns=['Stations', 'bk', 'be', 'vi', 'vo'],
+ _rows=[['sta0001', 'sta0002', 'sta0003', 'sta0004', 'sta0005'],
+ [1, 2, 3, 4, 5],
+ [11, 22, 33, 44, 55],
+ [6, 7, 8, 9, 10],
+ [66, 77, 88, 99, 100]],
+ _filename='test.csv'):
+ self.rows = _rows
+ self.columns = _columns
+ self.filename = _filename
+
+ def generate_csv(self):
+ df = {}
+ for i in range(len(self.columns)):
+ df[self.columns[i]] = self.rows[i]
+ csv_df = pd.DataFrame(df)
+ print(csv_df)
+ csv_df.to_csv(self.filename, index=False, encoding='utf-8', na_rep='NA', float_format='%.2f')
+
+
+if __name__ == "__main__":
+ test = LfCSV()
+ test.generate_csv()
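
A short usage sketch of the new helper; the column names and values here are arbitrary:

from lf_csv import LfCSV

csv = LfCSV(_columns=['Stations', 'bk', 'be'],
            _rows=[['sta0001', 'sta0002'],  # one list per column, in column order
                   [1, 2],
                   [11, 22]],
            _filename='example.csv')
csv.generate_csv()  # prints the DataFrame and writes example.csv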
diff --git a/py-scripts/lf_dataplane_test.py b/py-scripts/lf_dataplane_test.py
index 17939f5b..98682f7a 100755
--- a/py-scripts/lf_dataplane_test.py
+++ b/py-scripts/lf_dataplane_test.py
@@ -121,7 +121,7 @@ class DataplaneTest(cv_test):
lf_user="lanforge",
lf_password="lanforge",
ssh_port=22,
- local_path="",
+ local_lf_report_dir="",
instance_name="dpt_instance",
config_name="dpt_config",
upstream="1.1.eth2",
@@ -138,7 +138,9 @@ class DataplaneTest(cv_test):
raw_lines_file="",
sets=[],
graph_groups=None,
- report_dir=""
+ report_dir="",
+ test_rig="",
+ debug=False
):
super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)
@@ -165,7 +167,9 @@ class DataplaneTest(cv_test):
self.graph_groups = graph_groups
self.report_dir = report_dir
self.ssh_port = ssh_port
- self.local_path = local_path
+ self.local_lf_report_dir = local_lf_report_dir
+ self.test_rig = test_rig
+ self.debug = debug
def setup(self):
# Nothing to do at this time.
@@ -200,6 +204,8 @@ class DataplaneTest(cv_test):
cfg_options.append("duration: " + self.duration)
if self.dut != "":
cfg_options.append("selected_dut: " + self.dut)
+ if self.test_rig != "":
+ cfg_options.append("test_rig: " + self.test_rig)
# We deleted the scenario earlier, now re-build new one line at a time.
@@ -209,8 +215,8 @@ class DataplaneTest(cv_test):
self.create_and_run_test(self.load_old_cfg, self.test_name, self.instance_name,
self.config_name, self.sets,
self.pull_report, self.lf_host, self.lf_user, self.lf_password,
- cv_cmds, ssh_port=self.ssh_port, local_path=self.local_path,
- graph_groups_file=self.graph_groups)
+ cv_cmds, ssh_port=self.ssh_port, local_lf_report_dir=self.local_lf_report_dir,
+ graph_groups_file=self.graph_groups, debug=self.debug)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
@@ -239,6 +245,11 @@ def main():
cv_add_base_parser(parser) # see cv_test_manager.py
+<<<<<<< HEAD
+=======
+ parser.add_argument('--json', help="--json json input file", default="")
+ parser.add_argument('--influx_json', help="--influx_json influx config json input file", default="")
+>>>>>>> 0ef021e1165cbaa612e5128bc48d6abfbb7b887b
parser.add_argument("-u", "--upstream", type=str, default="",
help="Upstream port for wifi capacity test ex. 1.1.eth2")
parser.add_argument("--station", type=str, default="",
@@ -254,9 +265,76 @@ def main():
help="Specify duration of each traffic run")
parser.add_argument("--graph_groups", help="File to save graph_groups to", default=None)
parser.add_argument("--report_dir", default="")
+ parser.add_argument("--local_lf_report_dir", help="--local_lf_report_dir default '' put where dataplane script run from",default="")
+ parser.add_argument("--debug", default=False)
+
args = parser.parse_args()
+<<<<<<< HEAD
+=======
+ # use json config file
+ if args.json != "":
+ try:
+ with open(args.json, 'r') as json_config:
+ json_data = json.load(json_config)
+ except:
+ print("Error reading {}".format(args.json))
+ # json configuation takes presidence to command line
+ if "mgr" in json_data:
+ args.mgr = json_data["mgr"]
+ if "port" in json_data:
+ args.port = json_data["port"]
+ if "lf_user" in json_data:
+ args.lf_user = json_data["lf_user"]
+ if "lf_password" in json_data:
+ args.lf_password = json_data["lf_password"]
+ if "instance_name" in json_data:
+ args.instance_name = json_data["instance_name"]
+ if "config_name" in json_data:
+ args.config_name = json_data["config_name"]
+ if "upstream" in json_data:
+ args.upstream = json_data["upstream"]
+ if "dut" in json_data:
+ args.dut = json_data["dut"]
+ if "duration" in json_data:
+ args.duration = json_data["duration"]
+ if "station" in json_data:
+ args.station = json_data["station"]
+ if "download_speed" in json_data:
+ args.download_speed = json_data["download_speed"]
+ if "upload_speed" in json_data:
+ args.upload_speed = json_data["upload_speed"]
+ if "pull_report" in json_data:
+ args.pull_report = json_data["pull_report"]
+ if "raw_line" in json_data:
+ # the json_data is a list , need to make into a list of lists, to match command line raw_line paramaters
+ # https://www.tutorialspoint.com/convert-list-into-list-of-lists-in-python
+ json_data_tmp = [[x] for x in json_data["raw_line"]]
+ args.raw_line = json_data_tmp
+
+
+ # use influx json config file
+ if args.influx_json != "":
+ try:
+ with open(args.influx_json, 'r') as influx_json_config:
+ influx_json_data = json.load(influx_json_config)
+ except:
+ print("Error reading {}".format(args.influx_json))
+ # json configuation takes presidence to command line
+ # influx DB configuration
+ if "influx_host" in influx_json_data:
+ args.influx_host = influx_json_data["influx_host"]
+ if "influx_port" in influx_json_data:
+ args.influx_port = influx_json_data["influx_port"]
+ if "influx_org" in influx_json_data:
+ args.influx_org = influx_json_data["influx_org"]
+ if "influx_token" in influx_json_data:
+ args.influx_token = influx_json_data["influx_token"]
+ if "influx_bucket" in influx_json_data:
+ args.influx_bucket = influx_json_data["influx_bucket"]
+
+>>>>>>> 0ef021e1165cbaa612e5128bc48d6abfbb7b887b
cv_base_adjust_parser(args)
CV_Test = DataplaneTest(lf_host = args.mgr,
@@ -267,6 +345,7 @@ def main():
config_name = args.config_name,
upstream = args.upstream,
pull_report = args.pull_report,
+ local_lf_report_dir = args.local_lf_report_dir,
load_old_cfg = args.load_old_cfg,
download_speed = args.download_speed,
upload_speed = args.upload_speed,
@@ -278,7 +357,9 @@ def main():
raw_lines = args.raw_line,
raw_lines_file = args.raw_lines_file,
sets = args.set,
- graph_groups = args.graph_groups
+ graph_groups = args.graph_groups,
+ test_rig=args.test_rig,
+ debug=args.debug
)
CV_Test.setup()
CV_Test.run()
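
The --json option expects a file whose keys mirror the checks in main() above; note this also relies on `import json` being present at the top of the script. A hypothetical config, written from Python so the expected shape is explicit (every value is a placeholder):

import json

example_cfg = {
    "mgr": "192.168.100.116",
    "port": 8080,
    "lf_user": "lanforge",
    "lf_password": "lanforge",
    "instance_name": "dataplane-instance",
    "config_name": "dpt_config",
    "upstream": "1.1.eth2",
    "dut": "asus_5g",
    "duration": "15s",
    "station": "1.1.sta01500",
    "download_speed": "85%",
    "upload_speed": "0",
    "pull_report": True,
    # a flat list here; main() wraps each entry as [entry] to match --raw_line
    "raw_line": ["pkts: 60;142;256;512;1024;MTU", "cust_pkt_sz: 88;1200"]
}
with open("lf_dataplane_config.json", "w") as f:
    json.dump(example_cfg, f, indent=4)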
diff --git a/py-scripts/lf_graph.py b/py-scripts/lf_graph.py
index df109b65..fa82d944 100755
--- a/py-scripts/lf_graph.py
+++ b/py-scripts/lf_graph.py
@@ -25,16 +25,20 @@ import pandas as pd
import pdfkit
import math
from matplotlib.colors import ListedColormap
+from lf_csv import LfCSV
# internal candela references included during initial phases, to be deleted at future date
# graph reporting classes
class lf_bar_graph():
- def __init__(self, _data_set=[[30, 55, 69, 37], [45, 67, 34, 22], [22, 45, 12, 34]],
+ def __init__(self, _data_set=[[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]],
_xaxis_name="x-axis",
_yaxis_name="y-axis",
- _xaxis_categories=[1, 2, 3, 4],
+ _xaxis_categories=[1, 2, 3, 4, 5],
+ _xaxis_label=["a", "b", "c", "d", "e"],
+ _graph_title="",
+ _title_size=16,
_graph_image_name="image_name",
_label=["bi-downlink", "bi-uplink", 'uplink'],
_color=None,
@@ -43,12 +47,22 @@ class lf_bar_graph():
_font_weight='bold',
_color_name=['lightcoral', 'darkgrey', 'r', 'g', 'b', 'y'],
_figsize=(10, 5),
- _dpi=96):
+ _show_bar_value=False,
+ _xaxis_step=5,
+ _xticks_font = None,
+ _text_font=None,
+ _text_rotation=None,
+ _grp_title = "",
+ _dpi=96,
+ _enable_csv=False):
self.data_set = _data_set
self.xaxis_name = _xaxis_name
self.yaxis_name = _yaxis_name
self.xaxis_categories = _xaxis_categories
+ self.xaxis_label = _xaxis_label
+ self.title = _graph_title
+ self.title_size = _title_size
self.graph_image_name = _graph_image_name
self.label = _label
self.color = _color
@@ -57,6 +71,14 @@ class lf_bar_graph():
self.font_weight = _font_weight
self.color_name = _color_name
self.figsize = _figsize
+ self.show_bar_value = _show_bar_value
+ self.xaxis_step = _xaxis_step
+ self.xticks_font = _xticks_font
+ self.text_font = _text_font
+ self.text_rotation = _text_rotation
+ self.grp_title = _grp_title
+ self.enable_csv = _enable_csv
+ self.lf_csv = LfCSV()
def build_bar_graph(self):
if self.color is None:
@@ -68,31 +90,53 @@ class lf_bar_graph():
fig = plt.subplots(figsize=self.figsize)
i = 0
+
+ def show_value(rects):
+ for rect in rects:
+ h = rect.get_height()
+ plt.text(rect.get_x() + rect.get_width() / 2., h, h,
+ ha='center', va='bottom', rotation=self.text_rotation, fontsize=self.text_font)
+
for data in self.data_set:
if i > 0:
br = br1
br2 = [x + self.bar_width for x in br]
- plt.bar(br2, self.data_set[i], color=self.color[i], width=self.bar_width,
- edgecolor=self.color_edge, label=self.label[i])
+ rects = plt.bar(br2, self.data_set[i], color=self.color[i], width=self.bar_width,
+ edgecolor=self.color_edge, label=self.label[i])
+ if self.show_bar_value:
+ show_value(rects)
br1 = br2
i = i + 1
else:
br1 = np.arange(len(self.data_set[i]))
- plt.bar(br1, self.data_set[i], color=self.color[i], width=self.bar_width,
- edgecolor=self.color_edge, label=self.label[i])
+ rects = plt.bar(br1, self.data_set[i], color=self.color[i], width=self.bar_width,
+ edgecolor=self.color_edge, label=self.label[i])
+ if self.show_bar_value:
+ show_value(rects)
i = i + 1
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
- """plt.xticks([r + self.bar_width for r in range(len(self.data_set[0]))],
- self.xaxis_categories)"""
- plt.xticks(np.arange(0, len(self.xaxis_categories), step=5))
+ if self.xaxis_categories[0] == 0:
+ plt.xticks(np.arange(0, len(self.xaxis_categories), step=self.xaxis_step),fontsize = self.xticks_font)
+ else:
+ plt.xticks(np.arange(0, len(self.data_set[0]), step=self.xaxis_step), self.xaxis_categories,
+ fontsize = self.xticks_font)
plt.legend()
-
+ plt.suptitle(self.title, fontsize=self.title_size)
+ plt.title(self.grp_title)
fig = plt.gcf()
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close()
print("{}.png".format(self.graph_image_name))
-
+ if self.enable_csv:
+ if self.data_set is not None:
+ self.lf_csv.columns = self.label
+ self.lf_csv.rows = self.data_set
+ self.lf_csv.filename = f"{self.graph_image_name}.csv"
+ self.lf_csv.generate_csv()
+ else:
+ print("No Dataset Found")
+ print("{}.csv".format(self.graph_image_name))
return "%s.png" % self.graph_image_name
@@ -104,9 +148,10 @@ class lf_scatter_graph():
_xaxis_name="x-axis",
_yaxis_name="y-axis",
_label=["num1", "num2"],
- _graph_image_name="image_name",
+ _graph_image_name="image_name1",
_color=["r", "y"],
- _figsize=(9, 4)):
+ _figsize=(9, 4),
+ _enable_csv=True):
self.x_data_set = _x_data_set
self.y_data_set = _y_data_set
self.xaxis_name = _xaxis_name
@@ -116,6 +161,8 @@ class lf_scatter_graph():
self.color = _color
self.label = _label
self.values = _values
+ self.enable_csv = _enable_csv
+ self.lf_csv = LfCSV()
def build_scatter_graph(self):
if self.color is None:
@@ -140,6 +187,11 @@ class lf_scatter_graph():
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close()
print("{}.png".format(self.graph_image_name))
+ if self.enable_csv:
+ self.lf_csv.columns = self.label
+ self.lf_csv.rows = self.y_data_set
+ self.lf_csv.filename = f"{self.graph_image_name}.csv"
+ self.lf_csv.generate_csv()
return "%s.png" % self.graph_image_name
@@ -150,9 +202,10 @@ class lf_stacked_graph():
_xaxis_name="Stations",
_yaxis_name="Numbers",
_label=['Success', 'Fail'],
- _graph_image_name="image_name",
+ _graph_image_name="image_name2",
_color=["b", "g"],
- _figsize=(9, 4)):
+ _figsize=(9, 4),
+ _enable_csv=True):
self.data_set = _data_set # [x_axis,y1_axis,y2_axis]
self.xaxis_name = _xaxis_name
self.yaxis_name = _yaxis_name
@@ -160,6 +213,8 @@ class lf_stacked_graph():
self.graph_image_name = _graph_image_name
self.label = _label
self.color = _color
+ self.enable_csv = _enable_csv
+ self.lf_csv = LfCSV()
def build_stacked_graph(self):
fig = plt.subplots(figsize=self.figsize)
@@ -177,7 +232,11 @@ class lf_stacked_graph():
plt.savefig("%s.png" % (self.graph_image_name), dpi=96)
plt.close()
print("{}.png".format(self.graph_image_name))
-
+ if self.enable_csv:
+ self.lf_csv.columns = self.label
+ self.lf_csv.rows = self.data_set
+ self.lf_csv.filename = f"{self.graph_image_name}.csv"
+ self.lf_csv.generate_csv()
return "%s.png" % (self.graph_image_name)
@@ -190,10 +249,11 @@ class lf_horizontal_stacked_graph():
_unit="%",
_xaxis_name="Stations",
_label=['Success', 'Fail'],
- _graph_image_name="image_name",
+ _graph_image_name="image_name3",
_color=["success", "Fail"],
_figsize=(9, 4),
- _disable_xaxis=False):
+ _disable_xaxis=False,
+ _enable_csv=True):
self.unit = _unit
self.seg = _seg
self.xaxis_set1 = _xaxis_set1
@@ -205,6 +265,8 @@ class lf_horizontal_stacked_graph():
self.label = _label
self.color = _color
self.disable_xaxis = _disable_xaxis
+ self.enable_csv = _enable_csv
+ self.lf_csv = LfCSV()
def build_horizontal_stacked_graph(self):
def sumzip(items):
@@ -246,7 +308,11 @@ class lf_horizontal_stacked_graph():
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close()
print("{}.png".format(self.graph_image_name))
-
+ if self.enable_csv:
+ self.lf_csv.columns = self.label
+ self.lf_csv.rows = self.data_set
+ self.lf_csv.filename = f"{self.graph_image_name}.csv"
+ self.lf_csv.generate_csv()
return "%s.png" % self.graph_image_name
@@ -261,7 +327,7 @@ if __name__ == "__main__":
"""
- #
+ #
test_file = open(output_html_1, "w")
test_file.write(graph_html_obj)
test_file.close()
@@ -293,7 +359,7 @@ if __name__ == "__main__":
"""
- #
+ #
test_file = open(output_html_2, "w")
test_file.write(graph_html_obj)
test_file.close()
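
A sketch exercising the new lf_bar_graph options; the data is arbitrary, and _xaxis_step=1 is set so every category gets a tick (the default step of 5 would label only every fifth position):

from lf_graph import lf_bar_graph

graph = lf_bar_graph(_data_set=[[30.4, 55.3, 69.2]],
                     _xaxis_name="Stations",
                     _yaxis_name="Throughput (Mbps)",
                     _xaxis_categories=["sta1", "sta2", "sta3"],
                     _label=["downlink"],
                     _graph_image_name="example_bar",
                     _graph_title="Example throughput",  # new: overall figure title
                     _show_bar_value=True,               # new: annotate each bar with its height
                     _xaxis_step=1,
                     _enable_csv=True)                   # new: also writes example_bar.csv via LfCSV
png_file = graph.build_bar_graph()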
diff --git a/py-scripts/lf_influx_db.json b/py-scripts/lf_influx_db.json
new file mode 100644
index 00000000..3aee6cfa
--- /dev/null
+++ b/py-scripts/lf_influx_db.json
@@ -0,0 +1,12 @@
+{
+ "influx_host":"192.168.100.201",
+ "influx_port": "8086",
+ "influx_org": "Candela",
+ "influx_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
+ "influx_bucket": "ben",
+ "influx_tag": "testbed Ferndale-01"
+}
+
+
+
+
\ No newline at end of file
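
For reference, a sketch of consuming this file the way the --influx_json handling in lf_dataplane_test.py does; splitting influx_tag into a name/value pair is an assumption based on its "testbed Ferndale-01" shape:

import json

with open("lf_influx_db.json", 'r') as f:
    influx_cfg = json.load(f)

influx_host = influx_cfg["influx_host"]
influx_bucket = influx_cfg["influx_bucket"]
# assumed: "influx_tag" holds a space-separated "name value" pair
tag_name, tag_value = influx_cfg["influx_tag"].split(" ", 1)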
diff --git a/py-scripts/lf_report.py b/py-scripts/lf_report.py
index a9d76de1..6f2aea90 100755
--- a/py-scripts/lf_report.py
+++ b/py-scripts/lf_report.py
@@ -28,107 +28,145 @@ INCLUDE_IN_README
import os
import shutil
import datetime
+
import pandas as pd
import pdfkit
+
# internal candela references included during initial phases, to be deleted at future date
# https://candelatech.atlassian.net/wiki/spaces/LANFORGE/pages/372703360/Scripting+Data+Collection+March+2021
# base report class
class lf_report():
def __init__(self,
- #_path the report directory under which the report directories will be created.
- _path = "/home/lanforge/html-reports",
- _alt_path = "",
- _date = "",
- _title="LANForge Test Run Heading",
- _table_title="LANForge Table Heading",
- _graph_title="LANForge Graph Title",
- _obj = "",
- _obj_title = "",
- _output_html="outfile.html",
- _output_pdf="outfile.pdf",
- _results_dir_name = "LANforge_Test_Results",
- _output_format = 'html', # pass in on the write functionality, current not used
- _dataframe="",
- _path_date_time=""): # this is where the final report is placed.
- #other report paths,
+ # _path the report directory under which the report directories will be created.
+ _path="/home/lanforge/html-reports",
+ _alt_path="",
+ _date="",
+ _title="LANForge Test Run Heading",
+ _table_title="LANForge Table Heading",
+ _graph_title="LANForge Graph Title",
+ _obj="",
+ _obj_title="",
+ _output_html="outfile.html",
+ _output_pdf="outfile.pdf",
+ _results_dir_name="LANforge_Test_Results",
+                 _output_format='html',  # passed in on the write functionality, currently not used
+ _dataframe="",
+ _path_date_time="",
+ _custom_css='custom-example.css'): # this is where the final report is placed.
+ # other report paths,
- # _path is where the directory with the data time will be created
- if _path == "local" or _path == "here":
- self.path = os.path.abspath(__file__)
- print("path set to file path: {}".format(self.path))
- elif _alt_path != "":
- self.path = _alt_path
- print("path set to alt path: {}".format(self.path))
- else:
- self.path = _path
- print("path set: {}".format(self.path))
-
- self.dataframe=_dataframe
- self.text = ""
- self.title=_title
- self.table_title=_table_title
- self.graph_title=_graph_title
- self.date=_date
- self.output_html=_output_html
- self.path_date_time = _path_date_time
- self.write_output_html = ""
- self.output_pdf=_output_pdf
- self.write_output_pdf = ""
- self.banner_html = ""
- self.graph_titles=""
- self.graph_image=""
- self.html = ""
- self.custom_html = ""
- self.objective = _obj
- self.obj_title = _obj_title
- #self.systeminfopath = ""
- self.date_time_directory = ""
- self.banner_directory = "artifacts"
- self.banner_file_name = "banner.png" # does this need to be configurable
- self.logo_directory = "artifacts"
- self.logo_file_name = "CandelaLogo2-90dpi-200x90-trans.png" # does this need to be configurable.
- self.current_path = os.path.dirname(os.path.abspath(__file__))
+ # _path is where the directory with the data time will be created
+ if _path == "local" or _path == "here":
+ self.path = os.path.abspath(__file__)
+ print("path set to file path: {}".format(self.path))
+ elif _alt_path != "":
+ self.path = _alt_path
+ print("path set to alt path: {}".format(self.path))
+ else:
+ self.path = _path
+ print("path set: {}".format(self.path))
- # pass in _date to allow to change after construction
- self.set_date_time_directory(_date,_results_dir_name)
- self.build_date_time_directory()
+ self.dataframe = _dataframe
+ self.text = ""
+ self.title = _title
+ self.table_title = _table_title
+ self.graph_title = _graph_title
+ self.date = _date
+ self.output_html = _output_html
+ self.path_date_time = _path_date_time
+ self.write_output_html = ""
+ self.output_pdf = _output_pdf
+ self.write_output_pdf = ""
+ self.banner_html = ""
+ self.footer_html = ""
+ self.graph_titles = ""
+ self.graph_image = ""
+ self.csv_file_name = ""
+ self.html = ""
+ self.custom_html = ""
+ self.objective = _obj
+ self.obj_title = _obj_title
+ # self.systeminfopath = ""
+ self.date_time_directory = ""
+ self.banner_directory = "artifacts"
+ self.banner_file_name = "banner.png" # does this need to be configurable
+ self.logo_directory = "artifacts"
+ self.logo_file_name = "CandelaLogo2-90dpi-200x90-trans.png" # does this need to be configurable.
+ self.logo_footer_file_name = "candela_swirl_small-72h.png" # does this need to be configurable.
+ self.current_path = os.path.dirname(os.path.abspath(__file__))
+ self.custom_css = _custom_css
+ # pass in _date to allow to change after construction
+ self.set_date_time_directory(_date, _results_dir_name)
+ self.build_date_time_directory()
+
+ self.font_file = "CenturyGothic.woff"
+ # move the banners and candela images to report path
+ self.copy_banner()
+ self.copy_css()
+ self.copy_logo()
+ self.copy_logo_footer()
- # move the banners and candela images to report path
- self.copy_banner()
- self.copy_logo()
-
def copy_banner(self):
- banner_src_file = str(self.current_path)+'/'+str(self.banner_directory)+'/'+str(self.banner_file_name)
- banner_dst_file = str(self.path_date_time)+'/'+ str(self.banner_file_name)
- #print("banner src_file: {}".format(banner_src_file))
- #print("dst_file: {}".format(banner_dst_file))
- shutil.copy(banner_src_file,banner_dst_file)
+ banner_src_file = str(self.current_path) + '/' + str(self.banner_directory) + '/' + str(self.banner_file_name)
+ banner_dst_file = str(self.path_date_time) + '/' + str(self.banner_file_name)
+ # print("banner src_file: {}".format(banner_src_file))
+ # print("dst_file: {}".format(banner_dst_file))
+ shutil.copy(banner_src_file, banner_dst_file)
+
+ def copy_css(self):
+ reportcss_src_file = str(self.current_path) + '/' + str(self.banner_directory) + '/report.css'
+ reportcss_dest_file = str(self.path_date_time) + '/report.css'
+
+ customcss_src_file = str(self.current_path) + '/' + str(self.banner_directory) + '/' + str(self.custom_css)
+ customcss_dest_file = str(self.path_date_time) + '/custom.css'
+
+ font_src_file = str(self.current_path) + '/' + str(self.banner_directory) + '/' + str(self.font_file)
+ font_dest_file = str(self.path_date_time) + '/' + str(self.font_file)
+
+ shutil.copy(reportcss_src_file, reportcss_dest_file)
+ shutil.copy(customcss_src_file, customcss_dest_file)
+ shutil.copy(font_src_file, font_dest_file)
def copy_logo(self):
- logo_src_file = str(self.current_path)+'/'+str(self.logo_directory)+'/'+str(self.logo_file_name)
- logo_dst_file = str(self.path_date_time)+'/'+ str(self.logo_file_name)
- #print("logo_src_file: {}".format(logo_src_file))
- #print("logo_dst_file: {}".format(logo_dst_file))
- shutil.copy(logo_src_file,logo_dst_file)
+ logo_src_file = str(self.current_path) + '/' + str(self.logo_directory) + '/' + str(self.logo_file_name)
+ logo_dst_file = str(self.path_date_time) + '/' + str(self.logo_file_name)
+ # print("logo_src_file: {}".format(logo_src_file))
+ # print("logo_dst_file: {}".format(logo_dst_file))
+ shutil.copy(logo_src_file, logo_dst_file)
- def move_graph_image(self,):
+ def copy_logo_footer(self):
+ logo_footer_src_file = str(self.current_path) + '/' + str(self.logo_directory) + '/' + str(
+ self.logo_footer_file_name)
+ logo_footer_dst_file = str(self.path_date_time) + '/' + str(self.logo_footer_file_name)
+ # print("logo_footer_src_file: {}".format(logo_footer_src_file))
+ # print("logo_footer_dst_file: {}".format(logo_footer_dst_file))
+ shutil.copy(logo_footer_src_file, logo_footer_dst_file)
+
+    def move_graph_image(self):
graph_src_file = str(self.graph_image)
- graph_dst_file = str(self.path_date_time)+'/'+ str(self.graph_image)
+ graph_dst_file = str(self.path_date_time) + '/' + str(self.graph_image)
print("graph_src_file: {}".format(graph_src_file))
print("graph_dst_file: {}".format(graph_dst_file))
- shutil.move(graph_src_file,graph_dst_file)
+ shutil.move(graph_src_file, graph_dst_file)
- def set_path(self,_path):
+ def move_csv_file(self):
+ csv_src_file = str(self.csv_file_name)
+ csv_dst_file = str(self.path_date_time) + '/' + str(self.csv_file_name)
+ print("csv_src_file: {}".format(csv_src_file))
+ print("csv_dst_file: {}".format(csv_dst_file))
+ shutil.move(csv_src_file, csv_dst_file)
+
+ def set_path(self, _path):
self.path = _path
- def set_date_time_directory(self,_date,_results_dir_name):
+ def set_date_time_directory(self, _date, _results_dir_name):
self.date = _date
self.results_dir_name = _results_dir_name
if self.date != "":
self.date_time_directory = str(self.date) + str("_") + str(self.results_dir_name)
else:
- self.date = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")).replace(':','-')
+ self.date = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")).replace(':', '-')
self.date_time_directory = self.date + str("_") + str(self.results_dir_name)
def build_date_time_directory(self):
@@ -136,49 +174,59 @@ class lf_report():
self.set_date_time_directory()
self.path_date_time = os.path.join(self.path, self.date_time_directory)
print("path_date_time {}".format(self.path_date_time))
- try:
+ try:
if not os.path.exists(self.path_date_time):
os.mkdir(self.path_date_time)
except:
self.path_date_time = os.path.join(self.current_path, self.date_time_directory)
if not os.path.exists(self.path_date_time):
os.mkdir(self.path_date_time)
- print("report path : {}".format(self.path_date_time))
+ print("report path : {}".format(self.path_date_time))
- def set_text(self,_text):
+ def set_text(self, _text):
self.text = _text
- def set_title(self,_title):
+ def set_title(self, _title):
self.title = _title
- def set_table_title(self,_table_title):
+ def set_table_title(self, _table_title):
self.table_title = _table_title
- def set_graph_title(self,_graph_title):
+ def set_graph_title(self, _graph_title):
self.graph_title = _graph_title
- def set_date(self,_date):
+    # derives the csv file name from the graph title / image file name
+ def set_csv_filename(self, _graph_title):
+ fname, ext = os.path.splitext(_graph_title)
+ self.csv_file_name = fname + ".csv"
+
+    # The _date is set when the class is instantiated / created, so set_date should be used with caution; it is used to synchronize results
+ def set_date(self, _date):
self.date = _date
- def set_table_dataframe(self,_dataframe):
+ def set_table_dataframe(self, _dataframe):
self.dataframe = _dataframe
- def set_table_dataframe_from_csv(self,_csv):
+ def set_table_dataframe_from_csv(self, _csv):
self.dataframe = pd.read_csv(_csv)
- def set_custom_html(self,_custom_html):
+ def set_custom_html(self, _custom_html):
self.custom_html = _custom_html
- def set_obj_html(self,_obj_title, _obj ):
+ def set_obj_html(self, _obj_title, _obj):
self.objective = _obj
self.obj_title = _obj_title
- def set_graph_image(self,_graph_image):
+ def set_graph_image(self, _graph_image):
self.graph_image = _graph_image
+ def get_date(self):
+ return self.date
+
def get_path(self):
return self.path
- # get_path_date_time, get_report_path and need to be the same ()
+
+    # get_path_date_time and get_report_path need to return the same value
def get_path_date_time(self):
return self.path_date_time
@@ -186,12 +234,12 @@ class lf_report():
return self.path_date_time
def file_add_path(self, file):
- output_file = str(self.path_date_time)+'/'+ str(file)
+ output_file = str(self.path_date_time) + '/' + str(file)
print("output file {}".format(output_file))
return output_file
- def write_html(self):
- self.write_output_html = str(self.path_date_time)+'/'+ str(self.output_html)
+ def write_html(self):
+ self.write_output_html = str(self.path_date_time) + '/' + str(self.output_html)
print("write_output_html: {}".format(self.write_output_html))
try:
test_file = open(self.write_output_html, "w")
@@ -201,8 +249,8 @@ class lf_report():
print("write_html failed")
return self.write_output_html
- def write_html_with_timestamp(self):
- self.write_output_html = "{}/{}-{}".format(self.path_date_time,self.date,self.output_html)
+ def write_html_with_timestamp(self):
+ self.write_output_html = "{}/{}-{}".format(self.path_date_time, self.date, self.output_html)
print("write_output_html: {}".format(self.write_output_html))
try:
test_file = open(self.write_output_html, "w")
@@ -212,161 +260,232 @@ class lf_report():
print("write_html failed")
return self.write_output_html
+ # https://wkhtmltopdf.org/usage/wkhtmltopdf.txt
+ # page_size A4, A3, Letter, Legal
+ # orientation Portrait , Landscape
+ def write_pdf(self, _page_size='A4', _orientation='Portrait'):
+ # write logic to generate pdf here
+ # wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
+ # sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
+
+ options = {"enable-local-file-access": None,
+ 'orientation': _orientation,
+ 'page-size': _page_size} # prevent error Blocked access to file
+ self.write_output_pdf = str(self.path_date_time) + '/' + str(self.output_pdf)
+ pdfkit.from_file(self.write_output_html, self.write_output_pdf, options=options)
# https://wkhtmltopdf.org/usage/wkhtmltopdf.txt
# page_size A4, A3, Letter, Legal
# orientation Portrait , Landscape
- def write_pdf(self, _page_size = 'A4', _orientation = 'Portrait'):
- # write logic to generate pdf here
- # wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
- # sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
-
- options = {"enable-local-file-access" : None,
- 'orientation': _orientation,
- 'page-size': _page_size} # prevent error Blocked access to file
- self.write_output_pdf = str(self.path_date_time)+'/'+ str(self.output_pdf)
- pdfkit.from_file(self.write_output_html, self.write_output_pdf, options=options)
-
- # https://wkhtmltopdf.org/usage/wkhtmltopdf.txt
- # page_size A4, A3, Letter, Legal
- # orientation Portrait , Landscape
- def write_pdf_with_timestamp(self, _page_size = 'A4', _orientation = 'Portrait'):
- # write logic to generate pdf here
- # wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
- # sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
-
- options = {"enable-local-file-access" : None,
- 'orientation': _orientation,
- 'page-size': _page_size} # prevent error Blocked access to file
- self.write_output_pdf = "{}/{}-{}".format(self.path_date_time,self.date,self.output_pdf)
- pdfkit.from_file(self.write_output_html, self.write_output_pdf, options=options)
+ def write_pdf_with_timestamp(self, _page_size='A4', _orientation='Portrait'):
+ # write logic to generate pdf here
+ # wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
+ # sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
+ options = {"enable-local-file-access": None,
+ 'orientation': _orientation,
+ 'page-size': _page_size} # prevent error Blocked access to file
+ self.write_output_pdf = "{}/{}-{}".format(self.path_date_time, self.date, self.output_pdf)
+ pdfkit.from_file(self.write_output_html, self.write_output_pdf, options=options)
def generate_report(self):
- self.write_html()
+ self.write_html()
self.write_pdf()
def build_all(self):
self.build_banner()
+ self.start_content_div()
self.build_table_title()
self.build_table()
+ self.end_content_div()
def build_banner(self):
- self.banner_html = """
-
-
-
-
-
-
-
-
- BANNER
-
-
-
-
-
-
- """
+ # NOTE: {{ }} are the ESCAPED curly braces
+ self.banner_html = """
+
+
+
+
+
+
+
+
{title}
+
+
+
+
+
+
+
+
+
+ """.format(
+ title=self.title,
+ date=self.date,
+ )
self.html += self.banner_html
def build_table_title(self):
self.table_title_html = """
-        <!-- table-title markup; the HTML tags were elided in this diff view -->