Merged dipti-branch

This commit is contained in:
Dipti
2021-01-29 15:20:30 -08:00
4 changed files with 221 additions and 86 deletions

View File

@@ -19,6 +19,9 @@ import datetime
import base64
import xlsxwriter
import pandas as pd
import requests
import ast
def wpa_ent_list():
@@ -640,7 +643,6 @@ class Realm(LFCliBase):
num_sta_with_ips += 1
return num_sta_with_ips
def duration_time_to_seconds(self, time_string):
if isinstance(time_string, str):
pattern = re.compile(r"^(\d+)([dhms])$")
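For reference, a minimal standalone sketch of what this pattern does once the rest of the method (elided by the hunk) applies the unit multipliers; the multiplier values here are an assumption based on the [dhms] suffixes:
import re

def duration_to_seconds(time_string):
    # "<number><unit>" where unit is one of d/h/m/s, e.g. "30s", "2m", "1h"
    match = re.match(r"^(\d+)([dhms])$", time_string)
    if match is None:
        raise ValueError("expected <number><d|h|m|s>, got %r" % time_string)
    value, unit = int(match.group(1)), match.group(2)
    return value * {"d": 86400, "h": 3600, "m": 60, "s": 1}[unit]

duration_to_seconds("2m")   # 120
duration_to_seconds("30s")  # 30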
@@ -1140,7 +1142,6 @@ class L3CXProfile(BaseProfile):
return False
else:
return False
def monitor(self,
duration_sec=60,
monitor_interval=1,
@@ -1150,7 +1151,8 @@ class L3CXProfile(BaseProfile):
report_file=None,
output_format=None,
script_name=None,
arguments=None):
arguments=None,
compared_report=None):
try:
duration_sec = self.parse_time(duration_sec).seconds
except:
@@ -1164,16 +1166,22 @@ class L3CXProfile(BaseProfile):
raise ValueError("Monitor needs a list of Layer 3 connections")
if (monitor_interval is None) or (monitor_interval < 1):
raise ValueError("L3CXProfile::monitor wants monitor_interval >= 1 second")
#if col_names is None:
#raise ValueError("L3CXProfile::monitor wants a list of column names to monitor")
if output_format is not None:
if output_format.lower() != report_file.split('.')[-1]:
if output_format.lower() != 'excel':
raise ValueError('Filename %s does not match output format %s' % (report_file, output_format))
else:
output_format = report_file.split('.')[-1]
#retrieve compared report if specified - turn into dataframe
if compared_report is not None:
compared_format = compared_report.split('.')[-1].lower()
supported_formats = ['csv', 'json', 'stata', 'pickle', 'html']
if compared_format not in supported_formats:
    raise ValueError('Unsupported compared_report format: %s' % compared_format)
# pandas provides a matching read_<format> reader for each supported format
# (note: pd.read_html returns a list of DataFrames rather than a single frame)
previous_data_df = getattr(pd, 'read_' + compared_format)(compared_report)
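The comparison itself is not implemented yet (the CLI help later in this diff marks --compared_report as UNDER CONSTRUCTION). Purely as a sketch, previous_data_df could eventually be lined up against the freshly collected dataframe along these lines; the key and value column names are illustrative assumptions and the value columns are assumed numeric:
def compare_monitor_runs(previous_df, current_df, key_cols, value_cols):
    # align the two runs on the key columns and report per-column deltas
    merged = previous_df.merge(current_df, on=key_cols, suffixes=('_prev', '_curr'))
    for col in value_cols:
        merged[col + '_delta'] = merged[col + '_curr'] - merged[col + '_prev']
    return merged

# e.g. compare_monitor_runs(previous_data_df, df, key_cols=['name'], value_cols=['bytes-rd'])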
# Step 1, column names . what is this for?
# Step 1, column names
fields=None
if col_names is not None and len(col_names) > 0:
fields = ",".join(col_names)
@@ -1217,44 +1225,47 @@ class L3CXProfile(BaseProfile):
self._fail("FAIL: Not all stations increased traffic")
self.exit_fail()
old_cx_rx_values = new_cx_rx_values
#write csv file here - open, write, and close file
time.sleep(monitor_interval)
if self.debug:
print("Printing value map...")
print(value_map)
#step 3 organize data
endpoints=list()
for endpoint in value_map.values():
endpoints.append(endpoint['endpoint'])
endpoints2 = []
for y in range(0, len(endpoints)):
for x in range(0, len(endpoints[0])):
endpoints2.append(list(list(endpoints[y][x].values())[0].values()))
import itertools
timestamps2 = list(
itertools.chain.from_iterable(itertools.repeat(x, len(created_cx.split(','))) for x in timestamps))
for point in range(0, len(endpoints2)):
endpoints2[point].insert(0, timestamps2[point])
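To make the itertools line above concrete: each polling timestamp is repeated once per monitored connection so every endpoint row receives its own copy (values below are illustrative):
import itertools
timestamps = ['12:00:01', '12:00:02']   # two polling intervals
created_cx = 'cx-a,cx-b,cx-c'           # three monitored connections
repeated = list(itertools.chain.from_iterable(
    itertools.repeat(t, len(created_cx.split(','))) for t in timestamps))
# repeated == ['12:00:01', '12:00:01', '12:00:01', '12:00:02', '12:00:02', '12:00:02']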
# step 4 save and close
header_row.insert(0, 'Timestamp')
# print(header_row)
if output_format.lower() in ['excel', 'xlsx'] or report_file.split('.')[-1] == 'xlsx':
workbook = xlsxwriter.Workbook(report_file)
worksheet = workbook.add_worksheet()
for col_num, data in enumerate(header_row):
worksheet.write(0, col_num, data)
row_num = 1
for x in endpoints2:
for col_num, data in enumerate(x):
worksheet.write(row_num, col_num, str(data))
row_num += 1
workbook.close()
else:
df = pd.DataFrame(endpoints2)
df.columns = header_row
import requests
import ast
#organize data
full_test_data_list = []
for test_timestamp, data in value_map.items():
#reduce the endpoint data to single dictionary of dictionaries
for datum in data["endpoint"]:
for endpoint_data in datum.values():
if self.debug:
print(endpoint_data)
endpoint_data["Timestamp"] = test_timestamp
full_test_data_list.append(endpoint_data)
if self.debug:
print("Printing full data list...")
print(full_test_data_list)
header_row.append("Timestamp")
header_row.append('Timestamp milliseconds')
df = pd.DataFrame(full_test_data_list)
df["Timestamp milliseconds"] = (df["Timestamp"] - datetime.datetime(1970,1,1)).dt.total_seconds()*1000
#round entire column
df["Timestamp milliseconds"]=df["Timestamp milliseconds"].astype(int)
df["Timestamp"]=df["Timestamp"].apply(lambda x:x.strftime("%m/%d/%Y %I:%M:%S"))
df=df[["Timestamp","Timestamp milliseconds", *header_row[:-2]]]
#compare previous data to current data
try:
systeminfo = ast.literal_eval(requests.get('http://'+str(self.lfclient_host)+':'+str(self.lfclient_port)).text)
except (requests.exceptions.RequestException, ValueError, SyntaxError):
# retry once if the first query to the LANforge GUI fails or returns unparsable text
systeminfo = ast.literal_eval(requests.get('http://'+str(self.lfclient_host)+':'+str(self.lfclient_port)).text)
df['LFGUI Release'] = systeminfo['VersionInfo']['BuildVersion']
df['Script Name'] = script_name
df['Arguments'] = arguments
for x in ['LFGUI Release', 'Script Name', 'Arguments']:
df.loc[1:, x] = ''  # keep these values on the first row only; .loc avoids pandas chained-assignment warnings
if output_format == 'hdf':
@@ -1264,14 +1275,15 @@ class L3CXProfile(BaseProfile):
if output_format == 'png':
fig = df.plot().get_figure()
fig.savefig(report_file)
if output_format.lower() in ['excel', 'xlsx'] or report_file.split('.')[-1] == 'xlsx':
df.to_excel(report_file, index=False)
if output_format == 'df':
return df
supported_formats = ['csv','json','stata','pickle']
supported_formats = ['csv', 'json', 'stata', 'pickle','html']
for x in supported_formats:
if output_format.lower() == x or report_file.split('.')[-1] == x:
exec('df.to_' + x + '("' + report_file + '")')
else:
pass
# only the csv/html writers accept index=; to_json, to_stata and to_pickle would raise on it
writer_kwargs = {'index': False} if x in ('csv', 'html') else {}
getattr(df, 'to_' + x)(report_file, **writer_kwargs)
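As a standalone illustration of the writer dispatch above (file paths and data are placeholders):
import pandas as pd
df = pd.DataFrame({'Name': ['sta0000'], 'Rx Rate': [1000]})
for fmt in ['csv', 'json', 'pickle']:
    kwargs = {'index': False} if fmt in ('csv', 'html') else {}
    getattr(df, 'to_' + fmt)('/tmp/data.' + fmt, **kwargs)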
def refresh_cx(self):
for cx_name in self.created_cx.keys():
@@ -1707,7 +1719,6 @@ class L4CXProfile(LFCliBase):
if debug:
print(fields)
else:
#todo:rename this...
header_row=list((list(self.json_get("/layer4/all")['endpoint'][0].values())[0].keys()))
if debug:
print(header_row)
@@ -1718,20 +1729,22 @@ class L4CXProfile(LFCliBase):
end_time = start_time + datetime.timedelta(seconds=duration_sec)
sleep_interval = round(duration_sec // 5)
if debug:
print("Sleep_interval is..." + sleep_interval)
print("Start time is..."+ start_time)
print("End time is..."+ end_time)
print("Sleep_interval is %s ", sleep_interval)
print("Start time is %s " , start_time)
print("End time is %s " ,end_time)
value_map = dict()
passes = 0
expected_passes = 0
timestamps = []
for test in range(1+iterations):
while datetime.datetime.now() < end_time:
response=self.json_get("layer4/all")
#response = self.json_get("layer4/list?fields=urls/s")
if fields is None:
response = self.json_get("/layer4/all")
else:
response = self.json_get("/layer4/%s?fields=%s" % (created_cx, fields))
if debug:
print(response)
if "endpoint" not in response:
if response is None:
print(response)
raise ValueError("Cannot find any endpoints")
if monitor:
@@ -1744,7 +1757,7 @@ class L4CXProfile(LFCliBase):
value_map[t] = response
expected_passes += 1
if self.check_errors(debug):
if self.__check_request_rate(): #need to changed
if self.check_request_rate():
passes += 1
else:
self._fail("FAIL: Request rate did not exceed 90% target rate")
@@ -1752,10 +1765,9 @@ class L4CXProfile(LFCliBase):
else:
self._fail("FAIL: Errors found getting to %s " % self.url)
self.exit_fail()
#check monitor sleep time
time.sleep(monitor_interval)
print(value_map)
# step 3 organize data
endpoints = list()
for endpoint in value_map.values():
@@ -1787,13 +1799,14 @@ class L4CXProfile(LFCliBase):
workbook.close()
else:
df = pd.DataFrame(endpoints2)
print(header_row)
df.columns = header_row
import requests
import ast
try:
systeminfo = ast.literal_eval(requests.get('http://localhost:8090').text)
systeminfo = ast.literal_eval(requests.get('http://'+str(self.lfclient_host)+':'+str(self.lfclient_port)).text)
except:
systeminfo = ast.literal_eval(requests.get('http://localhost:8090').text)
systeminfo = ast.literal_eval(requests.get('http://'+str(self.lfclient_host)+':'+str(self.lfclient_port)).text)
df['LFGUI Release'] = systeminfo['VersionInfo']['BuildVersion']
df['Script Name'] = script_name
df['Arguments'] = arguments
@@ -2072,7 +2085,10 @@ class WifiMonitor:
baseurl=self.lfclient_url,
debug=self.debug)
def admin_up(self):
up_request = LFUtils.port_up_request(resource_id=self.resource, port_name=self.monitor_name)
self.local_realm.json_post("/cli-json/set_port", up_request)
@@ -2552,7 +2568,6 @@ class VRProfile(LFCliBase):
else:
raise ValueError("vr_name must be set. Current name: %s" % self.vr_name)
def create(self, resource, upstream_port="eth1", debug=False,
upstream_subnets="20.20.20.0/24", upstream_nexthop="20.20.20.1",
local_subnets="10.40.0.0/24", local_nexthop="10.40.3.198",
@@ -3123,7 +3138,6 @@ class MACVLANProfile(LFCliBase):
# And now see if they are gone
LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url, port_list=self.created_macvlans)
def admin_up(self):
for macvlan in self.created_macvlans:
self.local_realm.admin_up(macvlan)
@@ -3660,7 +3674,10 @@ class StationProfile:
time.sleep(delay)
# And now see if they are gone
LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url, port_list=desired_stations)
# Checks for errors in initialization values and creates specified number of stations using init parameters
def create(self, radio,

View File

@@ -14,6 +14,7 @@ CURR_TEST_NUM=0
CURR_TEST_NAME="BLANK"
STOP_NUM=9
DATA_DIR="${TEST_DIR}"
REPORT_DIR="/home/lanforge/html-reports"
@@ -22,10 +23,15 @@ REPORT_DIR="/home/lanforge/html-reports"
#Test array
testCommands=(
"./example_security_connection.py --num_stations $NUM_STA --ssid jedway-r8000-36 --passwd jedway-r8000-36 --radio $RADIO_USED --security wpa"
"./example_security_connection.py --num_stations $NUM_STA --ssid $SSID_USED --passwd $SSID_USED --radio $RADIO_USED --security wpa2"
"./example_security_connection.py --num_stations $NUM_STA --ssid jedway-wep-48 --passwd jedway-wep-48 --radio $RADIO_USED --security wep"
"./example_security_connection.py --num_stations $NUM_STA --ssid jedway-wpa3-1 --passwd jedway-wpa3-1 --radio $RADIO_USED --security wpa3"
"./test_ipv4_connection.py --radio wiphy2 --num_stations $NUM_STA --ssid $SSID_USED --passwd $PASSWD_USED --security $SECURITY"
"./test_generic.py --radio $RADIO_USED --ssid $SSID_USED --passwd $PASSWD_USED --num_stations $NUM_STA --type lfping --dest 10.40.0.1 --security $SECURITY"
"./test_generic.py --radio $RADIO_USED --ssid $SSID_USED --passwd $PASSWD_USED --num_stations $NUM_STA --type speedtest --speedtest_min_up 20 --speedtest_min_dl 20 --speedtest_max_ping 150 --security $SECURITY"
"./test_ipv4_l4_urls_per_ten.py --radio $RADIO_USED --num_stations $NUM_STA --security $SECURITY --ssid $SSID_USED --passwd $PASSWD_USED --num_tests 1 --requests_per_ten 600 --target_per_ten 600"
@@ -34,6 +40,16 @@ testCommands=(
"./test_ipv4_variable_time.py --radio wiphy1 --ssid $SSID_USED --passwd $PASSWD_USED --security $SECURITY --test_duration 30s --output_format excel"
"./test_ipv4_variable_time.py --radio wiphy1 --ssid $SSID_USED --passwd $PASSWD_USED --security $SECURITY --test_duration 30s --output_format csv"
#"./create_bridge.py --radio wiphy1 --upstream_port eth1 --target_device sta0000"
#"./create_l3.py --radio wiphy1 --ssid $SSID_USED --passwd $PASSWD_USED --security $SECURITY"
#"./create_l4.py --radio wiphy1 --ssid $SSID_USED --passwd $PASSWD_USED --security $SECURITY"
#"./create_macvlan.py --radio wiphy1"
@@ -66,20 +82,28 @@ function echo_print() {
echo "Beginning $CURR_TEST_NAME test..." >>~/test_all_output_file.txt
}
results=()
detailedresults=()
NOW=$(date +"%Y-%m-%d-%H-%M")
NOW="${NOW/:/-}"
TEST_DIR="/home/lanforge/report-data/${NOW}"
mkdir "$TEST_DIR"
function run_test() {
for i in "${testCommands[@]}"; do
CURR_TEST_NAME=${i%%.py*}
CURR_TEST_NAME=${CURR_TEST_NAME#./*}
CURR_TEST_NUM="${name_to_num[$CURR_TEST_NAME]}"
echo "$CURR_TEST_NAME $CURR_TEST_NUM"
if (( $CURR_TEST_NUM > $STOP_NUM )) || (( $STOP_NUM == $CURR_TEST_NUM )) && (( $STOP_NUM != 0 )); then
exit 1
fi
echo ""
echo "Test $CURR_TEST_NUM: $CURR_TEST_NAME"
@@ -94,6 +118,15 @@ function run_test() {
results+=("<tr><td>${CURR_TEST_NAME}</td><td class='scriptdetails'>${i}</td><td class='success'>Success</td><td><button onclick=\"toggle_visibility('${i}');\">Show/Hide</button></td></tr>")
else
results+=("<tr><td>${CURR_TEST_NAME}</td><td class='scriptdetails'>${i}</td><td class='failure'>Failure</td><td><button onclick=\"toggle_visibility('${i}');\">Show/Hide</button></td></tr>")
fi
fi
done
@@ -110,6 +143,7 @@ function check_args() {
function html_generator() {
NOW=$(date +"%Y-%m-%d-%T")
header="<html>
<head>
<title>Test All Scripts Results $NOW</title>
<style>
@@ -168,5 +202,32 @@ function html_generator() {
check_args $1 $2
run_test
echo "${detailedresults}"
html_generator
#test generic and fileio are for macvlans

View File

@@ -197,6 +197,34 @@ python3 ./test_ipv4_l4_urls_per_ten.py
output_form=args.output_format
#Create directory
if args.report_file is None:
try:
homedir = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-%M")).replace(':','-')+'test_ipv4_l4_urls_per_ten'
path = os.path.join('/home/lanforge/report-data/',homedir)
os.mkdir(path)
except:
path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
print('Saving file to local directory')
else:
pass
if args.report_file is None:
if args.output_format in ['csv','json','html','hdf','stata','pickle','pdf','png','df','parquet','xlsx']:
rpt_file=path+'/data.' + args.output_format
output=args.output_format
else:
print('Defaulting data file output type to Excel')
rpt_file=path+'/data.xlsx'
output='xlsx'
else:
rpt_file=args.report_file
if args.output_format is None:
output=str(args.report_file).split('.')[-1]
else:
output=args.output_format
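For example (timestamp illustrative), running this script with --output_format csv and no --report_file resolves to roughly:
# homedir  -> '2021-01-29-15-20test_ipv4_l4_urls_per_ten'
# path     -> '/home/lanforge/report-data/2021-01-29-15-20test_ipv4_l4_urls_per_ten'
# rpt_file -> '/home/lanforge/report-data/2021-01-29-15-20test_ipv4_l4_urls_per_ten/data.csv'
# output   -> 'csv'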
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta-1, padding_number_=10000,
radio=args.radio)
@@ -223,11 +251,19 @@ python3 ./test_ipv4_l4_urls_per_ten.py
layer4traffic=','.join([[*x.keys()][0] for x in ip_test.local_realm.json_get('layer4')['endpoint']])
except:
pass
ip_test.l4cxprofile.monitor(col_names=['Name','bytes-rd','rx rate (1 min)', 'urls/s'],
report_file=rpt_file,
duration_sec=ip_test.local_realm.parse_time(args.test_duration).total_seconds(),
created_cx=layer4traffic,
output_format=output,
script_name='test_ipv4_l4_urls_per_ten',
arguments=args,
debug=args.debug)

View File

@@ -78,7 +78,6 @@ class IPV4VariableTime(LFCliBase):
self.station_profile.mode = mode
if self.ap is not None:
self.station_profile.set_command_param("add_sta", "ap",self.ap)
#self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
self.cx_profile.host = self.host
@@ -165,11 +164,13 @@ python3 ./test_ipv4_variable_time.py
--ssid netgear
--password admin123
--test_duration 2m (default)
--a_min 1000
--a_min 3000
--b_min 1000
--ap "00:0e:8e:78:e1:76"
--output_format csv
--report_file ~/Documents/results.csv (if csv file - please use another extension for other files)
--report_file ~/Documents/results.csv (Example of csv file output - please use another extension for other files)
--compared_report ~/Documents/results_prev.csv (Example of csv file retrieval - please use another extension for other files) - UNDER CONSTRUCTION
--col_names 'name,tx bytes,rx bytes,dropped'
--debug
''')
@@ -193,8 +194,10 @@ python3 ./test_ipv4_variable_time.py
optional_args.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
optional_args.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
optional_args.add_argument('--test_duration', help='--test_duration sets the duration of the test', default="2m")
optional_args.add_argument('--col_names', help='Which columns you want to monitor', default=['Name','Rx Rate','Rx PDUs'])
optional_args.add_argument('--col_names', help='Comma-separated list of columns to monitor', default=None)
optional_args.add_argument('--compared_report', help='Path to a previous report file to compare against the new report', default=None)
args = parser.parse_args()
#['name','tx bytes', 'rx bytes','dropped']
num_sta = 2
if (args.num_stations is not None) and (int(args.num_stations) > 0):
@@ -214,11 +217,11 @@ python3 ./test_ipv4_variable_time.py
if args.report_file is None:
if args.output_format in ['csv','json','html','hdf','stata','pickle','pdf','png','df','parquet','xlsx']:
report_f=path+'/data.' + args.output_format
report_f='/home/lanforge/report-data/'+homedir+'/data.' + args.output_format
output=args.output_format
else:
print('Defaulting data file output type to Excel')
report_f=path+'/data.xlsx'
report_f='/home/lanforge/report-data/'+homedir+'/data.xlsx'
output='xlsx'
else:
report_f=args.report_file
@@ -227,6 +230,17 @@ python3 ./test_ipv4_variable_time.py
else:
output=args.output_format
#Retrieve last data file
compared_rept=None
if args.compared_report:
#check if last report format is same as current rpt format
last_report_format = args.compared_report.split('.')[-1]
if output == last_report_format:
compared_rept = args.compared_report
else:
raise ValueError("Compared report format (%s) does not match the new report format (%s); please use the same file type for both." % (last_report_format, output))
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta-1, padding_number_=10000, radio=args.radio)
ip_var_test = IPV4VariableTime(host=args.mgr,
port=args.mgr_port,
@@ -257,16 +271,23 @@ python3 ./test_ipv4_variable_time.py
layer3connections=','.join([[*x.keys()][0] for x in ip_var_test.local_realm.json_get('endp')['endpoint']])
except:
raise ValueError('Try setting the upstream port flag if your device does not have an eth1 port')
if args.col_names is not None:
print(args.col_names)
if type(args.col_names) is not list:
col_names=None
col_names=list(args.col_names.split(","))
else:
col_names = args.col_names
print(report_f)
else:
col_names=None
if args.debug:
print("Column names are...")
print(col_names)
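For example, --col_names arrives from argparse as a single comma-separated string and is split here (illustrative value):
args_col_names = 'name,tx bytes,rx bytes,dropped'
list(args_col_names.split(","))   # -> ['name', 'tx bytes', 'rx bytes', 'dropped']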
ip_var_test.l3cxprofile.monitor(col_names=col_names,
report_file=report_f,
duration_sec=ip_var_test.local_realm.parse_time(args.test_duration).total_seconds(),
created_cx= layer3connections,
output_format=output,
compared_report=compared_rept,
script_name='test_ipv4_variable_time',
arguments=args)