Mirror of https://github.com/Telecominfraproject/wlan-lanforge-scripts.git, synced 2025-11-02 11:48:03 +00:00

1. Create pandas_extensions.py
2. Remove unused imports

Signed-off-by: Matthew Stidham <stidmatt@gmail.com>
@@ -1,3 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
# Class holds default settings for json requests -
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
@@ -6,8 +8,6 @@ import os
|
||||
import importlib
|
||||
import urllib
|
||||
from urllib import request
|
||||
from urllib import error
|
||||
from urllib import parse
|
||||
import json
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
# Define useful common methods -
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
@@ -7,8 +9,7 @@ import importlib
|
||||
import pprint
|
||||
import time
|
||||
from time import sleep
|
||||
from random import seed
|
||||
from random import randint
|
||||
from random import seed, randint
|
||||
import re
|
||||
import ipaddress
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import signal
|
||||
import traceback
|
||||
# Extend this class to use common set of debug and request features for your script
|
||||
from pprint import pprint
|
||||
@@ -11,9 +10,6 @@ import random
|
||||
import string
|
||||
import datetime
|
||||
import argparse
|
||||
import csv
|
||||
import pandas as pd
|
||||
import re
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
@@ -661,118 +657,3 @@ class LFCliBase:
|
||||
abgnAX : 12,
|
||||
bgnAX : 13
|
||||
"""
|
||||
|
||||
#================ Pandas Dataframe Functions ======================================
|
||||
|
||||
#takes any dataframe and returns the specified file extension of it
|
||||
def df_to_file(self, output_f=None,dataframe=None, save_path=None):
|
||||
if output_f.lower() == 'hdf':
|
||||
import tables
|
||||
dataframe.to_hdf(save_path.replace('csv','h5',1), 'table', append=True)
|
||||
if output_f.lower() == 'parquet':
|
||||
import pyarrow as pa
|
||||
dataframe.to_parquet(save_path.replace('csv','parquet',1), engine='pyarrow')
|
||||
if output_f.lower() == 'png':
|
||||
fig = dataframe.plot().get_figure()
|
||||
fig.savefig(save_path.replace('csv','png',1))
|
||||
if output_f.lower() == 'xlsx':
|
||||
dataframe.to_excel(save_path.replace('csv','xlsx',1))
|
||||
if output_f.lower() == 'json':
|
||||
dataframe.to_json(save_path.replace('csv','json',1))
|
||||
if output_f.lower() == 'stata':
|
||||
dataframe.to_stata(save_path.replace('csv','dta',1))
|
||||
if output_f.lower() == 'pickle':
|
||||
dataframe.to_pickle(save_path.replace('csv','pkl',1))
|
||||
if output_f.lower() == 'html':
|
||||
dataframe.to_html(save_path.replace('csv','html',1))
|
||||
|
||||
#takes any format of a file and returns a dataframe of it
|
||||
def file_to_df(self,file_name):
|
||||
if file_name.split('.')[-1] == 'csv':
|
||||
return pd.read_csv(file_name)
|
||||
|
||||
#only works for test_ipv4_variable_time at the moment
|
||||
def compare_two_df(self,dataframe_one=None,dataframe_two=None):
|
||||
#df one = current report
|
||||
#df two = compared report
|
||||
pd.set_option("display.max_rows", None, "display.max_columns", None)
|
||||
#get all of common columns besides Timestamp, Timestamp milliseconds
|
||||
common_cols = list(set(dataframe_one.columns).intersection(set(dataframe_two.columns)))
|
||||
cols_to_remove = ['Timestamp milliseconds epoch','Timestamp','LANforge GUI Build: 5.4.3']
|
||||
com_cols = [i for i in common_cols if i not in cols_to_remove]
|
||||
#check if dataframes have the same endpoints
|
||||
if dataframe_one.name.unique().tolist().sort() == dataframe_two.name.unique().tolist().sort():
|
||||
endpoint_names = dataframe_one.name.unique().tolist()
|
||||
if com_cols is not None:
|
||||
dataframe_one = dataframe_one[[c for c in dataframe_one.columns if c in com_cols]]
|
||||
dataframe_two = dataframe_two[[c for c in dataframe_one.columns if c in com_cols]]
|
||||
dataframe_one = dataframe_one.loc[:, ~dataframe_one.columns.str.startswith('Script Name:')]
|
||||
dataframe_two = dataframe_two.loc[:, ~dataframe_two.columns.str.startswith('Script Name:')]
|
||||
lowest_duration=min(dataframe_one['Duration elapsed'].max(),dataframe_two['Duration elapsed'].max())
|
||||
print("The max duration in the new dataframe will be... " + str(lowest_duration))
|
||||
|
||||
compared_values_dataframe = pd.DataFrame(columns=[col for col in com_cols if not col.startswith('Script Name:')])
|
||||
cols = compared_values_dataframe.columns.tolist()
|
||||
cols=sorted(cols, key=lambda L: (L.lower(), L))
|
||||
compared_values_dataframe= compared_values_dataframe[cols]
|
||||
print(compared_values_dataframe)
|
||||
for duration_elapsed in range(lowest_duration):
|
||||
for endpoint in endpoint_names:
|
||||
#check if value has a space in it or is a str.
|
||||
# if value as a space, only take value before space for calc, append that calculated value after space.
|
||||
#if str. check if values match from 2 df's. if values do not match, write N/A
|
||||
for_loop_df1 = dataframe_one.loc[(dataframe_one['name'] == endpoint) & (dataframe_one['Duration elapsed'] == duration_elapsed)]
|
||||
for_loop_df2 = dataframe_two.loc[(dataframe_one['name'] == endpoint) & (dataframe_two['Duration elapsed'] == duration_elapsed)]
|
||||
# print(for_loop_df1)
|
||||
# print(for_loop_df2)
|
||||
cols_to_loop = [i for i in com_cols if i not in ['Duration elapsed', 'Name', 'Script Name: test_ipv4_variable_time']]
|
||||
cols_to_loop=sorted(cols_to_loop, key=lambda L: (L.lower(), L))
|
||||
print(cols_to_loop)
|
||||
row_to_append={}
|
||||
row_to_append["Duration elapsed"] = duration_elapsed
|
||||
for col in cols_to_loop:
|
||||
print(col)
|
||||
print(for_loop_df1)
|
||||
#print(for_loop_df2)
|
||||
print(for_loop_df1.at[0, col])
|
||||
print(for_loop_df2.at[0, col])
|
||||
if type(for_loop_df1.at[0, col]) == str and type(for_loop_df2.at[0, col]) == str:
|
||||
if (' ' in for_loop_df1.at[0,col]) == True:
|
||||
#do subtraction
|
||||
new_value = float(for_loop_df1.at[0, col].split(" ")[0]) - float(for_loop_df2.at[0, col].split(" ")[0])
|
||||
#add on last half of string
|
||||
new_value = str(new_value)+ for_loop_df2.at[0, col].split(" ")[1]
|
||||
# print(new_value)
|
||||
row_to_append[col] = new_value
|
||||
else:
|
||||
if for_loop_df1.at[0, col] != for_loop_df2.at[0, col]:
|
||||
row_to_append[col] = 'NaN'
|
||||
else:
|
||||
row_to_append[col] = for_loop_df1.at[0,col]
|
||||
elif type(for_loop_df1.at[0, col]) == int and type(for_loop_df2.at[0, col]) == int or type(for_loop_df1.at[0, col]) == float and type(for_loop_df2.at[0,col]) == float:
|
||||
new_value = for_loop_df1.at[0, col] - for_loop_df2.at[0, col]
|
||||
row_to_append[col] = new_value
|
||||
compared_values_dataframe = compared_values_dataframe.append(row_to_append, ignore_index=True,)
|
||||
print(compared_values_dataframe)
|
||||
#add col name to new df
|
||||
print(dataframe_one)
|
||||
print(dataframe_two)
|
||||
print(compared_values_dataframe)
|
||||
else:
|
||||
ValueError("Unable to execute report comparison due to inadequate file commonalities. ")
|
||||
exit(1)
|
||||
else:
|
||||
ValueError("Two files do not have the same endpoints. Please try file comparison with files that have the same endpoints.")
|
||||
exit(1)
|
||||
|
||||
|
||||
#take those columns and separate those columns from others in DF.
|
||||
|
||||
|
||||
pass
|
||||
#return compared_df
|
||||
|
||||
def append_df_to_file(self,dataframe, file_name):
|
||||
pass
|
||||
|
||||
# ~class
|
||||
|
||||
118  py-json/LANforge/pandas_extensions.py  (new file)
@@ -0,0 +1,118 @@
#!/usr/bin/env python3

import pandas as pd

class pandas_extensions:

    # ================ Pandas Dataframe Functions ======================================

    # takes any dataframe and returns the specified file extension of it
    def df_to_file(self, output_f=None, dataframe=None, save_path=None):
        if output_f.lower() == 'hdf':
            dataframe.to_hdf(save_path.replace('csv', 'h5', 1), 'table', append=True)
        if output_f.lower() == 'parquet':
            dataframe.to_parquet(save_path.replace('csv', 'parquet', 1), engine='pyarrow')
        if output_f.lower() == 'png':
            fig = dataframe.plot().get_figure()
            fig.savefig(save_path.replace('csv', 'png', 1))
        if output_f.lower() == 'xlsx':
            dataframe.to_excel(save_path.replace('csv', 'xlsx', 1))
        if output_f.lower() == 'json':
            dataframe.to_json(save_path.replace('csv', 'json', 1))
        if output_f.lower() == 'stata':
            dataframe.to_stata(save_path.replace('csv', 'dta', 1))
        if output_f.lower() == 'pickle':
            dataframe.to_pickle(save_path.replace('csv', 'pkl', 1))
        if output_f.lower() == 'html':
            dataframe.to_html(save_path.replace('csv', 'html', 1))

    # takes any format of a file and returns a dataframe of it
    def file_to_df(self, file_name):
        if file_name.split('.')[-1] == 'csv':
            return pd.read_csv(file_name)

    # only works for test_ipv4_variable_time at the moment
    def compare_two_df(self, dataframe_one=None, dataframe_two=None):
        # df one = current report
        # df two = compared report
        pd.set_option("display.max_rows", None, "display.max_columns", None)
        # get all of common columns besides Timestamp, Timestamp milliseconds
        common_cols = list(set(dataframe_one.columns).intersection(set(dataframe_two.columns)))
        cols_to_remove = ['Timestamp milliseconds epoch', 'Timestamp', 'LANforge GUI Build: 5.4.3']
        com_cols = [i for i in common_cols if i not in cols_to_remove]
        # check if dataframes have the same endpoints
        if dataframe_one.name.unique().tolist().sort() == dataframe_two.name.unique().tolist().sort():
            endpoint_names = dataframe_one.name.unique().tolist()
            if com_cols is not None:
                dataframe_one = dataframe_one[[c for c in dataframe_one.columns if c in com_cols]]
                dataframe_two = dataframe_two[[c for c in dataframe_one.columns if c in com_cols]]
                dataframe_one = dataframe_one.loc[:, ~dataframe_one.columns.str.startswith('Script Name:')]
                dataframe_two = dataframe_two.loc[:, ~dataframe_two.columns.str.startswith('Script Name:')]
                lowest_duration = min(dataframe_one['Duration elapsed'].max(), dataframe_two['Duration elapsed'].max())
                print("The max duration in the new dataframe will be... " + str(lowest_duration))

                compared_values_dataframe = pd.DataFrame(
                    columns=[col for col in com_cols if not col.startswith('Script Name:')])
                cols = compared_values_dataframe.columns.tolist()
                cols = sorted(cols, key=lambda L: (L.lower(), L))
                compared_values_dataframe = compared_values_dataframe[cols]
                print(compared_values_dataframe)
                for duration_elapsed in range(lowest_duration):
                    for endpoint in endpoint_names:
                        # check if value has a space in it or is a str.
                        # if value has a space, only take value before space for calc, append that calculated value after space.
                        # if str, check if values match from 2 df's. if values do not match, write N/A
                        for_loop_df1 = dataframe_one.loc[(dataframe_one['name'] == endpoint) & (
                                dataframe_one['Duration elapsed'] == duration_elapsed)]
                        for_loop_df2 = dataframe_two.loc[(dataframe_one['name'] == endpoint) & (
                                dataframe_two['Duration elapsed'] == duration_elapsed)]
                        # print(for_loop_df1)
                        # print(for_loop_df2)
                        cols_to_loop = [i for i in com_cols if
                                        i not in ['Duration elapsed', 'Name', 'Script Name: test_ipv4_variable_time']]
                        cols_to_loop = sorted(cols_to_loop, key=lambda L: (L.lower(), L))
                        print(cols_to_loop)
                        row_to_append = {}
                        row_to_append["Duration elapsed"] = duration_elapsed
                        for col in cols_to_loop:
                            print(col)
                            print(for_loop_df1)
                            # print(for_loop_df2)
                            print(for_loop_df1.at[0, col])
                            print(for_loop_df2.at[0, col])
                            if type(for_loop_df1.at[0, col]) == str and type(for_loop_df2.at[0, col]) == str:
                                if (' ' in for_loop_df1.at[0, col]) == True:
                                    # do subtraction
                                    new_value = float(for_loop_df1.at[0, col].split(" ")[0]) - float(
                                        for_loop_df2.at[0, col].split(" ")[0])
                                    # add on last half of string
                                    new_value = str(new_value) + for_loop_df2.at[0, col].split(" ")[1]
                                    # print(new_value)
                                    row_to_append[col] = new_value
                                else:
                                    if for_loop_df1.at[0, col] != for_loop_df2.at[0, col]:
                                        row_to_append[col] = 'NaN'
                                    else:
                                        row_to_append[col] = for_loop_df1.at[0, col]
                            elif type(for_loop_df1.at[0, col]) == int and type(for_loop_df2.at[0, col]) == int or type(
                                    for_loop_df1.at[0, col]) == float and type(for_loop_df2.at[0, col]) == float:
                                new_value = for_loop_df1.at[0, col] - for_loop_df2.at[0, col]
                                row_to_append[col] = new_value
                        compared_values_dataframe = compared_values_dataframe.append(row_to_append, ignore_index=True, )
                        print(compared_values_dataframe)
                # add col name to new df
                print(dataframe_one)
                print(dataframe_two)
                print(compared_values_dataframe)
            else:
                ValueError("Unable to execute report comparison due to inadequate file commonalities. ")
                exit(1)
        else:
            ValueError(
                "Two files do not have the same endpoints. Please try file comparison with files that have the same endpoints.")
            exit(1)

        # take those columns and separate those columns from others in DF.

        pass
        # return compared_df
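For context, a minimal usage sketch of the new module (not part of the commit). It assumes the repository root is already on sys.path, as the other scripts in this diff arrange, and it instantiates the class because its methods take self; note that the call sites changed later in this diff invoke the functions through the imported module object instead. The file names below are placeholders.

    import importlib

    pandas_extensions_mod = importlib.import_module("py-json.LANforge.pandas_extensions")

    ext = pandas_extensions_mod.pandas_extensions()
    df = ext.file_to_df("report.csv")                                      # CSV -> DataFrame
    ext.df_to_file(output_f='xlsx', dataframe=df, save_path="report.csv")  # writes report.xlsx next to the CSV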
@@ -1,12 +1,7 @@
|
||||
#!/usr/bin/env python3
|
||||
import re
|
||||
import time
|
||||
import pprint
|
||||
import csv
|
||||
import datetime
|
||||
import random
|
||||
import string
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
|
||||
class BaseProfile:
|
||||
|
||||
@@ -10,7 +10,6 @@ if sys.version_info[0] != 3:
|
||||
exit(1)
|
||||
|
||||
import os
|
||||
import time
|
||||
from time import sleep
|
||||
from urllib import error
|
||||
import pprint
|
||||
|
||||
@@ -426,7 +426,7 @@ class cv_test(Realm):
|
||||
_influx_bucket=args.influx_bucket)
|
||||
|
||||
# lf_wifi_capacity_test.py may be run / initiated by a remote system against a lanforge
|
||||
# the local_lf_report_dir is data is stored, if there is no local_lf_report_dir then the test is run directly on lanforge
|
||||
# the local_lf_report_dir is where data is stored, if there is no local_lf_report_dir then the test is run directly on lanforge
|
||||
if self.local_lf_report_dir == "":
|
||||
csv_path = "%s/kpi.csv" % (self.lf_report_dir)
|
||||
else:
|
||||
|
||||
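A short sketch of the path selection the corrected comment describes (only the first branch is visible in this hunk; the else branch below is an assumption inferred from that comment):

    if self.local_lf_report_dir == "":
        # test ran directly on the LANforge: read kpi.csv from the GUI report directory
        csv_path = "%s/kpi.csv" % self.lf_report_dir
    else:
        # test was initiated from a remote system: kpi.csv was pulled into local_lf_report_dir
        csv_path = "%s/kpi.csv" % self.local_lf_report_dir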
@@ -2,9 +2,7 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import time
|
||||
import base64
|
||||
|
||||
|
||||
|
||||
@@ -6,8 +6,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import time
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import csv
|
||||
import pandas as pd
|
||||
@@ -15,6 +14,7 @@ sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||
|
||||
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
||||
LFCliBase = lfcli_base.LFCliBase
|
||||
pandas_extensions = importlib.import_module("py-json.LANforge.pandas_extensions")
|
||||
|
||||
|
||||
class GenCXProfile(LFCliBase):
|
||||
@@ -597,12 +597,12 @@ class GenCXProfile(LFCliBase):
|
||||
|
||||
# comparison to last report / report inputted
|
||||
if compared_report is not None:
|
||||
compared_df = self.compare_two_df(dataframe_one=self.file_to_df(report_file),
|
||||
dataframe_two=self.file_to_df(compared_report))
|
||||
compared_df = pandas_extensions.compare_two_df(dataframe_one=pandas_extensions.file_to_df(report_file),
|
||||
dataframe_two=pandas_extensions.file_to_df(compared_report))
|
||||
exit(1)
|
||||
# append compared df to created one
|
||||
if output_format.lower() != 'csv':
|
||||
self.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
|
||||
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
|
||||
else:
|
||||
if output_format.lower() != 'csv':
|
||||
self.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
|
||||
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
from pprint import pprint
|
||||
import pprint
|
||||
import time
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import csv
|
||||
import pandas as pd
|
||||
@@ -14,6 +13,7 @@ sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||
|
||||
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
||||
LFCliBase = lfcli_base.LFCliBase
|
||||
pandas_extensions = importlib.import_module("py-json.LANforge.pandas_extensions")
|
||||
|
||||
|
||||
class L3CXProfile(LFCliBase):
|
||||
@@ -288,15 +288,15 @@ class L3CXProfile(LFCliBase):
|
||||
|
||||
# comparison to last report / report inputted
|
||||
if compared_report is not None:
|
||||
compared_df = self.compare_two_df(dataframe_one=self.file_to_df(report_file),
|
||||
dataframe_two=self.file_to_df(compared_report))
|
||||
compared_df = pandas_extensions.compare_two_df(dataframe_one=pandas_extensions.file_to_df(report_file),
|
||||
dataframe_two=pandas_extensions.file_to_df(compared_report))
|
||||
exit(1)
|
||||
# append compared df to created one
|
||||
if output_format.lower() != 'csv':
|
||||
self.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
|
||||
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
|
||||
else:
|
||||
if output_format.lower() != 'csv':
|
||||
self.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
|
||||
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
|
||||
|
||||
def refresh_cx(self):
|
||||
for cx_name in self.created_cx.keys():
|
||||
|
||||
@@ -2,22 +2,16 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import re
|
||||
import time
|
||||
import pprint
|
||||
import datetime
|
||||
import base64
|
||||
import csv
|
||||
from pprint import pprint
|
||||
import time
|
||||
import random
|
||||
import string
|
||||
import datetime
|
||||
|
||||
|
||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||
|
||||
lfdata = importlib.import_module("py-json.lfdata")
|
||||
LFDataCollection = lfdata.LFDataCollection
|
||||
base_profile = importlib.import_module("py-json.base_profile")
|
||||
BaseProfile = base_profile.BaseProfile
|
||||
|
||||
@@ -674,7 +668,7 @@ class L3CXProfile2(BaseProfile):
|
||||
end_time = start_time + datetime.timedelta(seconds=duration_sec)
|
||||
|
||||
#create lf data object
|
||||
lf_data_collection= LFDataCollection(local_realm=self.local_realm,debug=self.debug)
|
||||
lf_data_collection = LFDataCollection(local_realm=self.local_realm,debug=self.debug)
|
||||
while datetime.datetime.now() < end_time:
|
||||
csvwriter.writerow(lf_data_collection.monitor_interval(start_time_=start_time,sta_list_=sta_list_edit, created_cx_=created_cx, layer3_fields_=layer3_fields,port_mgr_fields_=",".join(port_mgr_cols)))
|
||||
time.sleep(monitor_interval_ms)
|
||||
|
||||
@@ -2,14 +2,11 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import requests
|
||||
import pandas as pd
|
||||
import time
|
||||
import datetime
|
||||
import ast
|
||||
import csv
|
||||
|
||||
|
||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||
|
||||
@@ -1,16 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
import re
|
||||
import time
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import os
|
||||
import datetime
|
||||
import base64
|
||||
import xlsxwriter
|
||||
import pandas as pd
|
||||
import requests
|
||||
import ast
|
||||
import csv
|
||||
|
||||
|
||||
# LFData class actions:
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import time
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import time
|
||||
|
||||
|
||||
@@ -12,7 +12,6 @@ import os
|
||||
import importlib
|
||||
import re
|
||||
import time
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
|
||||
# ---- ---- ---- ---- LANforge Base Imports ---- ---- ---- ----
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
import time
|
||||
from pprint import pprint
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import json
|
||||
import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import time
|
||||
|
||||
|
||||
@@ -1,14 +1,4 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
|
||||
|
||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||
|
||||
lfdata = importlib.import_module("py-json.lfdata")
|
||||
LFDataCollection = lfdata.LFDataCollection
|
||||
|
||||
|
||||
class TestBase:
|
||||
def __init__(self):
|
||||
|
||||
@@ -2,9 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import time
|
||||
|
||||
|
||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
|
||||
|
||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||
|
||||
@@ -8,10 +8,6 @@ Date :
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
from pprint import pprint
|
||||
from uuid import uuid1
|
||||
import argparse
|
||||
import datetime
|
||||
import time
|
||||
import matplotlib.pyplot as plt
|
||||
import threading
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import time
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
import time
|
||||
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
'''
|
||||
|
||||
Candela Technologies Inc.
|
||||
|
||||
@@ -14,9 +14,7 @@ import importlib
|
||||
import argparse
|
||||
import json
|
||||
import logging
|
||||
import pprint
|
||||
import traceback
|
||||
import time
|
||||
from time import sleep
|
||||
import websocket
|
||||
import re
|
||||
@@ -329,7 +327,7 @@ def m_error(wsock, err):
|
||||
# ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
|
||||
def m_open(wsock):
|
||||
def run(*args):
|
||||
time.sleep(0.1)
|
||||
sleep(0.1)
|
||||
# ping = json.loads();
|
||||
wsock.send('{"text":"ping"}')
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""TestRail API binding for Python 3.x.
|
||||
|
||||
"""
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import base64
|
||||
import urllib.request
|
||||
from bs4 import BeautifulSoup
|
||||
import ssl
|
||||
import subprocess, os
|
||||
import subprocess
|
||||
from artifactory import ArtifactoryPath
|
||||
import tarfile
|
||||
import paramiko
|
||||
from paramiko import SSHClient
|
||||
from scp import SCPClient
|
||||
import os
|
||||
import pexpect
|
||||
from pexpect import pxssh
|
||||
|
||||
@@ -11,9 +11,7 @@ import importlib
|
||||
import argparse
|
||||
import datetime
|
||||
import time
|
||||
from test_utility import CreateHTML
|
||||
# from test_utility import RuntimeUpdates
|
||||
from test_utility import StatusMsg
|
||||
from test_utility import CreateHTML, StatusMsg
|
||||
import pdfkit
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
|
||||
@@ -14,7 +14,6 @@ import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import time
|
||||
import pprint
|
||||
import argparse
|
||||
|
||||
|
||||
@@ -1,17 +1,16 @@
|
||||
#!/usr/bin/env python3
|
||||
# Create Layer-3 Cross Connection Using LANforge JSON AP : https://www.candelatech.com/cookbook.php?vol=fire&book=scripted+layer-3+test
|
||||
# Written by Candela Technologies Inc.
|
||||
# Updated by: Erin Grimes
|
||||
"""
|
||||
Example Command:
|
||||
./create_l3.py --endp_a 'eth1' --endp_b 'eth2' --min_rate_a '56000' --min_rate_b '40000'
|
||||
|
||||
Create Layer-3 Cross Connection Using LANforge JSON AP : https://www.candelatech.com/cookbook.php?vol=fire&book=scripted+layer-3+test
|
||||
Written by Candela Technologies Inc.
|
||||
Updated by: Erin Grimes
|
||||
Example Command:
|
||||
./create_l3.py --endp_a 'eth1' --endp_b 'eth2' --min_rate_a '56000' --min_rate_b '40000'
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
198  py-scripts/create_l3_stations.py  (new executable file)
@@ -0,0 +1,198 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
This script will create a variable number of layer3 stations each with their own set of cross-connects and endpoints.
|
||||
|
||||
Example script:
|
||||
'./create_l3_stations.py --radio wiphy0 --ssid lanforge --password password --security wpa2'
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
exit(1)
|
||||
|
||||
if 'py-json' not in sys.path:
|
||||
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
|
||||
|
||||
import argparse
|
||||
from LANforge.lfcli_base import LFCliBase
|
||||
from LANforge import LFUtils
|
||||
from realm import Realm
|
||||
|
||||
|
||||
class CreateL3(Realm):
|
||||
def __init__(self,
|
||||
ssid, security, password, sta_list, name_prefix, upstream, radio,
|
||||
host="localhost", port=8080, mode=0, ap=None,
|
||||
side_a_min_rate=56, side_a_max_rate=0,
|
||||
side_b_min_rate=56, side_b_max_rate=0,
|
||||
number_template="00000", use_ht160=False,
|
||||
_debug_on=False,
|
||||
_exit_on_error=False,
|
||||
_exit_on_fail=False):
|
||||
super().__init__(host, port)
|
||||
self.upstream = upstream
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.ssid = ssid
|
||||
self.sta_list = sta_list
|
||||
self.security = security
|
||||
self.password = password
|
||||
self.radio = radio
|
||||
self.mode = mode
|
||||
self.ap = ap
|
||||
self.number_template = number_template
|
||||
self.debug = _debug_on
|
||||
self.name_prefix = name_prefix
|
||||
self.station_profile = self.new_station_profile()
|
||||
self.cx_profile = self.new_l3_cx_profile()
|
||||
self.station_profile.lfclient_url = self.lfclient_url
|
||||
self.station_profile.ssid = self.ssid
|
||||
self.station_profile.ssid_pass = self.password
|
||||
self.station_profile.security = self.security
|
||||
self.station_profile.number_template_ = self.number_template
|
||||
self.station_profile.debug = self.debug
|
||||
self.station_profile.use_ht160 = use_ht160
|
||||
if self.station_profile.use_ht160:
|
||||
self.station_profile.mode = 9
|
||||
self.station_profile.mode = mode
|
||||
if self.ap is not None:
|
||||
self.station_profile.set_command_param("add_sta", "ap", self.ap)
|
||||
# self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
|
||||
|
||||
self.cx_profile.host = self.host
|
||||
self.cx_profile.port = self.port
|
||||
self.cx_profile.name_prefix = self.name_prefix
|
||||
self.cx_profile.side_a_min_bps = side_a_min_rate
|
||||
self.cx_profile.side_a_max_bps = side_a_max_rate
|
||||
self.cx_profile.side_b_min_bps = side_b_min_rate
|
||||
self.cx_profile.side_b_max_bps = side_b_max_rate
|
||||
|
||||
def pre_cleanup(self):
|
||||
self.cx_profile.cleanup_prefix()
|
||||
for sta in self.sta_list:
|
||||
self.rm_port(sta, check_exists=True)
|
||||
|
||||
def build(self):
|
||||
|
||||
self.station_profile.use_security(self.security,
|
||||
self.ssid,
|
||||
self.password)
|
||||
self.station_profile.set_number_template(self.number_template)
|
||||
print("Creating stations")
|
||||
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
|
||||
self.station_profile.set_command_param("set_port", "report_timer", 1500)
|
||||
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
|
||||
self.station_profile.create(radio=self.radio,
|
||||
sta_names_=self.sta_list,
|
||||
debug=self.debug)
|
||||
self.cx_profile.create(endp_type="lf_udp",
|
||||
side_a=self.station_profile.station_names,
|
||||
side_b=self.upstream,
|
||||
sleep_time=0)
|
||||
self._pass("PASS: Station build finished")
|
||||
|
||||
|
||||
def main():
|
||||
parser = LFCliBase.create_basic_argparse(
|
||||
prog='create_l3_stations.py',
|
||||
formatter_class=argparse.RawTextHelpFormatter,
|
||||
epilog='''\
|
||||
Create stations to test connection and traffic on VAPs of varying security types (WEP, WPA, WPA2, WPA3, Open)
|
||||
''',
|
||||
|
||||
description='''\
|
||||
create_l3_stations.py:
|
||||
--------------------
|
||||
Generic command layout:
|
||||
|
||||
python3 ./create_l3_stations.py
|
||||
--upstream_port eth1
|
||||
--radio wiphy0
|
||||
--num_stations 32
|
||||
--security {open|wep|wpa|wpa2|wpa3} \\
|
||||
--mode 1
|
||||
{"auto" : "0",
|
||||
"a" : "1",
|
||||
"b" : "2",
|
||||
"g" : "3",
|
||||
"abg" : "4",
|
||||
"abgn" : "5",
|
||||
"bgn" : "6",
|
||||
"bg" : "7",
|
||||
"abgnAC" : "8",
|
||||
"anAC" : "9",
|
||||
"an" : "10",
|
||||
"bgnAC" : "11",
|
||||
"abgnAX" : "12",
|
||||
"bgnAX" : "13",
|
||||
--ssid netgear
|
||||
--password admin123
|
||||
--a_min 1000
|
||||
--b_min 1000
|
||||
--ap "00:0e:8e:78:e1:76"
|
||||
--number_template 0000
|
||||
--debug
|
||||
''')
|
||||
|
||||
required_args = None
|
||||
for group in parser._action_groups:
|
||||
if group.title == "required arguments":
|
||||
required_args = group
|
||||
break;
|
||||
if required_args is not None:
|
||||
required_args.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
|
||||
required_args.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
|
||||
|
||||
optional_args = None
|
||||
for group in parser._action_groups:
|
||||
if group.title == "optional arguments":
|
||||
optional_args = group
|
||||
break;
|
||||
if optional_args is not None:
|
||||
optional_args.add_argument('--mode', help='Used to force mode of stations')
|
||||
optional_args.add_argument('--ap', help='Used to force a connection to a particular AP')
|
||||
optional_args.add_argument('--number_template', help='Start the station numbering with a particular number. Default is 0000', default=0000)
|
||||
optional_args.add_argument('--station_list', help='Optional: User defined station names', action='append',default=None)
|
||||
args = parser.parse_args()
|
||||
|
||||
num_sta = 2
|
||||
if (args.num_stations is not None) and (int(args.num_stations) > 0):
|
||||
num_sta = int(args.num_stations)
|
||||
|
||||
if args.station_list is None:
|
||||
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=int(args.number_template), end_id_=num_sta+int(args.number_template) - 1, padding_number_=10000,
|
||||
radio=args.radio)
|
||||
else:
|
||||
station_list = args.station_list
|
||||
|
||||
ip_var_test = CreateL3(host=args.mgr,
|
||||
port=args.mgr_port,
|
||||
number_template=str(args.number_template),
|
||||
sta_list=station_list,
|
||||
name_prefix="VT",
|
||||
upstream=args.upstream_port,
|
||||
ssid=args.ssid,
|
||||
password=args.passwd,
|
||||
radio=args.radio,
|
||||
security=args.security,
|
||||
use_ht160=False,
|
||||
side_a_min_rate=args.a_min,
|
||||
side_b_min_rate=args.b_min,
|
||||
mode=args.mode,
|
||||
ap=args.ap,
|
||||
_debug_on=args.debug)
|
||||
|
||||
ip_var_test.pre_cleanup()
|
||||
ip_var_test.build()
|
||||
if not ip_var_test.passes():
|
||||
print(ip_var_test.get_fail_message())
|
||||
ip_var_test.exit_fail()
|
||||
print('Creates %s stations and connections' % num_sta)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
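A minimal sketch of driving the new CreateL3 class from Python rather than from the command line (run from the py-scripts directory so its relative sys.path setup works; the manager address, SSID, credentials, and station names are placeholder assumptions, and the call order mirrors main() above):

    from create_l3_stations import CreateL3

    test = CreateL3(host="192.168.1.101", port=8080,          # assumed LANforge manager
                    ssid="lanforge", password="password", security="wpa2",
                    sta_list=["sta0000", "sta0001"], name_prefix="VT",
                    upstream="1.1.eth1", radio="wiphy0",
                    side_a_min_rate=256000, side_b_min_rate=256000)

    test.pre_cleanup()   # remove any leftover stations and cross-connects with this prefix
    test.build()         # create the stations plus one lf_udp cross-connect per station
    if not test.passes():
        print(test.get_fail_message())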
@@ -8,8 +8,6 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -6,7 +6,6 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
#!/usr/bin/python3
|
||||
# Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python
|
||||
# Written by Candela Technologies Inc.
|
||||
# Updated by: Erin Grimes
|
||||
"""
|
||||
|
||||
Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python
|
||||
Written by Candela Technologies Inc.
|
||||
Updated by: Erin Grimes
|
||||
|
||||
"""
|
||||
import sys
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
@@ -13,7 +17,6 @@ if sys.version_info[0] != 3:
|
||||
|
||||
from time import sleep
|
||||
import urllib
|
||||
from urllib import error
|
||||
import pprint
|
||||
|
||||
sys.path.append("../py-json")
|
||||
|
||||
@@ -1,17 +1,20 @@
|
||||
#!/usr/bin/env python3
|
||||
# This program is used to read in a LANforge Dataplane CSV file and output
|
||||
# a csv file that works with a customer's RvRvO visualization tool.
|
||||
#
|
||||
# Example use case:
|
||||
#
|
||||
# Read in ~/text-csv-0-candela.csv, output is stored at outfile.csv
|
||||
# ./py-scripts/csv_convert.py -i ~/text-csv-0-candela.csv
|
||||
#
|
||||
# Output is csv file with mixed columns, top part:
|
||||
# Test Run,Position [Deg],Attenuation 1 [dB], Pal Stats Endpoint 1 Control Rssi [dBm], Pal Stats Endpoint 1 Data Rssi [dBm]
|
||||
"""
|
||||
|
||||
# Second part:
|
||||
# Step Index,Position [Deg],Attenuation [dB],Traffic Pair 1 Throughput [Mbps]
|
||||
This program is used to read in a LANforge Dataplane CSV file and output
|
||||
a csv file that works with a customer's RvRvO visualization tool.
|
||||
|
||||
Example use case:
|
||||
|
||||
Read in ~/text-csv-0-candela.csv, output is stored at outfile.csv
|
||||
./py-scripts/csv_convert.py -i ~/text-csv-0-candela.csv
|
||||
|
||||
Output is csv file with mixed columns, top part:
|
||||
Test Run,Position [Deg],Attenuation 1 [dB], Pal Stats Endpoint 1 Control Rssi [dBm], Pal Stats Endpoint 1 Data Rssi [dBm]
|
||||
|
||||
Second part:
|
||||
Step Index,Position [Deg],Attenuation [dB],Traffic Pair 1 Throughput [Mbps]
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
|
||||
import ast
|
||||
import os
|
||||
import pprint
|
||||
|
||||
|
||||
class DocstringCollector:
|
||||
|
||||
@@ -7,10 +7,7 @@ Please use concurrently with event_flood.py.
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import datetime
|
||||
from datetime import datetime
|
||||
import time
|
||||
from time import sleep
|
||||
import pprint
|
||||
import argparse
|
||||
|
||||
|
||||
@@ -8,11 +8,8 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
import datetime
|
||||
from datetime import datetime
|
||||
import time
|
||||
from time import sleep
|
||||
import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -3,8 +3,6 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from matplotlib import pyplot as plt
|
||||
from datetime import datetime
|
||||
import numpy as np
|
||||
import os.path
|
||||
from os import path
|
||||
|
||||
@@ -9,7 +9,6 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
import string
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -3,12 +3,7 @@
|
||||
date - 11- feb - 2021
|
||||
-Nikita Yadav
|
||||
"""
|
||||
|
||||
from matplotlib import pyplot as plt
|
||||
from datetime import datetime
|
||||
import numpy as np
|
||||
import os.path
|
||||
from os import path
|
||||
import sys
|
||||
|
||||
print(sys.path)
|
||||
|
||||
@@ -12,7 +12,6 @@ import importlib
|
||||
import argparse
|
||||
import datetime
|
||||
import time
|
||||
import pdfkit
|
||||
import json
|
||||
import re
|
||||
|
||||
|
||||
@@ -167,8 +167,6 @@ import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import json
|
||||
from os import path
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -22,7 +22,6 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -100,7 +100,6 @@ import importlib
|
||||
import argparse
|
||||
import time
|
||||
import json
|
||||
from os import path
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/usr/bin/env python3
|
||||
'''
|
||||
This Scrip has two classes :
|
||||
This Script has two classes :
|
||||
1. LoadScenario : It will load the existing saved scenario to the Lanforge (Here used for Loading Bridged VAP)
|
||||
2. CreateSTA_CX : It will create stations and L3 Cross connects and start them
|
||||
3. Login_DUT : This class is specifically used to test the Linux based DUT that has SSH Server. It is used to read the CPU Core temperature during testing
|
||||
@@ -31,11 +31,9 @@ import logging
|
||||
import paramiko as pm
|
||||
from paramiko.ssh_exception import NoValidConnectionsError as exception
|
||||
import xlsxwriter
|
||||
from bokeh.io import output_file, show
|
||||
from bokeh.io import show
|
||||
from bokeh.plotting import figure
|
||||
from bokeh.models import LinearAxis, Range1d
|
||||
from bokeh.models import HoverTool
|
||||
from bokeh.layouts import row
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -4,11 +4,9 @@
|
||||
License: Free to distribute and modify. LANforge systems must be licensed.
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import paramiko
|
||||
import argparse
|
||||
import datetime
|
||||
from datetime import datetime
|
||||
import time
|
||||
import os
|
||||
|
||||
@@ -21,11 +21,8 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
import matplotlib.pyplot as plt
|
||||
import matplotlib as mpl
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import pdfkit
|
||||
import math
|
||||
from matplotlib.colors import ListedColormap
|
||||
|
||||
|
||||
|
||||
@@ -118,8 +118,6 @@ import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import json
|
||||
from os import path
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
2  py-scripts/lf_multipsk.py  (Normal file → Executable file)
@@ -1,3 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
NAME: lf_multipsk.py
|
||||
|
||||
|
||||
@@ -307,8 +307,6 @@ import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import json
|
||||
from os import path
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -8,7 +8,6 @@ import os
|
||||
import importlib
|
||||
import argparse
|
||||
import datetime
|
||||
import pprint
|
||||
import pandas as pd
|
||||
import time
|
||||
|
||||
|
||||
@@ -197,9 +197,8 @@ if [[ $MGRLEN -gt 0 ]]; then
|
||||
# --ap_name WAC505 --ap_ip 192.168.213.90 --bands Both --directions Download --twog_radio wiphy1 --fiveg_radio wiphy0 --file_size 2MB --num_stations 40 --Both_duration 1 --traffic_duration 2 --ssh_port 22_"
|
||||
"./lf_ftp_test.py --mgr $MGR --ssid $SSID --passwd $PASSWD_USED --security $SECURITY --bands 5G --direction Download \
|
||||
--file_size 2MB --num_stations 2"
|
||||
#./lf_graph
|
||||
#./lf_influx_db
|
||||
#./lf_mesh_test
|
||||
"./lf_graph.py --mgr $MGR"
|
||||
#"./lf_mesh_test.py --mgr $MGR --upstream $UPSTREAM --raw_line 'selected_dut2 RootAP wactest $BSSID'"
|
||||
#./lf_multipsk
|
||||
#./lf_report
|
||||
#./lf_report_test
|
||||
@@ -207,7 +206,7 @@ if [[ $MGRLEN -gt 0 ]]; then
|
||||
#./lf_rx_sensitivity_test.py
|
||||
#./lf_sniff_radio
|
||||
#./lf_snp_test
|
||||
#./lf_tr398_test
|
||||
"./lf_tr398_test.py --mgr $MGR"
|
||||
#./lf_webpage
|
||||
"./lf_wifi_capacity_test.py --mgr $MGR --port 8080 --lf_user lanforge --lf_password lanforge \
|
||||
--instance_name this_inst --config_name test_con --upstream 1.1.eth2 --batch_size 1,5,25,50,100 --loop_iter 1 \
|
||||
|
||||
@@ -5,7 +5,6 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
from time import sleep
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
import argparse
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
|
||||
@@ -18,13 +18,6 @@ Example:
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import json
|
||||
from os import path
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -28,9 +28,7 @@ sys.path.append(os.path.join(os.path.abspath('..'), 'py-dashboard'))
|
||||
import argparse
|
||||
from LANforge import LFUtils
|
||||
from realm import Realm
|
||||
import datetime
|
||||
import time
|
||||
import pprint
|
||||
|
||||
|
||||
class StaScan(Realm):
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -5,7 +5,6 @@ import importlib
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -32,7 +32,6 @@ import importlib
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -38,7 +38,6 @@ import pprint
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
import json
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -49,14 +49,12 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
from pprint import pprint
|
||||
import re
|
||||
import serial
|
||||
import pexpect
|
||||
from pexpect_serial import SerialSpawn
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
import subprocess
|
||||
import csv
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
@@ -67,8 +65,6 @@ if sys.version_info[0] != 3:
|
||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||
|
||||
lf_report = importlib.import_module("py-scripts.lf_report")
|
||||
# lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
||||
# LFCliBase = lfcli_base.LFCliBase
|
||||
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
|
||||
realm = importlib.import_module("py-json.realm")
|
||||
Realm = realm.Realm
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
|
||||
|
||||
@@ -33,11 +33,9 @@ import logging
|
||||
import paramiko as pmgo
|
||||
from paramiko.ssh_exception import NoValidConnectionsError as exception
|
||||
import xlsxwriter
|
||||
from bokeh.io import output_file, show
|
||||
from bokeh.io import show
|
||||
from bokeh.plotting import figure
|
||||
from bokeh.models import LinearAxis, Range1d
|
||||
from bokeh.models import HoverTool, Range1d
|
||||
from bokeh.layouts import row
|
||||
from datetime import datetime
|
||||
import socket
|
||||
|
||||
|
||||
@@ -2,10 +2,8 @@
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
# import time
|
||||
import argparse
|
||||
from uuid import uuid1
|
||||
import pprint
|
||||
from pprint import pprint
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
|
||||
@@ -8,7 +8,6 @@ License: Free to distribute and modify. LANforge systems must be licensed.
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import argparse
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -9,7 +9,6 @@ import os
|
||||
import importlib
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -3,7 +3,6 @@ import sys
|
||||
import os
|
||||
import importlib
|
||||
import pprint
|
||||
import argparse
|
||||
import time
|
||||
import datetime
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ This is an outdated example. Please see modern py-scripts/test_X example scripts
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import traceback
|
||||
|
||||
if sys.version_info[0] != 3:
|
||||
print("This script requires Python 3")
|
||||
|
||||
@@ -7,7 +7,6 @@ This script when running, will monitor the events triggered by test_ipv4_connect
|
||||
import sys
|
||||
import os
|
||||
import importlib
|
||||
import json
|
||||
|
||||
|
||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))