Merge branch 'master' of github.com:greearb/lanforge-scripts
py-scripts/scripts_deprecated/lf_check_orig.py (new file, 1219 lines)
File diff suppressed because it is too large
py-scripts/tools/ct_igg.json (new file, 42 lines)
@@ -0,0 +1,42 @@
{
    "ct_igg":{
        "Notes":[
            "This json file is used as an input to the ./lf_check.py file",
            "The variables that are all capitalized below are replaced with configuration",
            "from the json file, so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip.",
            "The replacement is loosely coupled, so the upper and lower case convention is used",
            "to identify replaced strings in the lf_check.py code.",
            "This file contains the Influx, Grafana and Ghost configuration."
        ]
    },
    "test_database":{
        "database_config": "True",
        "database_host": "192.168.100.201",
        "database_port": "8086",
        "database_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
        "database_org": "Candela",
        "database_bucket": "lanforge_qa_testing",
        "dut_set_name": "DUT_NAME ASUSRT-AX88U",
        "database_tag": "testbed CT-US-001",
        "test_rig": "CT-US-001"
    },
    "test_dashboard":{
        "dashboard_config": "True",
        "dashboard_host": "192.168.100.201",
        "dashboard_token": "eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ=="
    },
    "test_blog":{
        "blog_config": "True",
        "blog_host": "192.168.100.153",
        "blog_token": "60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742",
        "blog_authors": "Matthew",
        "blog_customer": "candela",
        "blog_user_push": "lanforge",
        "blog_password_push": "lanforge",
        "blog_flag": "--kpi_to_ghost"
    }
}
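The Notes above describe lf_check.py's substitution convention: upper-case tokens such as LF_MGR_IP or DATABASE_TOKEN inside a test's arguments are replaced with the values of the matching lower-case keys read from json files like this one. A minimal sketch of that pattern, assuming a flat key/value section and plain string replacement (the helper name below is illustrative, not lf_check.py's actual code):

import json

def substitute_placeholders(args, config):
    # Replace each UPPER_CASE placeholder with the value of its lower_case json key.
    for key, value in config.items():
        args = args.replace(key.upper(), str(value))
    return args

with open("ct_igg.json") as f:
    igg = json.load(f)

cmd = " --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG"
print(substitute_placeholders(cmd, igg["test_database"]))
# --influx_host 192.168.100.201 --influx_port 8086 --influx_org Candela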
py-scripts/tools/ct_tests.json (new file, 379 lines)
@@ -0,0 +1,379 @@
{
    "ct_tests_001":{
        "Notes":[
            "This json is used to orchestrate the tests to be run on testbed ct_us_001.",
            "This json file is used as an input to the ./lf_check.py file",
            "The variables that are all capitalized below are replaced with configuration",
            "from the json file, so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip.",
            "The replacement is loosely coupled, so the upper and lower case convention is used",
            "to identify replaced strings in the lf_check.py code."
        ]
    },
    "test_suites":{
        "suite_l3":{
            "test_l3_longevity":{"enabled":"TRUE","load_db":"skip","command":"test_l3_longevity.py","args":"--test_duration 15s --polling_interval 5s --upstream_port eth2 --radio 'radio==wiphy1,stations==4,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2' --endp_type lf_udp --rates_are_totals --side_a_min_bps=20000 --side_b_min_bps=300000000"}
        },
        "suite_wc_dp_mt":{
            "CT-US-001_create_chamberview_dut_0":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview_dut.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
                    " --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
                ]
            },
            "CT-US-001_create_chamberview_mt7915e_sta19":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ct-us-001-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 19 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
                ]
            },
            "CT-US-001_wifi_capacity_mt7915e":{
                "enabled":"TRUE",
                "timeout":"600",
                "load_db":"skip",
                "command":"lf_wifi_capacity_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
                    " --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
                    " --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e'",
                    " --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
                    " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
                ]
            },
            "CT-US-001_create_chamberview_mt7915e_sta1":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ct-us-001-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
                ]
            },
            "CT-US-001_dataplane_ATH10K_mt7915e_sta1":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"lf_dataplane_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
                    " --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan7",
                    " --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
                    " --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
                    " --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e' ",
                    " --test_rig TEST_RIG"
                ]
            },
            "CT-US-001_QA":{
                "enabled":"TRUE",
                "timeout":"600",
                "load_db":"skip",
                "command":"./tools/lf_qa.py",
                "args":"",
                "args_list":[
                    " --path REPORT_PATH --store --png --database ./tools/qa_test_db"
                ]
            }
        },
        "suite_wc_dp_short":{
            "CT-US-001_create_chamberview_dut_for_ATH10K":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview_dut.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
                    " --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
                ]
            },
            "CT-US-001_create_chamberview_ATH10K(9984)_sta50":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ct-us-001-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 50 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
                ]
            },
            "CT-US-001_wifi_capacity_ATH10K(9984)":{
                "enabled":"TRUE",
                "timeout":"600",
                "load_db":"skip",
                "command":"lf_wifi_capacity_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
                    " --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
                    " --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
                    " --test_rig TEST_RIG "
                ]
            },
            "CT-US-001_QA":{
                "enabled":"TRUE",
                "timeout":"600",
                "load_db":"skip",
                "command":"./tools/lf_qa.py",
                "args":"",
                "args_list":[
                    " --path REPORT_PATH --store --png --database ./tools/qa_test_db"
                ]
            }
        },
        "suite_wc_dp":{
            "CT-US-001_create_chamberview_dut_for_ATH10K":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview_dut.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
                    " --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
                ]
            },
            "CT-US-001_create_chamberview_ATH10K(9984)_sta50":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ct-us-001-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 50 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
                ]
            },
            "CT-US-001_wifi_capacity_ATH10K(9984)":{
                "enabled":"TRUE",
                "timeout":"600",
                "load_db":"skip",
                "command":"lf_wifi_capacity_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
                    " --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
                    " --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
                    " --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
                    " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
                ]
            },
            "CT-US-001_create_chamberview_ATH10K(9984)_sta1":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ct-us-001-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
                ]
            },
            "CT-US-001_dataplane_ATH10K(9984)_sta1":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"lf_dataplane_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
                    " --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan1",
                    " --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
                    " --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
                    " --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)' ",
                    " --test_rig TEST_RIG"
                ]
            },
            "CT-US-001_create_chamberview_dut_for_AX210":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview_dut.py",
                "args":"",
                "args_list":[
                    "--lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
                    " --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
                ]
            },
            "CT-US-001_create_chamberview_wiphy3_AX210_sta1":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ct-us-001-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy3,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\" "
                ]
            },
            "CT-US-001_wifi_capacity_wiphy3_AX210_sta1":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"lf_wifi_capacity_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
                    " --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
                    " --pull_report --local_lf_report_dir REPORT_PATH --stations 1.1.wlan3 --test_tag 'AX210'",
                    " --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
                    " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
                ]
            },
            "CT-US-001_dataplane_wiphy3_AX210_sta1":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"lf_dataplane_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
                    " --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan3",
                    " --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
                    " --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
                    " --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'AX210'",
                    " --test_rig TEST_RIG"
                ]
            },
            "CT-US-001_create_chamberview_dut_for_mt7915e":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview_dut.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
                    " --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
                ]
            },
            "CT-US-001_create_chamberview_mt7915e_sta19":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ct-us-001-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 19 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
                ]
            },
            "CT-US-001_wifi_capacity_mt7915e":{
                "enabled":"TRUE",
                "timeout":"600",
                "load_db":"skip",
                "command":"lf_wifi_capacity_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
                    " --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
                    " --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e'",
                    " --test_rig TEST_RIG"
                ]
            },
            "CT-US-001_create_chamberview_mt7915e_sta1":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ct-us-001-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy7,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
                ]
            },
            "CT-US-001_dataplane_ATH10K_mt7915e_sta1":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"lf_dataplane_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-dpt",
                    " --config_name test_con --upstream 1.1.eth2 --dut asus_5g --duration 30s --station 1.1.wlan7",
                    " --download_speed 85% --upload_speed 0 --raw_line 'pkts: 60;88;120;256;512;1024;MTU' ",
                    " --raw_line 'directions: DUT Transmit' --raw_line 'traffic_types: UDP' --raw_line 'bandw_options: 20'",
                    " --raw_line 'spatial_streams: 1' --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'mt7915e' ",
                    " --test_rig TEST_RIG"
                ]
            },
            "CT-US-001_create_chamberview_dut_2":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview_dut.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
                    " --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
                ]
            },
            "CT-US-001_create_chamberview_ap":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ucentral-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 64 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 STA-AC 1 'DUT: DUT_NAME Radio-1' NA wiphy4,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA \" "
                ]
            },
            "CT-US-001_lf_ap_auto_test": {
                "enabled": "TRUE",
                "command": "lf_ap_auto_test.py",
                "timeout":"1200",
                "args": "",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge",
                    " --instance_name ap-auto-instance --config_name test_con --upstream UPSTREAM_PORT",
                    " --dut5_0 'DUT_NAME lanforge DUT_BSSID_5G (1)' --dut2_0 'DUT_NAME lanforge DUT_BSSID_5G (1)'",
                    " --max_stations_2 32 --max_stations_5 32 --max_stations_dual 100 --radio2 1.1.wiphy1",
                    " --radio5 1.1.wiphy2 --set 'Basic Client Connectivity' 1",
                    " --set 'Multi Band Performance' 0 --set 'Stability' 0 --set 'Multi-Station Throughput vs Pkt Size' 0,",
                    " --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Band-Steering' 0 --set 'Skip 2.4 Ghz Tests' 1",
                    " --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
                    " --test_rig TEST_RIG"
                ]
            },
            "CT-US-001_QA":{
                "enabled":"TRUE",
                "timeout":"600",
                "load_db":"skip",
                "command":"./tools/lf_qa.py",
                "args":"",
                "args_list":[
                    " --path REPORT_PATH --store --png --database ./tools/qa_test_db"
                ]
            }
        }
    }
}
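Each test entry above pairs a "command" with either a single "args" string or an "args_list" that is concatenated before the script is run. A rough sketch of how such an entry could be expanded and launched, assuming the placeholder substitution shown earlier and a plain subprocess call (illustrative only, not lf_check.py's actual runner):

import json
import shlex
import subprocess

with open("ct_tests.json") as f:
    tests = json.load(f)

entry = tests["test_suites"]["suite_l3"]["test_l3_longevity"]
# Prefer the single "args" string; otherwise join the "args_list" fragments.
args = entry["args"] if entry["args"] else "".join(entry.get("args_list", []))
if entry["enabled"] == "TRUE":
    cmd = "./{} {}".format(entry["command"], args)
    # shlex.split keeps the quoted sections such as --radio '...' intact.
    subprocess.run(shlex.split(cmd), check=False)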
@@ -281,7 +281,66 @@
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
                    " --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
                    " --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
                    " --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG"
                    " --test_rig TEST_RIG "
                ]
            },
            "CT-US-001_QA":{
                "enabled":"TRUE",
                "timeout":"600",
                "load_db":"skip",
                "command":"./tools/lf_qa.py",
                "args":"",
                "args_list":[
                    " --path REPORT_PATH --store --png --database ./tools/qa_test_db"
                ]
            }
        },
        "suite_wc_dp_short_igg":{
            "CT-US-001_create_chamberview_dut_for_ATH10K":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview_dut.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --dut_name DUT_NAME",
                    " --ssid 'ssid_idx=0 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --ssid 'ssid_idx=1 ssid=asus11ax-5 security=WPA2 password=hello123 bssid=DUT_BSSID_5G'",
                    " --sw_version DUT_SW --hw_version DUT_HW --serial_num DUT_SERIAL --model_num DUT_NAME"
                ]
            },
            "CT-US-001_create_chamberview_ATH10K(9984)_sta50":{
                "enabled":"TRUE",
                "load_db":"skip",
                "command":"create_chamberview.py",
                "args":"",
                "args_list":[
                    " --lfmgr LF_MGR_IP --port LF_MGR_PORT --delete_scenario",
                    " --create_scenario ct-us-001-scenario ",
                    " --raw_line \"profile_link 1.1 STA-AC 50 'DUT: DUT_NAME Radio-1' NA wiphy1,AUTO -1 NA\" ",
                    " --raw_line \"profile_link 1.1 upstream-dhcp 1 NA NA UPSTREAM_PORT,AUTO -1 NA\""
                ]
            },
            "CT-US-001_wifi_capacity_ATH10K(9984)":{
                "enabled":"TRUE",
                "timeout":"600",
                "load_db":"skip",
                "command":"lf_wifi_capacity_test.py",
                "args":"",
                "args_list":[
                    " --mgr LF_MGR_IP --port LF_MGR_PORT --lf_user lanforge --lf_password lanforge --instance_name cicd-wct",
                    " --upstream 1.1.eth2 --batch_size 1,5,25 --loop_iter 1 --protocol UDP-IPv4 --duration 6000",
                    " --pull_report --local_lf_report_dir REPORT_PATH --test_tag 'ATH10K(9984)'",
                    " --test_rig TEST_RIG --influx_host DATABASE_HOST --influx_port DATABASE_PORT --influx_org DATABASE_ORG",
                    " --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET --influx_tag DATABASE_TAG --set DUT_SET_NAME"
                ]
            },
            "GHOST":{"enabled":"TRUE","load_db":"skip","command":"ghost_profile.py","args":"",
                "args_list":[
                    " --ghost_token BLOG_TOKEN --ghost_host BLOG_HOST --authors BLOG_AUTHORS --customer BLOG_CUSTOMER",
                    " --user_push BLOG_USER_PUSH --password BLOG_PASSWORD_PUSH BLOG_FLAG --grafana_token DASHBOARD_TOKEN",
                    " --grafana_host DASHBOARD_HOST --grafana_bucket DATABASE_BUCKET --parent_folder REPORT_PATH",
                    " --influx_host DATABASE_HOST --influx_org DATABASE_ORG --influx_token=DATABASE_TOKEN --influx_bucket DATABASE_BUCKET",
                    " --influx_tag DATABASE_TAG "
                ]
            }
        },
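The hunk above adds the suite_wc_dp_short_igg suite, whose GHOST step feeds the kpi results to Ghost/Grafana/Influx using the BLOG_*, DASHBOARD_* and DATABASE_* values from ct_igg.json. Because a stray trailing comma is the most common way to break these suite files (the same pitfall the .ini loader warns about), a quick sanity check before a run can be as simple as the following hedged snippet (illustrative, not part of lf_check.py; the file name is assumed):

import json
import sys

try:
    with open("ct_tests.json") as f:
        suites = json.load(f)["test_suites"]
except (json.JSONDecodeError, KeyError) as err:
    sys.exit("suite file did not parse: {}".format(err))

# List how many enabled tests each suite would run.
for name, tests in suites.items():
    enabled = [t for t, cfg in tests.items() if cfg.get("enabled") == "TRUE"]
    print("{}: {} enabled tests".format(name, len(enabled)))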
py-scripts/tools/ct_us_001_rig.json (new file, 80 lines)
@@ -0,0 +1,80 @@
{
    "ct_us_001":{
        "Notes":[
            "This json is used to orchestrate the tests to be run on testbed ct_us_001.",
            "This json file is used as an input to the ./lf_check.py file",
            "The variables that are all capitalized below are replaced with configuration",
            "from the json file, so LF_MGR_IP in the test below is replaced by the json lf_mgr_ip.",
            "The replacement is loosely coupled, so the upper and lower case convention is used",
            "to identify replaced strings in the lf_check.py code."
        ]
    },
    "test_parameters":{
        "test_bed": "CT-US-001",
        "lf_mgr_ip": "192.168.100.116",
        "lf_mgr_port": "8080",
        "dut_name": "ASUSRT-AX88U",
        "dut_bssid_2g": "3c:7c:3f:55:4d:60",
        "dut_bssid_5g": "3c:7c:3f:55:4d:64",
        "dut_sw": "3.0.0.4.386_44266",
        "test_timeout": 300,
        "load_blank_db": false,
        "load_factory_default_db": true,
        "load_custom_db": false,
        "custom_db": "DFLT_ETH1_GEN",
        "email_list_production": "konikofi@candelatech.com,greearb@candelatech.com,logan.lipke@candelatech.com,dipti.dhond@candelatech.com,chuck.rekiere@candelatech.com,matthew@candelatech.com,iain.davidson@candelatech.com,jreynolds@candelatech.com",
        "host_ip_production": "192.168.100.201",
        "email_list_test": "chuck.rekiere@candelatech.com",
        "host_ip_test": "192.168.100.201",
        "email_title_txt": "Lanforge QA Testing CT-US-001",
        "email_txt": "Lanforge QA Testing CT-US-001 "
    },
    "test_network":{
        "http_test_ip": "10.40.0.10",
        "ftp_test_ip": "10.40.0.10",
        "test_ip": "192.168.0.104"
    },
    "test_generic":{
        "radio_used": "wiphy1",
        "ssid_used": "asus11ax-5",
        "ssid_pw_used": "hello123",
        "security_used": "wpa2",
        "num_sta": 1,
        "col_names": "name,tx_byptes,rx_bytes,dropped",
        "upstream_port": "eth2"
    },
    "test_database":{
        "database_config": "True",
        "database_host": "192.168.100.201",
        "database_port": "8086",
        "database_token": "-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ==",
        "database_org": "Candela",
        "database_bucket": "lanforge_qa_testing",
        "dut_set_name": "DUT_NAME ASUSRT-AX88U",
        "database_tag": "testbed CT-US-001",
        "test_rig": "CT-US-001"
    },
    "test_dashboard":{
        "dashboard_config": "True",
        "dashboard_host": "192.168.100.201",
        "dashboard_token": "eyJrIjoiS1NGRU8xcTVBQW9lUmlTM2dNRFpqNjFqV05MZkM0dzciLCJuIjoibWF0dGhldyIsImlkIjoxfQ=="
    },
    "test_blog":{
        "blog_config": "True",
        "blog_host": "192.168.100.153",
        "blog_token": "60df4b0175953f400cd30650:d50e1fabf9a9b5d3d30fe97bc3bf04971d05496a89e92a169a0d72357c81f742",
        "blog_authors": "Matthew",
        "blog_customer": "candela",
        "blog_user_push": "lanforge",
        "blog_password_push": "lanforge",
        "blog_flag": "--kpi_to_ghost"
    },
    "radio_dict":{
        "RADIO_0_CFG":{"KEY":"RADIO_0_CFG","RADIO":"wiphy0","STATIONS":"1","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"},
        "RADIO_1_CFG":{"KEY":"RADIO_1_CFG","RADIO":"wiphy1","STATIONS":"1","SSID":"asus11ax-5","PASSWD":"hello123","SECURITY":"wpa2"}
    }
}
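The radio_dict entries above carry per-radio station counts and credentials, exposed to test commands through the RADIO_*_CFG keys. A small sketch of how one entry could be expanded into a --radio argument in the format used by the suite_l3 example earlier (illustrative, not lf_check.py's actual code):

import json

with open("ct_us_001_rig.json") as f:
    rig = json.load(f)

cfg = rig["radio_dict"]["RADIO_1_CFG"]
# Build the radio clause the way test_l3_longevity.py expects it in suite_l3.
radio_arg = "--radio 'radio=={RADIO},stations=={STATIONS},ssid=={SSID},ssid_pw=={PASSWD},security=={SECURITY}'".format(**cfg)
print(radio_arg)
# --radio 'radio==wiphy1,stations==1,ssid==asus11ax-5,ssid_pw==hello123,security==wpa2'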
@@ -6,7 +6,6 @@ lf_check.py

PURPOSE:
lf_check.py will run tests based on an .ini file or a .json file.
The config file may be copied from lf_check_config_template.ini, or can be generated.
The config file name can be passed in as a configuration parameter.
The json file may be copied from lf_check.json and updated. Currently all the parameters need to be set to a value.

@@ -14,11 +13,8 @@ The --production flag determine the email list for results

EXAMPLE:
lf_check.py  # this will use the defaults
lf_check.py --ini <unique ini file> --test_suite <suite to use in .ini file>
lf_check.py --ini <unique ini file> --test_suite <suite to use in .ini file> --production

lf_check.py --use_json --json <unique json file> --test_suite
lf_check.py --use_json --json <unique json file> --production
lf_check.py --json <unique json file> --test_suite
lf_check.py --json <unique json file> --production

NOTES:
Before using lf_check.py
@@ -61,7 +57,6 @@ Starting LANforge:

'''
import datetime
import pprint
import sys

if sys.version_info[0] != 3:
@@ -75,10 +70,8 @@ import time
from time import sleep
import argparse
import json
import configparser
import subprocess
import csv
import shutil
import shlex
import paramiko
import pandas as pd
@@ -100,20 +93,20 @@ FORMAT = '%(asctime)s %(name)s %(levelname)s: %(message)s'
# The lf_check class contains the verification configuration and orchestrates the testing.
class lf_check():
    def __init__(self,
                 _use_json,
                 _config_ini,
                 _json_data,
                 _test_suite,
                 _production,
                 _csv_results,
                 _outfile,
                 _outfile_name,
                 _report_path,
                 _log_path):
        self.use_json = _use_json
        self.json_data = _json_data
        self.config_ini = _config_ini
                 _json_rig,
                 _json_test,
                 _test_suite,
                 _json_igg,
                 _production,
                 _csv_results,
                 _outfile,
                 _outfile_name,
                 _report_path,
                 _log_path):
        self.json_rig = _json_rig
        self.json_test = _json_test
        self.test_suite = _test_suite
        self.json_igg = _json_igg
        self.production_run = _production
        self.report_path = _report_path
        self.log_path = _log_path
@@ -183,6 +176,12 @@ class lf_check():

        # DUT , Test rig must match testbed
        self.test_rig = "CT-US-NA"
        self.test_rig_json = ""

        # QA report
        self.qa_report_html = "NA"
        self.database_qa = ""
        self.table_qa = ""

        # database configuration # database
        self.database_json = ""
@@ -282,6 +281,11 @@ class lf_check():
            time.sleep(1)
        return lanforge_gui_version

    def get_radio_status(self):
        radio_status = self.json_get("/radiostatus/all")
        print("radio status {radio_status}".format(radio_status=radio_status))


    # NOT complete : will send the email results
    def send_results_email(self, report_file=None):
        if (report_file is None):
@@ -290,6 +294,9 @@ class lf_check():
        report_url = report_file.replace('/home/lanforge/', '')
        if report_url.startswith('/'):
            report_url = report_url[1:]
        qa_url = self.qa_report_html.replace('home/lanforge','')
        if qa_url.startswith('/'):
            qa_url = qa_url[1:]
        # following recommendation
        # NOTE: https://stackoverflow.com/questions/24196932/how-can-i-get-the-ip-address-from-nic-in-python
        # Mail
@@ -297,22 +304,23 @@ class lf_check():
        # command = 'echo "$HOSTNAME mail system works!" | mail -s "Test: $HOSTNAME $(date)" chuck.rekiere@candelatech.com'
        hostname = socket.gethostname()
        ip = socket.gethostbyname(hostname)

        if (self.email_txt != ""):
            message_txt = """{email_txt} lanforge target {lf_mgr_ip}
Results from {hostname}:
http://{ip}/{report}
Blog:
http://{blog}:2368
NOTE: for now to see stdout and stderr remove /home/lanforge from path.
QA Report Dashboard:
http://{ip_qa}/{qa_url}
NOTE: Diagrams are links in dashboard
""".format(hostname=hostname, ip=ip, report=report_url, email_txt=self.email_txt, lf_mgr_ip=self.lf_mgr_ip,
           blog=self.blog_host)
           ip_qa=ip,qa_url=qa_url)

        else:
            message_txt = """Results from {hostname}:
http://{ip}/{report}
Blog:
blog: http://{blog}:2368
""".format(hostname=hostname, ip=ip, report=report_url, blog=self.blog_host)
QA Report Dashboard:
QA Report: http://{ip_qa}/{qa_url}
""".format(hostname=hostname, ip=ip,report=report_url,ip_qa=ip,qa_url=qa_url)

        if (self.email_title_txt != ""):
            mail_subject = "{} [{hostname}] {date}".format(self.email_title_txt, hostname=hostname,
@@ -383,406 +391,330 @@ blog: http://{blog}:2368
<br>
"""

    def read_config(self):
        if self.use_json:
            self.read_config_json()
        else:
            self.read_config_ini()

    # there is probably a more efficient way to do this in python
    # Keeping it obvious for now, may be refactored later
    def read_config_json(self):
        # self.logger.info("read_config_json_contents {}".format(self.json_data))
        if "test_parameters" in self.json_data:
    # Top level for reading the test rig configuration
    def read_json_rig(self):
        # self.logger.info("read_config_json_contents {}".format(self.json_rig))
        if "test_parameters" in self.json_rig:
            self.logger.info("json: read test_parameters")
            # self.logger.info("test_parameters {}".format(self.json_data["test_parameters"]))
            # self.logger.info("test_parameters {}".format(self.json_rig["test_parameters"]))
            self.read_test_parameters()
        else:
            self.logger.info("EXITING test_parameters not in json {}".format(self.json_data))
            self.logger.info("EXITING test_parameters not in json {}".format(self.json_rig))
            exit(1)

        if "test_network" in self.json_data:
        if "test_network" in self.json_rig:
            self.logger.info("json: read test_network")
            # self.logger.info("test_network {}".format(self.json_data["test_network"]))
            # self.logger.info("test_network {}".format(self.json_rig["test_network"]))
            self.read_test_network()
        else:
            self.logger.info("EXITING test_network not in json {}".format(self.json_data))
            self.logger.info("EXITING test_network not in json {}".format(self.json_rig))
            exit(1)

        if "test_database" in self.json_data:
        if "test_generic" in self.json_rig:
            self.logger.info("json: read test_generic")
            # self.logger.info("test_generic {}".format(self.json_rig["test_generic"]))
            self.read_test_generic()
        else:
            self.logger.info("EXITING test_generic not in json {}".format(self.json_rig))
            exit(1)

        if "radio_dict" in self.json_rig:
            self.logger.info("json: read radio_dict")
            # self.logger.info("radio_dict {}".format(self.json_rig["radio_dict"]))
            self.radio_dict = self.json_rig["radio_dict"]
            self.logger.info("self.radio_dict {}".format(self.radio_dict))
        else:
            self.logger.info("EXITING radio_dict not in json {}".format(self.json_rig))
            exit(1)
    # Top Level for reading the tests to run
    def read_json_test(self):
        if "test_suites" in self.json_test:
            self.logger.info("json: read test_suites looking for: {}".format(self.test_suite))
            # self.logger.info("test_suites {}".format(self.json_test["test_suites"]))
            if self.test_suite in self.json_test["test_suites"]:
                self.test_dict = self.json_test["test_suites"][self.test_suite]
                # self.logger.info("self.test_dict {}".format(self.test_dict))
            else:
                self.logger.info("EXITING test_suite {} Not Present in json test_suites: {}".format(self.test_suite,
                                                                                                    self.json_test[
                                                                                                        "test_suites"]))
                exit(1)
        else:
            self.logger.info("EXITING test_suites not in json {}".format(self.json_test))
            exit(1)

    # Top Level for Influx, Grafana, Ghost configuration
    def read_json_igg(self):
        if "test_database" in self.json_igg:
            self.logger.info("json: read test_database")
            # self.logger.info("test_database {}".format(self.json_data["test_database"]))
            # self.logger.info("test_database {}".format(self.json_rig["test_database"]))
            self.read_test_database()
        else:
            self.logger.info("NOTE: test_database not found in json")

        if "test_dashboard" in self.json_data:
        if "test_dashboard" in self.json_igg:
            self.logger.info("json: read test_dashboard")
            # self.logger.info("test_dashboard {}".format(self.json_data["test_dashboard"]))
            # self.logger.info("test_dashboard {}".format(self.json_rig["test_dashboard"]))
            self.read_test_dashboard()
        else:
            self.logger.info("NOTE: test_dashboard not found in json")

        if "test_blog" in self.json_data:
        if "test_blog" in self.json_igg:
            self.logger.info("json: read test_blog")
            # self.logger.info("test_blog {}".format(self.json_data["test_blog"]))
            # self.logger.info("test_blog {}".format(self.json_rig["test_blog"]))
            self.read_test_blog()
        else:
            self.logger.info("NOTE: test_blog not found in json")

        if "test_generic" in self.json_data:
            self.logger.info("json: read test_generic")
            # self.logger.info("test_generic {}".format(self.json_data["test_generic"]))
            self.read_test_generic()
        else:
            self.logger.info("EXITING test_generic not in json {}".format(self.json_data))
            exit(1)

        if "radio_dict" in self.json_data:
            self.logger.info("json: read radio_dict")
            # self.logger.info("radio_dict {}".format(self.json_data["radio_dict"]))
            self.radio_dict = self.json_data["radio_dict"]
            self.logger.info("self.radio_dict {}".format(self.radio_dict))
        else:
            self.logger.info("EXITING radio_dict not in json {}".format(self.json_data))
            exit(1)

        if "test_suites" in self.json_data:
            self.logger.info("json: read test_suites looking for: {}".format(self.test_suite))
            # self.logger.info("test_suites {}".format(self.json_data["test_suites"]))
            if self.test_suite in self.json_data["test_suites"]:
                self.test_dict = self.json_data["test_suites"][self.test_suite]
                # self.logger.info("self.test_dict {}".format(self.test_dict))
            else:
                self.logger.info("EXITING test_suite {} Not Present in json test_suites: {}".format(self.test_suite,
                                                                                                    self.json_data[
                                                                                                        "test_suites"]))
                exit(1)
        else:
            self.logger.info("EXITING test_suites not in json {}".format(self.json_data))
            exit(1)
    def read_test_parameters(self):
        if "test_timeout" in self.json_data["test_parameters"]:
            self.test_timeout = self.json_data["test_parameters"]["test_timeout"]
        if "test_timeout" in self.json_rig["test_parameters"]:
            self.test_timeout = self.json_rig["test_parameters"]["test_timeout"]
            self.test_timeout_default = self.test_timeout
        else:
            self.logger.info("test_timeout not in test_parameters json")
            exit(1)
        if "load_blank_db" in self.json_data["test_parameters"]:
            self.load_blank_db = self.json_data["test_parameters"]["load_blank_db"]
        if "load_blank_db" in self.json_rig["test_parameters"]:
            self.load_blank_db = self.json_rig["test_parameters"]["load_blank_db"]
        else:
            self.logger.info("load_blank_db not in test_parameters json")
            exit(1)
        if "load_factory_default_db" in self.json_data["test_parameters"]:
            self.load_factory_default_db = self.json_data["test_parameters"]["load_factory_default_db"]
        if "load_factory_default_db" in self.json_rig["test_parameters"]:
            self.load_factory_default_db = self.json_rig["test_parameters"]["load_factory_default_db"]
        else:
            self.logger.info("load_factory_default_db not in test_parameters json")
            exit(1)
        if "load_custom_db" in self.json_data["test_parameters"]:
            self.load_custom_db = self.json_data["test_parameters"]["load_custom_db"]
        if "load_custom_db" in self.json_rig["test_parameters"]:
            self.load_custom_db = self.json_rig["test_parameters"]["load_custom_db"]
        else:
            self.logger.info("load_custom_db not in test_parameters json")
            exit(1)
        if "custom_db" in self.json_data["test_parameters"]:
            self.custom_db = self.json_data["test_parameters"]["custom_db"]
        if "custom_db" in self.json_rig["test_parameters"]:
            self.custom_db = self.json_rig["test_parameters"]["custom_db"]
        else:
            self.logger.info("custom_db not in test_parameters json, if not using custom_db just put in a name")
            exit(1)
        if "email_list_production" in self.json_data["test_parameters"]:
            self.email_list_production = self.json_data["test_parameters"]["email_list_production"]
        if "email_list_production" in self.json_rig["test_parameters"]:
            self.email_list_production = self.json_rig["test_parameters"]["email_list_production"]
        else:
            self.logger.info("email_list_production not in test_parameters json")
            exit(1)
        if "host_ip_production" in self.json_data["test_parameters"]:
            self.host_ip_production = self.json_data["test_parameters"]["host_ip_production"]
        if "host_ip_production" in self.json_rig["test_parameters"]:
            self.host_ip_production = self.json_rig["test_parameters"]["host_ip_production"]
        else:
            self.logger.info("host_ip_production not in test_parameters json")
            exit(1)
        if "email_list_test" in self.json_data["test_parameters"]:
            self.email_list_test = self.json_data["test_parameters"]["email_list_test"]
        if "email_list_test" in self.json_rig["test_parameters"]:
            self.email_list_test = self.json_rig["test_parameters"]["email_list_test"]
            print(self.email_list_test)
        else:
            self.logger.info("email_list_test not in test_parameters json")
            exit(1)
        if "host_ip_test" in self.json_data["test_parameters"]:
            self.host_ip_test = self.json_data["test_parameters"]["host_ip_test"]
        if "host_ip_test" in self.json_rig["test_parameters"]:
            self.host_ip_test = self.json_rig["test_parameters"]["host_ip_test"]
        else:
            self.logger.info("host_ip_test not in test_parameters json")
            exit(1)
        if "email_title_txt" in self.json_data["test_parameters"]:
            self.email_title_txt = self.json_data["test_parameters"]["email_title_txt"]
        if "email_title_txt" in self.json_rig["test_parameters"]:
            self.email_title_txt = self.json_rig["test_parameters"]["email_title_txt"]
        else:
            self.logger.info("email_title_txt not in test_parameters json")
        if "email_txt" in self.json_data["test_parameters"]:
            self.email_txt = self.json_data["test_parameters"]["email_txt"]
        if "email_txt" in self.json_rig["test_parameters"]:
            self.email_txt = self.json_rig["test_parameters"]["email_txt"]
        else:
            self.logger.info("email_txt not in test_parameters json")
        if "lf_mgr_ip" in self.json_data["test_parameters"]:
            self.lf_mgr_ip = self.json_data["test_parameters"]["lf_mgr_ip"]
        if "lf_mgr_ip" in self.json_rig["test_parameters"]:
            self.lf_mgr_ip = self.json_rig["test_parameters"]["lf_mgr_ip"]
        else:
            self.logger.info("lf_mgr_ip not in test_parameters json")
        if "lf_mgr_port" in self.json_data["test_parameters"]:
            self.lf_mgr_port = self.json_data["test_parameters"]["lf_mgr_port"]
        if "lf_mgr_port" in self.json_rig["test_parameters"]:
            self.lf_mgr_port = self.json_rig["test_parameters"]["lf_mgr_port"]
        else:
            self.logger.info("lf_mgr_port not in test_parameters json")
        if "dut_name" in self.json_data["test_parameters"]:
            self.dut_name = self.json_data["test_parameters"]["dut_name"]
        if "dut_name" in self.json_rig["test_parameters"]:
            self.dut_name = self.json_rig["test_parameters"]["dut_name"]
        else:
            self.logger.info("dut_name not in test_parameters json")
        if "dut_hw" in self.json_data["test_parameters"]:
            self.dut_hw = self.json_data["test_parameters"]["dut_hw"]
        if "dut_hw" in self.json_rig["test_parameters"]:
            self.dut_hw = self.json_rig["test_parameters"]["dut_hw"]
        else:
            self.logger.info("dut_hw not in test_parameters json")
        if "dut_sw" in self.json_data["test_parameters"]:
            self.dut_sw = self.json_data["test_parameters"]["dut_sw"]
        if "dut_sw" in self.json_rig["test_parameters"]:
            self.dut_sw = self.json_rig["test_parameters"]["dut_sw"]
        else:
            self.logger.info("dut_sw not in test_parameters json")
        if "dut_model" in self.json_data["test_parameters"]:
            self.dut_model = self.json_data["test_parameters"]["dut_model"]
        if "dut_model" in self.json_rig["test_parameters"]:
            self.dut_model = self.json_rig["test_parameters"]["dut_model"]
        else:
            self.logger.info("dut_model not in test_parameters json")
        if "dut_serial" in self.json_data["test_parameters"]:
            self.dut_serial = self.json_data["test_parameters"]["dut_serial"]
        if "dut_serial" in self.json_rig["test_parameters"]:
            self.dut_serial = self.json_rig["test_parameters"]["dut_serial"]
        else:
            self.logger.info("dut_serial not in test_parameters json")
        if "dut_bssid_2g" in self.json_data["test_parameters"]:
            self.dut_bssid_2g = self.json_data["test_parameters"]["dut_bssid_2g"]
        if "dut_bssid_2g" in self.json_rig["test_parameters"]:
            self.dut_bssid_2g = self.json_rig["test_parameters"]["dut_bssid_2g"]
        else:
            self.logger.info("dut_bssid_2G not in test_parameters json")
        if "dut_bssid_5g" in self.json_data["test_parameters"]:
            self.dut_bssid_5g = self.json_data["test_parameters"]["dut_bssid_5g"]
        if "dut_bssid_5g" in self.json_rig["test_parameters"]:
            self.dut_bssid_5g = self.json_rig["test_parameters"]["dut_bssid_5g"]
        else:
            self.logger.info("dut_bssid_5g not in test_parameters json")
        if "dut_bssid_6g" in self.json_data["test_parameters"]:
            self.dut_bssid_6g = self.json_data["test_parameters"]["dut_bssid_6g"]
        if "dut_bssid_6g" in self.json_rig["test_parameters"]:
            self.dut_bssid_6g = self.json_rig["test_parameters"]["dut_bssid_6g"]
        else:
            self.logger.info("dut_bssid_6g not in test_parameters json")
    def read_test_network(self):
        if "http_test_ip" in self.json_data["test_network"]:
            self.http_test_ip = self.json_data["test_network"]["http_test_ip"]
        if "http_test_ip" in self.json_rig["test_network"]:
            self.http_test_ip = self.json_rig["test_network"]["http_test_ip"]
        else:
            self.logger.info("http_test_ip not in test_network json")
            exit(1)
        if "ftp_test_ip" in self.json_data["test_network"]:
            self.ftp_test_ip = self.json_data["test_network"]["ftp_test_ip"]
        if "ftp_test_ip" in self.json_rig["test_network"]:
            self.ftp_test_ip = self.json_rig["test_network"]["ftp_test_ip"]
        else:
            self.logger.info("ftp_test_ip not in test_network json")
            exit(1)
        if "test_ip" in self.json_data["test_network"]:
            self.ftp_test_ip = self.json_data["test_network"]["test_ip"]
        if "test_ip" in self.json_rig["test_network"]:
            self.ftp_test_ip = self.json_rig["test_network"]["test_ip"]
        else:
            self.logger.info("test_ip not in test_network json")
            exit(1)

    def read_test_database(self):
        if "database_config" in self.json_data["test_database"]:
            self.database_config = self.json_data["test_database"]["database_config"]
        if "database_config" in self.json_igg["test_database"]:
            self.database_config = self.json_igg["test_database"]["database_config"]
        else:
            self.logger.info("database_config not in test_database json")
        if "database_host" in self.json_data["test_database"]:
            self.database_host = self.json_data["test_database"]["database_host"]
        if "database_host" in self.json_igg["test_database"]:
            self.database_host = self.json_igg["test_database"]["database_host"]
        else:
            self.logger.info("database_host not in test_database json")
        if "database_port" in self.json_data["test_database"]:
            self.database_port = self.json_data["test_database"]["database_port"]
        if "database_port" in self.json_igg["test_database"]:
            self.database_port = self.json_igg["test_database"]["database_port"]
        else:
            self.logger.info("database_port not in test_database json")
        if "database_token" in self.json_data["test_database"]:
            self.database_token = self.json_data["test_database"]["database_token"]
        if "database_token" in self.json_igg["test_database"]:
            self.database_token = self.json_igg["test_database"]["database_token"]
        else:
            self.logger.info("database_token not in test_database json")
        if "database_org" in self.json_data["test_database"]:
            self.database_org = self.json_data["test_database"]["database_org"]
        if "database_org" in self.json_igg["test_database"]:
            self.database_org = self.json_igg["test_database"]["database_org"]
        else:
            self.logger.info("database_org not in test_database json")
        if "database_bucket" in self.json_data["test_database"]:
            self.database_bucket = self.json_data["test_database"]["database_bucket"]
        if "database_bucket" in self.json_igg["test_database"]:
            self.database_bucket = self.json_igg["test_database"]["database_bucket"]
        else:
            self.logger.info("database_bucket not in test_database json")
        if "database_tag" in self.json_data["test_database"]:
            self.database_tag = self.json_data["test_database"]["database_tag"]
        if "database_tag" in self.json_igg["test_database"]:
            self.database_tag = self.json_igg["test_database"]["database_tag"]
        else:
            self.logger.info("database_tag not in test_database json")
        if "test_rig" in self.json_data["test_database"]:
            self.test_rig = self.json_data["test_database"]["test_rig"]
        if "test_rig" in self.json_igg["test_database"]:
            self.test_rig = self.json_igg["test_database"]["test_rig"]
        else:
            self.logger.info("test_rig not in test_database json")
        if "dut_set_name" in self.json_data["test_database"]:
            self.dut_set_name = self.json_data["test_database"]["dut_set_name"]
        if "dut_set_name" in self.json_igg["test_database"]:
            self.dut_set_name = self.json_igg["test_database"]["dut_set_name"]
        else:
            self.logger.info("dut_set_name not in test_database json")
    def read_test_dashboard(self):
        if "dashboard_config" in self.json_data["test_dashboard"]:
            self.dashboard_config = self.json_data["test_dashboard"]["dashboard_config"]
        if "dashboard_config" in self.json_igg["test_dashboard"]:
            self.dashboard_config = self.json_igg["test_dashboard"]["dashboard_config"]
        else:
            self.logger.info("dashboard_config not in test_dashboard json")

        if "dashboard_host" in self.json_data["test_dashboard"]:
            self.dashboard_host = self.json_data["test_dashboard"]["dashboard_host"]
        if "dashboard_host" in self.json_igg["test_dashboard"]:
            self.dashboard_host = self.json_igg["test_dashboard"]["dashboard_host"]
        else:
            self.logger.info("dashboard_host not in test_dashboard json")

        if "dashboard_token" in self.json_data["test_dashboard"]:
            self.dashboard_token = self.json_data["test_dashboard"]["dashboard_token"]
        if "dashboard_token" in self.json_igg["test_dashboard"]:
            self.dashboard_token = self.json_igg["test_dashboard"]["dashboard_token"]
        else:
            self.logger.info("dashboard_token not in test_dashboard json")

    def read_test_blog(self):
        if "blog_config" in self.json_data["test_blog"]:
            self.blog_config = self.json_data["test_blog"]["blog_config"]
        if "blog_config" in self.json_igg["test_blog"]:
            self.blog_config = self.json_igg["test_blog"]["blog_config"]
        else:
            self.logger.info("blog_config not in test_blog json")

        if "blog_host" in self.json_data["test_blog"]:
            self.blog_host = self.json_data["test_blog"]["blog_host"]
        if "blog_host" in self.json_igg["test_blog"]:
            self.blog_host = self.json_igg["test_blog"]["blog_host"]
        else:
            self.logger.info("blog_host not in test_blog json")

        if "blog_token" in self.json_data["test_blog"]:
            self.blog_token = self.json_data["test_blog"]["blog_token"]
        if "blog_token" in self.json_igg["test_blog"]:
            self.blog_token = self.json_igg["test_blog"]["blog_token"]
        else:
            self.logger.info("blog_token not in test_blog json")

        if "blog_authors" in self.json_data["test_blog"]:
            self.blog_authors = self.json_data["test_blog"]["blog_authors"]
        if "blog_authors" in self.json_igg["test_blog"]:
            self.blog_authors = self.json_igg["test_blog"]["blog_authors"]
        else:
            self.logger.info("blog_authors not in test_blog json")

        if "blog_customer" in self.json_data["test_blog"]:
            self.blog_customer = self.json_data["test_blog"]["blog_customer"]
        if "blog_customer" in self.json_igg["test_blog"]:
            self.blog_customer = self.json_igg["test_blog"]["blog_customer"]
        else:
            self.logger.info("blog_customer not in test_blog json")

        if "blog_user_push" in self.json_data["test_blog"]:
            self.blog_user_push = self.json_data["test_blog"]["blog_user_push"]
        if "blog_user_push" in self.json_igg["test_blog"]:
            self.blog_user_push = self.json_igg["test_blog"]["blog_user_push"]
        else:
            self.logger.info("blog_user_push not in test_blog json")

        if "blog_password_push" in self.json_data["test_blog"]:
            self.blog_password_push = self.json_data["test_blog"]["blog_password_push"]
        if "blog_password_push" in self.json_igg["test_blog"]:
            self.blog_password_push = self.json_igg["test_blog"]["blog_password_push"]
        else:
            self.logger.info("blog_password_push not in test_blog json")

        if "blog_flag" in self.json_data["test_blog"]:
            self.blog_flag = self.json_data["test_blog"]["blog_flag"]
        if "blog_flag" in self.json_igg["test_blog"]:
            self.blog_flag = self.json_igg["test_blog"]["blog_flag"]
        else:
            self.logger.info("blog_flag not in test_blog json")
    def read_test_generic(self):
        if "radio_used" in self.json_data["test_generic"]:
            self.radio_lf = self.json_data["test_generic"]["radio_used"]
        if "radio_used" in self.json_rig["test_generic"]:
            self.radio_lf = self.json_rig["test_generic"]["radio_used"]
        else:
            self.logger.info("radio_used not in test_generic json")
            exit(1)
        if "ssid_used" in self.json_data["test_generic"]:
            self.ssid = self.json_data["test_generic"]["ssid_used"]
        if "ssid_used" in self.json_rig["test_generic"]:
            self.ssid = self.json_rig["test_generic"]["ssid_used"]
        else:
            self.logger.info("ssid_used not in test_generic json")
            exit(1)
        if "ssid_pw_used" in self.json_data["test_generic"]:
            self.ssid_pw = self.json_data["test_generic"]["ssid_pw_used"]
        if "ssid_pw_used" in self.json_rig["test_generic"]:
            self.ssid_pw = self.json_rig["test_generic"]["ssid_pw_used"]
        else:
            self.logger.info("ssid_pw_used not in test_generic json")
            exit(1)
        if "security_used" in self.json_data["test_generic"]:
            self.security = self.json_data["test_generic"]["security_used"]
        if "security_used" in self.json_rig["test_generic"]:
            self.security = self.json_rig["test_generic"]["security_used"]
        else:
            self.logger.info("security_used not in test_generic json")
            exit(1)
        if "num_sta" in self.json_data["test_generic"]:
            self.num_sta = self.json_data["test_generic"]["num_sta"]
        if "num_sta" in self.json_rig["test_generic"]:
            self.num_sta = self.json_rig["test_generic"]["num_sta"]
        else:
            self.logger.info("num_sta not in test_generic json")
            exit(1)
        if "col_names" in self.json_data["test_generic"]:
            self.num_sta = self.json_data["test_generic"]["col_names"]
        if "col_names" in self.json_rig["test_generic"]:
            self.num_sta = self.json_rig["test_generic"]["col_names"]
        else:
            self.logger.info("col_names not in test_generic json")
            exit(1)
        if "upstream_port" in self.json_data["test_generic"]:
            self.upstream_port = self.json_data["test_generic"]["upstream_port"]
        if "upstream_port" in self.json_rig["test_generic"]:
            self.upstream_port = self.json_rig["test_generic"]["upstream_port"]
        else:
            self.logger.info("upstream_port not in test_generic json")
            exit(1)
# functions in this section are/can be overridden by descendants
|
||||
# this code reads the lf_check_config.ini file to populate the test variables
|
||||
def read_config_ini(self):
|
||||
# self.logger.info("read_config_ini_contents {}".format(self.config_ini))
|
||||
config_file = configparser.ConfigParser()
|
||||
success = True
|
||||
success = config_file.read(self.config_ini)
|
||||
self.logger.info("config_file.read result {}".format(success))
|
||||
|
||||
        # LF_MGR parameters not used yet
        if 'LF_MGR' in config_file.sections():
            section = config_file['LF_MGR']
            self.lf_mgr = section['LF_MGR_IP']
            self.lf_mgr_port = section['LF_MGR_PORT']
            self.logger.info("lf_mgr {}".format(self.lf_mgr))
            self.logger.info("lf_mgr_port {}".format(self.lf_mgr_port))

        if 'TEST_PARAMETERS' in config_file.sections():
            section = config_file['TEST_PARAMETERS']
            self.test_timeout = section['TEST_TIMEOUT']
            self.use_blank_db = section['LOAD_BLANK_DB']
            self.use_factory_default_db = section['LOAD_FACTORY_DEFAULT_DB']
            self.use_custom_db = section['LOAD_CUSTOM_DB']
            self.custom_db = section['CUSTOM_DB']
            self.email_list_production = section['EMAIL_LIST_PRODUCTION']
            self.host_ip_production = section['HOST_IP_PRODUCTION']
            self.email_list_test = section['EMAIL_LIST_TEST']
            self.host_ip_test = section['HOST_IP_TEST']
            self.logger.info("self.email_list_test:{}".format(self.email_list_test))

        if 'TEST_NETWORK' in config_file.sections():
            section = config_file['TEST_NETWORK']
            self.http_test_ip = section['HTTP_TEST_IP']
            self.logger.info("http_test_ip {}".format(self.http_test_ip))
            self.ftp_test_ip = section['FTP_TEST_IP']
            self.logger.info("ftp_test_ip {}".format(self.ftp_test_ip))
            self.test_ip = section['TEST_IP']
            self.logger.info("test_ip {}".format(self.test_ip))

        if 'TEST_GENERIC' in config_file.sections():
            section = config_file['TEST_GENERIC']
            self.radio_lf = section['RADIO_USED']
            self.logger.info("radio_lf {}".format(self.radio_lf))
            self.ssid = section['SSID_USED']
            self.logger.info("ssid {}".format(self.ssid))
            self.ssid_pw = section['SSID_PW_USED']
            self.logger.info("ssid_pw {}".format(self.ssid_pw))
            self.security = section['SECURITY_USED']
            self.logger.info("security {}".format(self.security))
            self.num_sta = section['NUM_STA']
            self.logger.info("num_sta {}".format(self.num_sta))
            self.col_names = section['COL_NAMES']
            self.logger.info("col_names {}".format(self.col_names))
            self.upstream_port = section['UPSTREAM_PORT']
            self.logger.info("upstream_port {}".format(self.upstream_port))

        if 'RADIO_DICTIONARY' in config_file.sections():
            section = config_file['RADIO_DICTIONARY']
            self.radio_dict = json.loads(section.get('RADIO_DICT', self.radio_dict))
            self.logger.info("self.radio_dict {}".format(self.radio_dict))

        if self.test_suite in config_file.sections():
            section = config_file[self.test_suite]
            # for json, replace the \n and \r since they are invalid json characters; allows for multiple-line args
            try:
                self.test_dict = json.loads(
                    section.get('TEST_DICT', self.test_dict).replace('\n', ' ').replace('\r', ' '))
                self.logger.info("{}: {}".format(self.test_suite, self.test_dict))
            except:
                self.logger.info(
                    "Exception loading {}, is there a comma after the last entry? Check syntax".format(self.test_suite))
        else:
            self.logger.info("EXITING... NOT FOUND Test Suite with name : {}".format(self.test_suite))
            exit(1)

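The newline stripping above matters because configparser joins multi-line option values with real newline characters, and json.loads rejects those when they land inside a quoted string. A minimal, self-contained sketch of the failure and the fix; the key names in the sample value are hypothetical:

import json

# a multi-line TEST_DICT value as configparser would return it: the embedded
# newline is a control character, which is illegal inside a JSON string literal
raw = '{"test_one": {"args": "--test_duration 15s\n --upstream_port eth2"}}'
try:
    json.loads(raw)
except json.JSONDecodeError as err:
    print("raw value fails to parse: {}".format(err))

# replacing the control characters with spaces makes the value valid JSON
tests = json.loads(raw.replace('\n', ' ').replace('\r', ' '))
print(tests["test_one"]["args"])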
    def load_factory_default_db(self):
        # self.logger.info("file_wd {}".format(self.scripts_wd))
        try:
@@ -1047,9 +979,13 @@ blog: http://{blog}:2368
        stderr_log_size = os.path.getsize(stderr_log_txt)
        if stderr_log_size > 0:
            self.logger.info("File: {} is not empty: {}".format(stderr_log_txt, str(stderr_log_size)))

            self.test_result = "Failure"
            background = self.background_red
            text = open(stderr_log_txt).read()
            if 'Error' in text:
                self.test_result = "Failure"
                background = self.background_red
            else:
                self.test_result = "Success"
                background = self.background_green
        else:
            self.logger.info("File: {} is empty: {}".format(stderr_log_txt, str(stderr_log_size)))
            self.test_result = "Success"
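Assuming the unconditional Failure assignment is the removed side of this hunk, a descriptive comment summarizing the new behaviour could read as follows; the wording is new, the logic is taken directly from the code shown above:

# a non-empty stderr log no longer fails the run by itself: the result is
# "Failure" only when the literal string 'Error' appears in the stderr text,
# otherwise the run is still reported as "Success"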
@@ -1060,7 +996,7 @@ blog: http://{blog}:2368
                background = self.background_purple

        # Ghost will put data in stderr
        if 'ghost' in command:
        if 'ghost' in command or 'lf_qa' in command:
            if self.test_result != "TIMEOUT":
                text = open(stderr_log_txt).read()
                if 'Error' in text:
@@ -1069,6 +1005,16 @@ blog: http://{blog}:2368
                else:
                    self.test_result = "Success"
                    background = self.background_blue
            if 'lf_qa' in command:
                line_list = open(stdout_log_txt).readlines()
                for line in line_list:
                    if 'html report:' in line:
                        self.qa_report_html = line
                        print("html_report: {report}".format(report=self.qa_report_html))
                        break

                self.qa_report_html = self.qa_report_html.replace('html report: ', '')
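One detail worth flagging for anyone reusing self.qa_report_html: readlines() keeps the trailing newline, and the replace() above only strips the 'html report: ' prefix. A small, self-contained demonstration (the report path is hypothetical):

# minimal demonstration: readlines() keeps the newline, so stripping the prefix
# alone leaves a trailing '\n' on the stored report path
line = "html report: /home/lanforge/html-reports/qa_run.html\n"  # hypothetical stdout line
report_path = line.replace('html report: ', '')                  # still ends with '\n'
report_path_clean = line.replace('html report: ', '').strip()
print(repr(report_path), repr(report_path_clean))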

        # stdout_log_link is used for the email reporting to have the corrected path
        stdout_log_link = str(stdout_log_txt).replace('/home/lanforge', '')
@@ -1116,8 +1062,7 @@ Summary :
running scripts listed in <config>.ini or <config>.json

Example :
./lf_check.py --ini lf_check_test.ini --suite suite_one
./lf_check.py --use_json --json lf_check_test.json --suite suite_two
./lf_check.py --json lf_check_test.json --suite suite_two
---------
''')

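Given the new --json_rig, --json_test and --json_igg arguments added in the next hunk, an invocation along these lines is presumably the intended replacement for the single --json form; the angle-bracketed file names are placeholders, not files shipped with this change:

./lf_check.py --json_rig <rig_config>.json --json_test <test_config>.json --suite suite_two
./lf_check.py --json_rig <rig_config>.json --json_test <test_config>.json --json_igg <igg_config>.json --suite suite_two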
@@ -1125,8 +1070,10 @@ Example :
                    default="lf_check_config.ini")
parser.add_argument('--dir', help="--dir <results directory>", default="lf_check")
parser.add_argument('--path', help="--path <results path>", default="/home/lanforge/html-results")
parser.add_argument('--json', help="--json <lf_ckeck_config.json file> ", default="lf_check_config.json")
parser.add_argument('--use_json', help="--use_json ", action='store_true')
parser.add_argument('--json_rig', help="--json_rig <rig json config> ", default="")
parser.add_argument('--json_test', help="--json_test <test json config> ", default="")
parser.add_argument('--json_igg', help="--json_igg <influx grafana ghost json config> ", default="")
parser.add_argument('--use_json', help="--use_json FLAG DEPRECATED", action='store_true')
parser.add_argument('--suite', help="--suite <suite name> default TEST_DICTIONARY", default="TEST_DICTIONARY")
parser.add_argument('--production', help="--production stores true, sends email results to production email list",
                    action='store_true')
@@ -1137,25 +1084,34 @@ Example :

args = parser.parse_args()

# load test config file information either <config>.json or <config>.ini
use_json = False
json_data = ""
config_ini = ""
if args.use_json:
    use_json = True
    print("NOTE: --use_json flag deprecated and unused")
# load test config file information either <config>.json or <config>.ini
json_rig = ""
try:
    print("args.json_rig {rig}".format(rig=args.json_rig))
    with open(args.json_rig, 'r') as json_rig_config:
        json_rig = json.load(json_rig_config)
except:
    print("Error reading {}".format(args.json_rig))
json_test = ""
|
||||
try:
|
||||
print("args.json_test {}".format(args.json_test))
|
||||
with open(args.json_test, 'r') as json_test_config:
|
||||
json_test = json.load(json_test_config)
|
||||
except:
|
||||
print("Error reading {}".format(args.json_test))
|
||||
|
||||
json_igg = ""
|
||||
if args.json_igg != "":
|
||||
try:
|
||||
print("args.json {}".format(args.json))
|
||||
with open(args.json, 'r') as json_config:
|
||||
json_data = json.load(json_config)
|
||||
print("args.json_igg {}".format(args.json_igg))
|
||||
with open(args.json_igg, 'r') as json_igg_config:
|
||||
json_igg = json.load(json_igg_config)
|
||||
except:
|
||||
print("Error reading {}".format(args.json))
|
||||
else:
|
||||
config_ini = os.getcwd() + '/' + args.ini
|
||||
if os.path.exists(config_ini):
|
||||
print("TEST CONFIG : {}".format(config_ini))
|
||||
else:
|
||||
print("EXITING: NOTFOUND TEST CONFIG : {} ".format(config_ini))
|
||||
exit(1)
|
||||
print("Error reading {}".format(args.json_igg))
|
||||
|
||||
|
||||
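The three config reads above repeat the same open / json.load / bare-except pattern. A minimal alternative sketch; the helper name load_json_config and its exit-on-error policy are assumptions for illustration, not part of this change:

import json
import os

def load_json_config(path):
    # hypothetical helper: returns an empty dict when no path is given so callers
    # can still do dict lookups, and reports the specific parse error otherwise
    if not path:
        return {}
    if not os.path.exists(path):
        print("EXITING: NOT FOUND json config : {}".format(path))
        exit(1)
    try:
        with open(path, 'r') as config_file:
            return json.load(config_file)
    except json.JSONDecodeError as err:
        print("Error reading {}: {}".format(path, err))
        exit(1)

# usage sketch: json_rig = load_json_config(args.json_rig)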
# Test-rig information
lanforge_node_version = 'NO_LF_NODE_VER'
@@ -1190,10 +1146,10 @@ Example :
log_path = report.get_log_path()

# lf_check() class created
check = lf_check(_use_json=use_json,
                 _config_ini=config_ini,
                 _json_data=json_data,
check = lf_check(_json_rig=json_rig,
                 _json_test=json_test,
                 _test_suite=test_suite,
                 _json_igg=json_igg,
                 _production=production,
                 _csv_results=csv_results,
                 _outfile=outfile,
@@ -1201,12 +1157,6 @@ Example :
                 _report_path=report_path,
                 _log_path=log_path)

# get git sha
process = subprocess.Popen(["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE)
(commit_hash, err) = process.communicate()
exit_code = process.wait()
git_sha = commit_hash.decode('utf-8', 'ignore')

# set up logging
logfile = args.logfile[:-4]
print("logfile: {}".format(logfile))
@@ -1221,12 +1171,14 @@ Example :
logger.addHandler(file_handler)
logger.addHandler(logging.StreamHandler(sys.stdout))  # allows logging to file and stdout

# logger setup print out sha
logger.info("commit_hash: {}".format(commit_hash))
logger.info("commit_hash2: {}".format(commit_hash.decode('utf-8', 'ignore')))

# read config and run tests
check.read_config()
check.read_json_rig()  # check.read_config
check.read_json_test()

if args.json_igg != "":
    print("Tests need to have influx parameters passed in")
    check.read_json_igg()

ping_result = check.check_if_port_exists()
for key, value in ping_result.items():
    if value[1] is None:
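Assuming check.read_config() is the removed side of this hunk, the new startup sequence can be summarized with a comment like the following; the wording is new, the order is exactly what the calls above show:

# configuration is now read in three passes: the rig JSON first, then the test
# suite JSON, and finally the optional Influx/Grafana/Ghost JSON when --json_igg
# was supplied on the command line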
@@ -1249,7 +1201,7 @@ Example :

try:
    lanforge_node_version = check.get_lanforge_node_version()
    print("lanforge_node_version {node_ver}".format(node_node=lanforge_node_version))
    print("lanforge_node_version {node_ver}".format(node_ver=lanforge_node_version))
except:
    print("lanforge_node_version exception")

@@ -1265,6 +1217,8 @@ Example :
except:
    print("lanforge_gui_version exception")

#check.get_radio_status()

# LANforge and scripts config
lf_test_setup = pd.DataFrame()
lf_test_setup['LANforge'] = lanforge_node_version

@@ -290,7 +290,10 @@ class csv_sqlite_dash():
        # NOTE: html links to png do not like spaces
        png_server_img = self.server + png_path.replace(self.cut, '')
        # generate png image
        kpi_fig.write_image(png_path, scale=1, width=1200, height=350)
        try:
            kpi_fig.write_image(png_path, scale=1, width=1200, height=350)
        except:
            print("ERROR: {database} Was correct database passed in, moved or duplicates of same name?".format(database=self.database))
        # https://plotly.com/python/interactive-html-export/
        # generate html image (interactive)
        kpi_fig.write_html(html_path)
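A note on the failure mode the new except appears to cover, based on the error text in the hunk; the suggested earlier check is an assumption, not part of the change:

# the except around write_image() surfaces a bad database path (file moved,
# renamed, or duplicated) instead of stopping the whole report run; checking
# os.path.exists(self.database) before plotting is one way to catch the same
# problem earlier with a clearer message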