fixed merge conflicts

Signed-off-by: shivamcandela <shivam.thakur@candelatech.com>
This commit is contained in:
shivamcandela
2021-12-13 13:01:01 +05:30
197 changed files with 29645 additions and 14907 deletions

2
.gitignore vendored
View File

@@ -5,3 +5,5 @@
*.iml
**/*.iml
.idea
*.env
*.zip

60
Quali/lanforge_resource/.gitignore vendored Normal file
View File

@@ -0,0 +1,60 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
cloudshell_config.yml

View File

@@ -0,0 +1,4 @@
TOSCA-Meta-File-Version: 1.0
CSAR-Version: 0.1.0
Created-By: Anonymous
Entry-Definitions: shell-definition.yaml

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

View File

@@ -0,0 +1,53 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<properties>
<!-- The address of the Quali server on which to deploy, mandatory -->
<serverRootAddress>localhost</serverRootAddress>
<!-- The port of the Quali server on which to deploy, defaults to "8029" -->
<port>8029</port>
<!-- The server admin username, password and domain to use when deploying -->
<username>YOUR_USERNAME</username>
<password>YOUR_PASSWORD</password>
<domain>Global</domain>
<!-- Simple patterns to filter when sending the driver to the server separated by semicolons (e.g. "file.xml;logs/", also supports regular expressions),
on top of the patterns specified here the plugin will automatically filter the "deployment/" and ".idea/" folders and the "deployment.xml" file -->
<fileFilters>dont_upload_me.xml</fileFilters>
<!-- The drivers to update, holds one or more drivers -->
<drivers>
<!-- runFromLocalProject - Decides whether to run the driver from the current project directory for debugging purposes, defaults to "false" -->
<!-- waitForDebugger - When `runFromLocalProject` is enabled, decides whether to wait for a debugger to attach before running any Python driver code, defaults to "false" -->
<!-- sourceRootFolder - The folder to refer to as the project source root (if specified, the folder will be zipped and deployed instead of the whole project), defaults to the root project folder -->
<driver runFromLocalProject="true" waitForDebugger="true" sourceRootFolder="lanforge-resource">
<!-- A list of paths to the driver's files or folders relative to the project's root.
may be a path to a directory, in which case all the files and folders under the directory are added into the driver's zip file.
if the <sources> element is not specified, all the files under the project are added to the driver's zip file -->
<sources>
<source>src</source>
</sources>
<!-- the driver name of the driver to update -->
<targetName>LanforgeResourceDriver</targetName>
</driver>
</drivers>
<!-- The scripts to update, holds one or more scripts -->
<!-- A list of paths to the script's files or folders relative to the project's root.
if the <sources> element is not specified, all the files under the project are added to the script's zip file.
if only one file is specified, the file will not be compressed into a zip file.
-->
<!--
<scripts>
<script>
<sources>
<source>script1.py</source>
</sources>
<targetName>scriptToUpdate</targetName>
</script>
</scripts>
-->
</properties>

View File

@@ -0,0 +1,3 @@
.. _readme:
.. include:: ../README.rst

View File

@@ -0,0 +1,45 @@
tosca_definitions_version: tosca_simple_yaml_1_0
metadata:
template_name: Lanforge Resource
template_author: Anonymous
template_version: 0.1.0
template_icon: shell-icon.png
description: >
TOSCA based resource shell
imports:
- cloudshell_standard: cloudshell_resource_standard_2_0_3.yaml
node_types:
vendor.resource.Lanforge Resource:
derived_from: cloudshell.nodes.GenericResource
#properties:
# my_property:
# type: string # optional values: string, integer, float, boolean, cloudshell.datatypes.Password
# default: fast
# description: Some attribute description
# constraints:
# - valid_values: [fast, slow]
capabilities:
auto_discovery_capability:
type: cloudshell.capabilities.AutoDiscovery
properties:
enable_auto_discovery:
type: boolean
default: true
auto_discovery_description:
type: string
default: Describe the auto discovery
inventory_description:
type: string
default: Describe the resource shell template
artifacts:
icon:
file: canvil2-64x64-gray-yel-ico.png
type: tosca.artifacts.File
driver:
file: LanforgeResourceDriver.zip
type: tosca.artifacts.File

Binary file not shown.

After

Width:  |  Height:  |  Size: 461 B

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,379 @@
from cloudshell.shell.core.resource_driver_interface import ResourceDriverInterface
from cloudshell.shell.core.driver_context import InitCommandContext, ResourceCommandContext, AutoLoadResource, \
AutoLoadAttribute, AutoLoadDetails, CancellationContext
from cloudshell.shell.core.session.cloudshell_session import CloudShellSessionContext
from cloudshell.api.cloudshell_api import CloudShellAPISession
from cloudshell.helpers.scripts.cloudshell_scripts_helpers import get_api_session, get_reservation_context_details
from cloudshell.shell.core.session.cloudshell_session import CloudShellSessionContext
import cloudshell.helpers.scripts.cloudshell_scripts_helpers as script_help
import cloudshell.helpers.scripts.cloudshell_dev_helpers as dev_helpers
# from cloudshell.shell.core.resource_driver_interface import ResourceDriverInterface
# from cloudshell.shell.core.context import InitCommandContext, ResourceCommandContext
import mock
from data_model import *
# run 'shellfoundry generate' to generate data model classes
import subprocess
import sys
import os
import importlib
import paramiko
from scp import SCPClient
import requests
import datetime
import os
# command = "./lanforge-scripts/py-scripts/update_dependencies.py"
# print("running:[{}]".format(command))
# process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
# outs, errs = process.communicate()
# print(outs)
# print(errs)
# if 'lanforge-scripts' not in sys.path:
# sys.path.append('./lanforge-scripts')
# create_wanlink = importlib.import_module("lanforge-scripts.py-json.create_wanlink")
# create_l3 = importlib.import_module("lanforge-scripts.py-scripts.create_l3")
# CreateL3 = create_l3.CreateL3
class LanforgeResourceDriver(ResourceDriverInterface):
    """CloudShell shell driver for a LANforge traffic generator.

    Commands are executed by SSH-ing into the LANforge host (the resource
    address) and invoking lanforge-scripts entry points there; generated
    reports are copied back over SCP and attached to the reservation.
    """

    def __init__(self):
        """ctor must be without arguments, it is created with reflection at run time"""
        pass

    def initialize(self, context):
        """
        Initialize the driver session; called every time a new driver instance is created.
        This is a good place to load and cache the driver configuration, initiate sessions etc.

        :param InitCommandContext context: the context the command runs on
        """
        pass

    def cleanup(self):
        """
        Destroy the driver session; called every time a driver instance is destroyed.
        This is a good place to close any open sessions, finish writing to log files.
        """
        pass

    def get_inventory(self, context):
        """
        Discovers the resource structure and attributes.

        :param AutoLoadCommandContext context: the context the command runs on
        :return: attribute and sub-resource information for the Shell resource
        :rtype: AutoLoadDetails
        """
        # This shell discovers no sub-resources or attributes; return an empty
        # inventory. (Run 'shellfoundry generate' for data-model classes if a
        # real discovery is implemented later.)
        return AutoLoadDetails([], [])

    def orchestration_save(self, context, cancellation_context, mode, custom_params):
        """
        Saves the Shell state and returns a description of the saved artifacts.
        Intended for API use only by sandbox orchestration scripts.

        :param ResourceCommandContext context: resource and reservation info
        :param CancellationContext cancellation_context: cancellation signal object
        :param str mode: snapshot save mode, 'shallow' (default) or 'deep'
        :param str custom_params: custom parameters for the save operation
        :return: SavedResults serialized as JSON
        :rtype: OrchestrationSaveResult
        """
        # Not implemented for this shell. See the sandbox orchestration
        # standard for the saved_artifact_info JSON schema:
        # https://github.com/QualiSystems/sandbox_orchestration_standard
        pass

    def orchestration_restore(self, context, cancellation_context, saved_artifact_info, custom_params):
        """
        Restores a saved artifact previously saved by orchestration_save.

        :param ResourceCommandContext context: resource and reservation info
        :param CancellationContext cancellation_context: cancellation signal object
        :param str saved_artifact_info: JSON string with the state to restore
        :param str custom_params: custom parameters for the restore operation
        :return: None
        """
        # Not implemented for this shell.
        pass

    def attach_file(self, report_server, resid, file_path, user, password, domain, filename):
        """Attach a local file to a reservation via the Quali REST API (port 9000).

        :param str report_server: address of the Quali server
        :param str resid: reservation id to attach the file to
        :param str file_path: local path of the file to upload
        :param str user: API username
        :param str password: API password
        :param str domain: CloudShell domain
        :param str filename: name to save the file as in the reservation
        :return: the requests.Response from the attach call
        """
        credentials = {
            'username': user,
            'password': password,
            'domain': domain
        }
        # The login endpoint returns the token wrapped in quotes; strip them.
        auth_header = 'Basic ' + requests.put(
            url='http://' + report_server + ':9000/API/Auth/Login',
            data=credentials
        ).text[1:-1]
        headers = {
            'Authorization': auth_header,
        }
        form_data = {
            "reservationId": resid,
            "saveFileAs": filename,
            "overwriteIfExists": "true",
        }
        with open(file_path, 'rb') as upload_file:
            response = requests.post(
                url='http://' + report_server + ':9000/API/Package/AttachFileToReservation',
                headers=headers,
                data=form_data,
                files={'QualiPackage': upload_file}
            )
        return response

    def send_command(self, context, cmd):
        """Run *cmd* on the LANforge host over SSH and return the combined output.

        :param ResourceCommandContext context: resource and reservation info
        :param str cmd: shell command line to execute on the resource
        :return: connection banner plus stdout and stderr of the command
        :rtype: str
        """
        msg = ""
        resource = LanforgeResource.create_from_context(context)
        session = CloudShellAPISession(host=context.connectivity.server_address,
                                      token_id=context.connectivity.admin_auth_token,
                                      domain=context.reservation.domain)
        resource_model_name = resource.cloudshell_model_name
        terminal_ip = context.resource.address
        terminal_user = context.resource.attributes[f'{resource_model_name}.User']
        terminal_pass = session.DecryptPassword(context.resource.attributes[f'{resource_model_name}.Password']).Value
        # FIX: do not echo the decrypted password back into the returned message
        # (it previously leaked into the sandbox output).
        msg += f"Initializing SSH connection to {terminal_ip}, with user {terminal_user}\n"
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(hostname=terminal_ip, username=terminal_user, password=terminal_pass)
        try:
            print(f"running:[{cmd}]")
            stdin, stdout, stderr = ssh.exec_command(cmd)
            output = ''.join(stdout.readlines())
            errors = ''.join(stderr.readlines())
            msg += output + errors
        finally:
            # FIX: guarantee the SSH session is closed even if exec_command raises.
            ssh.close()
        return msg

    def example_command(self, context):
        """
        this is my example command

        :param ResourceCommandContext context
        :return: str
        """
        resource = LanforgeResource.create_from_context(context)
        session = CloudShellAPISession(host=context.connectivity.server_address,
                                      token_id=context.connectivity.admin_auth_token,
                                      domain=context.reservation.domain)
        resource_model_name = resource.cloudshell_model_name
        password = session.DecryptPassword(context.resource.attributes[f'{resource_model_name}.Password']).Value
        username = context.resource.attributes[f'{resource_model_name}.User']
        msg = f"My resource {resource.name} at address {context.resource.address} has model name {resource_model_name}. "
        msg += f"The username is {username} and password is {password}."
        return msg

    def create_wanlink(self, context, name, latency, rate):
        """Create a virtual wanlink with the given latency/rate on both sides.

        :param str name: name for the new wanlink
        :param str latency: latency applied to both endpoints
        :param str rate: throughput capacity applied to both endpoints
        :return: output of the remote create_wanlink.py run
        """
        cmd = "/home/lanforge/lanforge-scripts/py-json/create_wanlink.py --host {host} --port_A {port_A} --port_B {port_B} --name \"{name}\" --latency \"{latency}\" --latency_A \"{latency_A}\" --latency_B \"{latency_B}\" --rate {rate} --rate_A {rate_A} --rate_B {rate_B} --jitter {jitter} --jitter_A {jitter_A} --jitter_B {jitter_B} --jitter_freq_A {jitter_freq_A} --jitter_freq_B {jitter_freq_B} --drop_A {drop_A} --drop_B {drop_B}".format(
            host="localhost",
            port_A="eth1",
            port_B="eth2",
            name=name,
            latency=latency,
            latency_A=latency,
            latency_B=latency,
            rate=rate,
            rate_A=rate,
            rate_B=rate,
            jitter="0",
            jitter_A="0",
            jitter_B="0",
            jitter_freq_A="0",
            jitter_freq_B="0",
            drop_A="0",
            drop_B="0"
        )
        output = self.send_command(context, cmd)
        print(output)
        return output

    def create_l3(self, context, name, min_rate_a, min_rate_b, endp_a, endp_b):
        """Create a Layer-3 connection between two existing ports.

        NOTE(review): the *name* parameter is accepted but not passed to
        create_l3.py -- confirm whether the script should receive it.
        """
        cmd = f"/home/lanforge/lanforge-scripts/py-scripts/create_l3.py --endp_a \"{endp_a}\" --endp_b \"{endp_b}\" --min_rate_a \"{min_rate_a}\" --min_rate_b \"{min_rate_b}\""
        output = self.send_command(context, cmd)
        print(output)
        return output

    def pull_reports(self, hostname="", port=22,
                     username="lanforge", password="lanforge",
                     report_location="/home/lanforge/html-reports/",
                     report_dir="./"):
        """Copy generated reports from the LANforge host to *report_dir* via SCP.

        :param str hostname: LANforge host to connect to
        :param int port: SSH port
        :param str username: SSH username
        :param str password: SSH password
        :param str report_location: remote directory holding the reports
        :param str report_dir: local destination directory
        """
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # FIX: connect to the hostname argument; it was hard-coded to "juicer",
        # silently ignoring the caller-supplied address.
        ssh.connect(hostname=hostname, username=username, password=password,
                    port=port, allow_agent=False, look_for_keys=False)
        try:
            with SCPClient(ssh.get_transport()) as scp:
                scp.get(remote_path=report_location, local_path=report_dir, recursive=True)
        finally:
            # FIX: close the SSH transport (it previously leaked).
            ssh.close()

    def dataplane_test(self, context, instance_name, upstream, station, duration, download_speed, upload_speed, traffic_types, local_lf_report_dir, output_report_dir, mgr):
        """Run lf_dataplane_test.py on the LANforge host, then pull and attach the report.

        :param str instance_name: name for the dataplane test instance
        :param str upstream: upstream port (e.g. 1.1.eth1)
        :param str station: downstream port
        :param str duration: test duration (append 's' for seconds)
        :param str download_speed: upstream port rate
        :param str upload_speed: downstream port rate
        :param str traffic_types: TCP/UDP
        :param str local_lf_report_dir: LANforge-side report directory
        :param str output_report_dir: server-side directory to save reports
        :param str mgr: IP address to run the test on
        :return: output of the remote test run
        """
        # FIX: there was no space before the line-continuation backslash after
        # {duration}, which fused "--duration X--download_speed" into one token.
        cmd = '''/home/lanforge/lanforge-scripts/py-scripts/lf_dataplane_test.py --mgr {mgr} --port 8080 --lf_user lanforge --lf_password lanforge \
--instance_name {instance_name} --config_name test_con \
--upstream {upstream} --station {station} --duration {duration} \
--download_speed {download_speed} --upload_speed {upload_speed} \
--raw_line 'pkts: 256;1024' \
--raw_line 'directions: DUT Transmit' \
--raw_line 'traffic_types: {traffic_types}' \
--test_rig juicer --pull_report \
--local_lf_report_dir {local_lf_report_dir}'''.format(
            instance_name=instance_name,
            mgr=mgr,
            upstream=upstream,
            station=station,
            duration=duration,
            download_speed=download_speed,
            upload_speed=upload_speed,
            traffic_types=traffic_types,
            local_lf_report_dir=local_lf_report_dir
        )
        output = self.send_command(context, cmd)
        print(output)
        resource = LanforgeResource.create_from_context(context)
        session = CloudShellAPISession(host=context.connectivity.server_address,
                                      token_id=context.connectivity.admin_auth_token,
                                      domain=context.reservation.domain)
        resource_model_name = resource.cloudshell_model_name
        terminal_pass = session.DecryptPassword(context.resource.attributes[f'{resource_model_name}.Password']).Value
        terminal_user = context.resource.attributes[f'{resource_model_name}.User']
        reservation_id = context.reservation.reservation_id
        api = CloudShellSessionContext(context).get_api()
        cwd = os.getcwd()
        self.pull_reports(hostname=context.resource.address, port=22,
                          username=terminal_user, password=terminal_pass,
                          report_location="/home/lanforge/html-reports/",
                          report_dir=f"C:/Users/Administrator/{output_report_dir}")
        api.WriteMessageToReservationOutput(reservation_id, f"The current working directory is {cwd}")
        # NOTE(review): file_path/filename below are hard-coded to one specific
        # past report; they should presumably be derived from output_report_dir
        # and the freshly pulled report -- TODO confirm.
        self.attach_file(
            report_server=context.connectivity.server_address,
            resid=context.reservation.reservation_id,
            user='admin',
            password='admin',
            domain=context.reservation.domain,
            file_path="C:/Users/Administrator/Desktop/My_Reports/html-reports/dataplane-2021-10-13-03-32-40/dataplane-report-2021-10-13-03-31-50.pdf",
            filename="C:/Users/Administrator/Desktop/test_report.txt"
        )
        return output

    def scenario(self, context, load):
        """Load or start a LANforge scenario database.

        :param str load: name of the database to load
        :return: output of the remote scenario.py run
        """
        cmd = f"/home/lanforge/lanforge-scripts/py-scripts/scenario.py --load {load}"
        output = self.send_command(context, cmd)
        print(output)
        return output
if __name__ == "__main__":
    # setup for mock-debug environment: fabricate the CloudShell contexts that
    # the server would normally inject, so the driver can be run locally.
    shell_name = "LanforgeResource"
    cancellation_context = mock.create_autospec(CancellationContext)
    context = mock.create_autospec(ResourceCommandContext)
    context.resource = mock.MagicMock()
    context.reservation = mock.MagicMock()
    context.connectivity = mock.MagicMock()
    context.reservation.reservation_id = "<RESERVATION_ID>"
    context.resource.address = "192.168.100.176"
    context.resource.name = "Lanforge_Resource"
    context.resource.attributes = dict()
    context.resource.attributes["{}.User".format(shell_name)] = "lanforge"
    context.resource.attributes["{}.Password".format(shell_name)] = "lanforge"
    context.resource.attributes["{}.SNMP Read Community".format(shell_name)] = "<READ_COMMUNITY_STRING>"
    # add information for api connectivity
    context.reservation.domain = "Global"
    context.connectivity.server_address = "192.168.100.131"
    driver = LanforgeResourceDriver()
    # print driver.run_custom_command(context, custom_command="sh run", cancellation_context=cancellation_context)
    # result = driver.example_command_with_api(context)
    # driver.create_l3(context, "my_fire", "69000", "41000", "eth1", "eth2")
    # driver.create_wanlink(context, name="my_wanlin", latency="49", rate="6000")
    # FIX: dataplane_test requires ten arguments after context; the previous
    # call passed only four positionals and raised TypeError before doing
    # anything. Supply the full set (values mirror drivermetadata defaults).
    driver.dataplane_test(
        context,
        instance_name="instance",
        upstream="1.1.eth1",
        station="1.1.eth2",
        duration="2s",
        download_speed="10Mbps",
        upload_speed="0",
        traffic_types="UDP",
        local_lf_report_dir="tmp/my_report/",
        output_report_dir="Desktop/My_Reports",
        mgr="localhost",
    )
    print("done")

View File

@@ -0,0 +1,189 @@
<Driver Description="Describe the purpose of your CloudShell shell" MainClass="driver.LanforgeResourceDriver" Name="LanforgeResourceDriver" Version="1.0.0" PythonVersion="3">
<Layout>
<Category Name="Hidden Commands">
<Command Description=""
DisplayName="Orchestration Save"
Name="orchestration_save" />
<Command Description=""
DisplayName="Orchestration Restore"
Name="orchestration_restore" />
<Command Description="Send Command to Resource"
DisplayName="Scenario"
Name="send_command">
<Parameters>
<Parameter Name="cmd"
Type="String"
Mandatory="False"
DefaultValue=""
Description="The command to send"/>
</Parameters>
</Command>
<Command Description="Pull Reports from LANforge"
DisplayName="Pull Reports"
Name="pull_reports">
<Parameters>
<Parameter Name="hostname"
Type="String"
Mandatory="False"
DefaultValue=""
Description="hostname"/>
<Parameter Name="port"
Type="String"
Mandatory="False"
DefaultValue=""
Description="port"/>
<Parameter Name="username"
Type="String"
Mandatory="False"
DefaultValue=""
Description="username"/>
<Parameter Name="password"
Type="String"
Mandatory="False"
DefaultValue=""
Description="password"/>
<Parameter Name="report_location"
Type="String"
Mandatory="False"
DefaultValue=""
Description="report location"/>
<Parameter Name="report_dir"
Type="String"
Mandatory="False"
DefaultValue=""
Description="report dir"/>
</Parameters>
</Command>
</Category>
<Category Name="Example Commands">
<Command Description="Example Command from Demo"
DisplayName="Example Command"
Name="example_command"/>
</Category>
<Category Name="Scenario">
<Command Description="Load or start a scenario"
DisplayName="Scenario"
Name="scenario">
<Parameters>
<Parameter Name="load"
Type="String"
Mandatory="False"
DefaultValue="BLANK"
Description="The name of the database to load"/>
</Parameters>
</Command>
</Category>
<Category Name="ICE">
<Command Description="Create a virtual wanlink with custom impairments."
DisplayName="Create Wanlink"
Name="create_wanlink">
<Parameters>
<Parameter Name="name"
Type="String"
Mandatory="False"
DefaultValue="wl_eg1"
Description="Enter a name for the wanlink."/>
<Parameter Name="latency"
Type="String"
Mandatory="False"
DefaultValue="20"
Description="Latency of both endpoints"/>
<Parameter Name="rate"
Type="String"
Mandatory="False"
DefaultValue="1000000"
Description="The total throughput capacity of the wanlink."/>
</Parameters>
</Command>
</Category>
<Category Name="FIRE">
<Command Description="Generate traffic between two existing ports"
DisplayName="Create Layer-3"
Name="create_l3">
<Parameters>
<Parameter Name="name"
Type="String"
Mandatory="False"
DefaultValue="scr-test-1"
Description="Enter a name for the connection"/>
<Parameter Name="min_rate_a"
Type="String"
Mandatory="False"
DefaultValue="56000"
Description="Minimum transfer rate of side a"/>
<Parameter Name="min_rate_b"
Type="String"
Mandatory="False"
DefaultValue="56000"
Description="Minimum transfer rate of side b"/>
<Parameter Name="endp_a"
Type="String"
Mandatory="False"
DefaultValue="eth1"
Description="Station list"/>
<Parameter Name="endp_b"
Type="String"
Mandatory="False"
DefaultValue="eth2"
Description="Upstream port"/>
</Parameters>
</Command>
<Command Description="Initialize a dataplane test"
DisplayName="Dataplane Test"
Name="dataplane_test">
<Parameters>
<Parameter Name="instance_name"
Type="String"
Mandatory="False"
DefaultValue="dataplane-instance"
Description="The name for the dataplane test"/>
<Parameter Name="upstream"
Type="String"
Mandatory="False"
DefaultValue="1.1.eth1"
Description="The upstream port"/>
<Parameter Name="station"
Type="String"
Mandatory="False"
DefaultValue="1.1.eth2"
Description="The downstream port"/>
<Parameter Name="duration"
Type="String"
Mandatory="False"
DefaultValue="2s"
Description="The duration of the test (append 's' for seconds)"/>
<Parameter Name="download_speed"
Type="String"
Mandatory="False"
DefaultValue="10Mbps"
Description="The rate of upstream port (append Mbps/Bps)"/>
<Parameter Name="upload_speed"
Type="String"
Mandatory="False"
DefaultValue="0"
Description="The rate of downstream port (append Mbps/Bps)"/>
<Parameter Name="traffic_types"
Type="String"
Mandatory="False"
DefaultValue="UDP"
Description="The type of traffic (TCP/UDP)"/>
<Parameter Name="local_lf_report_dir"
Type="String"
Mandatory="False"
DefaultValue="tmp/my_report/"
Description="The LANforge directory to save generated reports"/>
<Parameter Name="output_report_dir"
Type="String"
Mandatory="False"
DefaultValue=""
Description="The server directory to save generated reports"/>
<Parameter Name="mgr"
Type="String"
Mandatory="False"
DefaultValue="localhost"
Description="The IP address to run the test on"/>
</Parameters>
</Command>
</Category>
</Layout>
</Driver>

View File

View File

@@ -0,0 +1,7 @@
mock
cloudshell-shell-core>=5.0.3,<6.0.0
cloudshell-automation-api
cloudshell-orch-core
requests
paramiko
scp

View File

View File

@@ -0,0 +1,7 @@
nose
coverage
unittest2
mock
teamcity-messages
jsonpickle
nose-exclude

View File

@@ -0,0 +1 @@
# -*- coding: utf-8 -*-

View File

@@ -0,0 +1,27 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tests for `LanforgeResourceDriver`
"""
import unittest
from driver import LanforgeResourceDriver
class TestLanforgeResourceDriver(unittest.TestCase):
    # Placeholder test suite generated by shellfoundry; no assertions yet.

    def setUp(self):
        # No fixtures needed while the suite is empty.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def test_000_something(self):
        # TODO: exercise LanforgeResourceDriver commands with mocked contexts.
        pass


if __name__ == '__main__':
    import sys
    sys.exit(unittest.main())

View File

@@ -339,6 +339,14 @@ clean_old_kernels() {
echo "/lib/modules/$f"
done | xargs rm -rf
fi
if [ -d "/boot2" ]; then
rm -rf /boot2/*
rsync -a /boot/. /boot2/
local dev2=`df /boot2/ |awk '/dev/{print $1}'`
if [ x$dev2 != x ]; then
/usr/sbin/grub2-install $dev2 ||:
fi
fi
}
clean_core_files() {

44
desktop-hostname.bash Executable file
View File

@@ -0,0 +1,44 @@
#!/bin/bash
# Render host identification (hostname, LANforge version, kernel, IP/MAC)
# onto the MATE desktop background image.
Q='"'
A="'"
# Set a key under the org.mate.background gsettings schema.
function set_background() {
    gsettings set "org.mate.background" "$1" "$2"
}
SourceFile="/usr/share/backgrounds/mate/desktop/Ubuntu-Mate-Cold-no-logo.png"
DesktopFile="/home/lanforge/desktop.png"
my_hostname=`hostname`
# OS description, from /etc/os-release VERSION= line when available.
my_os="[os]"
if [ -f /etc/os-release ]; then
    my_os=`egrep '^VERSION=' /etc/os-release`
    if [ ! -z "$my_os" ]; then
        my_os="${my_os/VERSION=/}"
        my_os="${my_os//\"/}"
    fi
fi
# Installed LANforge version, if the installer recorded one.
my_inver="[lfver]"
if [ -f "/var/www/html/installed-ver.txt" ]; then
    my_inver=`cat /var/www/html/installed-ver.txt`;
fi
my_kver=`uname -r`
# Interface carrying the default route, and its address/MAC.
my_dev=`ip ro sho | awk '/default via/{print $5}'`
my_ip=`ip a sho $my_dev | awk '/inet /{print $2}'`
my_mac=`ip a sho | grep -A1 "$my_dev" | awk '/ether /{print $2}'`
# Derive a text color from the last 6 hex digits of the MAC address.
fill_color=${my_mac//:/}
fill_color=${fill_color:6:12}
X=220
Y=150
# Draw the labels onto the stock wallpaper and scale to 1600x900.
convert -pointsize 80 -fill "#$fill_color" -stroke black -strokewidth 1 \
    -draw "text $X,$Y \"$my_hostname\"" \
    -draw "text $X,$(( Y + 75 )) \"LANForge $my_inver\"" \
    -draw "text $X,$(( Y + 155 )) \"Kernel $my_kver $my_os\"" \
    -draw "text $X,$(( Y + 225 )) \"$my_dev $my_ip\"" \
    -draw "text $X,$(( Y + 295 )) \"$my_mac\"" \
    $SourceFile \
    -scale 1600x900 \
    $DesktopFile
set_background picture-filename ${A}${DesktopFile}${A}
set_background picture-options 'stretched'
#

View File

259
lanforge_client/logg.py Normal file
View File

@@ -0,0 +1,259 @@
import sys
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
import logging
from logging import Logger
import time
import datetime
import inspect
# import traceback
# from typing import Optional
from pprint import pprint # pformat
from .strutil import nott # iss
class Logg:
    """
    This method presently defines various log "levels" but does not yet express
    ability to log "areas" or "keywords".

    TODO:
    - LOG BUFFER a list that only holds last 100 lines logged to it. This is useful
      for emitting when an exception happens in a loop and you are not interested
      in the first 10e6 log entries
    - KEYWORD LOGGING: pair a --debug_kw=keyword,keyword set on the command line to only
      receive log output from log statements matching those keywords
    - CLASS/METHOD/FUNCTION logging: --debug_fn=class.method,module.func set on the command
      line that activates logging in the method or function listed. See inspection techniques
      listed near this SO question https://stackoverflow.com/a/5104943/11014343
    - BITWISE LOG LEVELS: --log_level=DEBUG|FILEIO|JSON|HTTP a maskable combination of enum_bitmask
      names that combine to a value that can trigger logging.

    These reserved words may not be used as tags:
    debug, debugging, debug_log, digest, file, gui, http, json, log, method, tag

    Protocol logging levels:
    * always: X-Errors( stops script on halt_on_errors)
    * timeouts: can be configured as halt level errors
    - digest (POST set_port / GET /ports )
    - url (POST /cli-json/set_port / GET /port/1/2/3/?fields)
    - json (POST /cli-json/set_port { a:b } ; GET /port/1/2/3?fields {results interfaces[]}
    - http that plus X-Warnings and ALL headers
    - gui Xtra debugging messages generated by LANforgeGUI

    Please also consider how log messages can be formatted:
    https://stackoverflow.com/a/20112491/11014343:
    logging.basicConfig(format="[%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s")
    """
    DEFAULT_LEVEL = logging.WARNING
    # Class-wide fallback logger used by the classmethod API (Logg.logg / by_tag / by_method).
    DefaultLogger = logging.getLogger(__name__)
    method_name_list: list = []  # list[str]: function names allowed to emit via by_method()
    tag_list: list = []          # list[str]: tags allowed to emit via by_tag()
    reserved_tags: list = [      # list[str]: tags that register_tag() refuses
        "debug",
        "debugging",
        "debug_log",
        "digest",
        "file",
        "gui",
        "http",
        "json",
        "log",
        "method",
        "tag"
    ]

    def __init__(self,
                 log_level: int = DEFAULT_LEVEL,
                 name: str = None,
                 filename: str = None,
                 debug: bool = False):
        """----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
        Base class that can be used to send logging messages elsewhere. extend this
        in order to send log messages from this framework elsewhere.

        :param log_level: python logging priority for this instance's logger
        :param name: logger name; any '@' is replaced with the start timestamp
        :param filename: optional file to append log output to (via basicConfig)
        :param debug: when true, emit a startup message through Logg.logg()
        ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----"""
        self.level = log_level
        self.logger: Logger
        # self.start_time = datetime.now() # py 3.9 maybe?
        self.start_time = datetime.datetime.now()  # py 3.9 maybe?
        # FIX: the format string previously contained the invalid directive
        # '%:' ("%I:%M%:%S"), which produced garbage in the timestamp.
        self.start_time_str = time.strftime("%Y%m%d-%I:%M:%S")
        if name:
            self.name = name
            if "@" in name:
                self.name = name.replace('@', self.start_time_str)
        else:
            self.name = "started-" + self.start_time_str
        self.logger = Logger(name, level=log_level)
        if filename:
            logging.basicConfig(filename=filename, filemode="a")
        if debug:
            # FIX: the message template ignored its 'filename' argument and
            # always printed "(unknown)"; include the actual destination.
            self.logg(level=logging.WARNING,
                      msg="Logger {name} begun to {filename}".format(name=name,
                                                                     filename=filename))

    @classmethod
    def logg(cls,
             level: int = logging.WARNING,
             tag: str = None,
             msg: str = None) -> None:
        """
        Use this *class method* to send logs to the DefaultLogger instance created when this class was created

        :param level: python logging priority
        :param tag: presently unused -- reserved for keyword-based filtering
        :param msg: text to send to logging channel; blank/None messages are dropped
        :return: None
        """
        if nott(msg):
            return
        if level == logging.CRITICAL:
            cls.DefaultLogger.critical(msg)
            return
        if level == logging.ERROR:
            cls.DefaultLogger.error(msg)
            return
        if level == logging.WARNING:
            cls.DefaultLogger.warning(msg)
            return
        if level == logging.INFO:
            cls.DefaultLogger.info(msg)
            return
        if level == logging.DEBUG:
            cls.DefaultLogger.debug(msg)
            return

    def by_level(self,
                 level: int = logging.WARNING,
                 msg: str = None):
        """
        Use this *instance* version of the method for logging when you have a specific logger
        customized for a purpose. Otherwise please use Logg.logg().

        :param level: python logging priority
        :param msg: text to send to logging channel
        :return: None
        """
        if nott(msg):
            return
        if level == logging.CRITICAL:
            self.logger.critical(msg)
            return
        if level == logging.ERROR:
            self.logger.error(msg)
            return
        if level == logging.WARNING:
            self.logger.warning(msg)
            return
        if level == logging.INFO:
            self.logger.info(msg)
            return
        if level == logging.DEBUG:
            self.logger.debug(msg)
            return
        print("UNKNOWN: " + msg)

    def error(self, message: str = None):
        """Convenience wrapper: log *message* at ERROR level."""
        if not message:
            return
        self.logg(level=logging.ERROR, msg=message)

    def warning(self, message: str = None):
        """Convenience wrapper: log *message* at WARNING level."""
        if not message:
            return
        self.logg(level=logging.WARNING, msg=message)

    def info(self, message: str = None):
        """Convenience wrapper: log *message* at INFO level."""
        if not message:
            return
        self.logg(level=logging.INFO, msg=message)

    def debug(self, message: str = None):
        """Convenience wrapper: log *message* at DEBUG level."""
        if not message:
            return
        self.logg(level=logging.DEBUG, msg=message)

    @classmethod
    def register_method_name(cls, methodname: str = None) -> None:
        """
        Use this method to register names of functions you want to allow logging from

        :param methodname: function/method name to whitelist for by_method()
        :return: None
        """
        if not methodname:
            return
        cls.method_name_list.append(methodname)
        if methodname not in cls.tag_list:
            cls.tag_list.append(methodname)

    @classmethod
    def register_tag(cls, tag: str = None) -> None:
        """
        Use this method to register keywords you want to allow logging from.
        There are a list of reserved tags which will not be accepted.

        :param tag: keyword to whitelist for by_tag()
        :return: None
        """
        if not tag:
            return
        if tag in cls.tag_list:
            return
        if tag in cls.reserved_tags:
            cls.logg(level=logging.ERROR,
                     msg=f"tag [{tag}] is reserved, ignoring")
            # FIX: actually ignore the reserved tag -- the code previously fell
            # through and appended it anyway, contradicting the message above.
            return
        # note: add directly to tag_list to append a reserved tag
        cls.tag_list.append(tag)

    @classmethod
    def by_method(cls, msg: str = None) -> None:
        """
        should only log if we're in the method_list
        reminder: https://stackoverflow.com/a/13514318/11014343
        import inspect
        import types
        from typing import cast
        this_fn_name = cat(types.FrameType, inspect.currentframe()).f_code.co_name

        :return: None
        """
        try:
            caller = inspect.currentframe().f_back.f_code.co_name
            if caller in cls.method_name_list:
                cls.logg(level=cls.DEFAULT_LEVEL, msg=f"[{caller}] {msg}")
        except Exception as e:
            pprint(e)
            pass

    @classmethod
    def by_tag(cls, tag: str = None, msg: str = None) -> None:
        """
        should only log if the tag has been registered in tag_list
        reminder: https://stackoverflow.com/a/13514318/11014343

        :return: None
        """
        if (not cls.tag_list) or (tag not in cls.tag_list):
            return
        cls.logg(level=cls.DEFAULT_LEVEL, msg=f"[{tag}] {msg}")

    def enable(self, reserved_tag: str = None) -> None:
        """Explicitly enable one of the reserved tags for this logger.

        This is the sanctioned path for turning on a reserved tag that
        register_tag() refuses.
        """
        if (not reserved_tag) or (reserved_tag not in self.reserved_tags):
            return
        if reserved_tag in self.tag_list:
            return
        self.tag_list.append(reserved_tag)

View File

@@ -0,0 +1,20 @@
def iss(text: str) -> bool:
    """
    :param text: string to test
    :return: True if text contains at least one non-whitespace character
    """
    if text is None:
        return False
    # a stripped string is empty exactly when the original is empty or
    # whitespace-only, so one truthiness check covers both original tests
    return bool(text.strip())
def nott(text: str) -> bool:
    """
    Logical negation of iss().

    :param text: string to test
    :return: True when text is None, empty, or whitespace-only
    """
    return not iss(text=text)

2
pipupgrade.sh Executable file
View File

@@ -0,0 +1,2 @@
#!/bin/bash
pip3 install --user -r requirements.txt --upgrade

View File

@@ -29,8 +29,8 @@ RecordInflux = InfluxRequest.RecordInflux
class CSVReader:
def read_csv(self,
file,
@staticmethod
def read_csv(file,
sep='\t'):
df = open(file).read().split('\n')
rows = list()
@@ -39,8 +39,8 @@ class CSVReader:
rows.append(x.split(sep))
return rows
def get_column(self,
df,
@staticmethod
def get_column(df,
value):
index = df[0].index(value)
values = []
@@ -48,7 +48,8 @@ class CSVReader:
values.append(row[index])
return values
def get_columns(self, df, targets):
@staticmethod
def get_columns(df, targets):
target_index = []
for item in targets:
target_index.append(df[0].index(item))
@@ -60,7 +61,8 @@ class CSVReader:
results.append(row_data)
return results
def to_html(self, df):
@staticmethod
def to_html(df):
html = ''
html = html + ('<table style="border:1px solid #ddd">'
'<colgroup>'
@@ -78,7 +80,8 @@ class CSVReader:
'</table>')
return html
def filter_df(self, df, column, expression, target):
@staticmethod
def filter_df(df, column, expression, target):
target_index = df[0].index(column)
counter = 0
targets = [0]
@@ -98,7 +101,8 @@ class CSVReader:
counter += 1
return list(map(df.__getitem__, targets))
def concat(self, dfs):
@staticmethod
def concat(dfs):
return list(itertools.chain.from_iterable(dfs))
@@ -204,7 +208,6 @@ class GhostRequest:
def custom_post(self,
folder,
authors,
title='custom'):
self.upload_images(folder)
head = '''This is a custom post created via a script'''
@@ -215,11 +218,9 @@ class GhostRequest:
text=head)
def kpi_to_ghost(self,
authors,
folders,
parent_folder=None,
title=None,
server_pull=None,
ghost_host=None,
port=22,
user_push=None,
@@ -227,13 +228,12 @@ class GhostRequest:
customer=None,
testbed=None,
test_run=None,
target_folders=list(),
target_folders=None,
grafana_token=None,
grafana_host=None,
grafana_port=3000,
grafana_datasource='InfluxDB',
grafana_bucket=None):
global dut_hw, dut_sw, dut_model, dut_serial
now = datetime.now()
@@ -502,7 +502,8 @@ class GhostRequest:
Influx Host: %s<br />
Influx Port: %s<br />
Influx Organization: %s<br />
Influx Bucket: %s<br />''' % (influx_error, self.influx_host, self.influx_port, self.influx_org, self.influx_bucket)
Influx Bucket: %s<br />''' % (
influx_error, self.influx_host, self.influx_port, self.influx_org, self.influx_bucket)
raw_test_tags = list()
test_tag_table = ''
@@ -524,8 +525,8 @@ class GhostRequest:
else:
column_name = column
dut_table_columns += (
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td><td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' % (
column_name, duts[column])
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td><td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' %
(column_name, duts[column])
)
dut_table = '<table width="700px" border="1" cellpadding="2" cellspacing="0" ' \

View File

@@ -3,9 +3,10 @@
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Class holds default settings for json requests -
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import logging
import sys
import os
import importlib
from pprint import pformat, PrettyPrinter
import urllib
from urllib import request
import json
@@ -14,10 +15,9 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
debug_printer = PrettyPrinter(indent=2)
class LFRequest:
@@ -27,6 +27,7 @@ class LFRequest:
post_data = No_Data
default_headers = {'Accept': 'application/json'}
proxies = None
logger = logging.getLogger(__name__)
def __init__(self, url=None,
uri=None,
@@ -62,7 +63,7 @@ class LFRequest:
# pprint.pprint(self.proxies)
if not url.startswith("http://") and not url.startswith("https://"):
print("No http:// or https:// found, prepending http:// to "+url)
self.logger.warning("No http:// or https:// found, prepending http:// to " + url)
url = "http://" + url
if uri is not None:
if not url.endswith('/') and not uri.startswith('/'):
@@ -76,16 +77,16 @@ class LFRequest:
if self.requested_url.find('//'):
protopos = self.requested_url.find("://")
self.requested_url = self.requested_url[:protopos + 2] + self.requested_url[protopos + 2:].replace("//", "/")
self.requested_url = self.requested_url[:protopos + 2] + self.requested_url[protopos + 2:].replace("//",
"/")
# finding '#' prolly indicates a macvlan (eth1#0)
# finding ' ' prolly indicates a field name that should imply %20
if (self.requested_url.find('#') >= 1):
if self.requested_url.find('#') >= 1:
self.requested_url = self.requested_url.replace('#', '%23')
if (self.requested_url.find(' ') >= 1):
if self.requested_url.find(' ') >= 1:
self.requested_url = self.requested_url.replace(' ', '+')
if self.debug:
print("new LFRequest[%s]" % self.requested_url )
self.logger.debug("new LFRequest[%s]" % self.requested_url)
# request first url on stack
def formPost(self, show_error=True, debug=False, die_on_error_=False):
@@ -94,7 +95,7 @@ class LFRequest:
def form_post(self, show_error=True, debug=False, die_on_error_=False):
if self.die_on_error:
die_on_error_ = True
if (debug == False) and (self.debug == True):
if not debug and self.debug:
debug = True
responses = []
urlenc_data = ""
@@ -104,20 +105,17 @@ class LFRequest:
opener = request.build_opener(request.ProxyHandler(self.proxies))
request.install_opener(opener)
if (debug):
print("formPost: url: "+self.requested_url)
if ((self.post_data != None) and (self.post_data is not self.No_Data)):
self.logger.debug("formPost: url: " + self.requested_url)
if (self.post_data is not None) and (self.post_data is not self.No_Data):
urlenc_data = urllib.parse.urlencode(self.post_data).encode("utf-8")
if (debug):
print("formPost: data looks like:" + str(urlenc_data))
print("formPost: url: "+self.requested_url)
self.logger.debug("formPost: data looks like:" + str(urlenc_data))
self.logger.debug("formPost: url: " + self.requested_url)
myrequest = request.Request(url=self.requested_url,
data=urlenc_data,
headers=self.default_headers)
else:
myrequest = request.Request(url=self.requested_url, headers=self.default_headers)
print("No data for this formPost?")
self.logger.error("No data for this formPost?")
myrequest.headers['Content-type'] = 'application/x-www-form-urlencoded'
@@ -143,15 +141,16 @@ class LFRequest:
error_list_=self.error_list,
debug_=debug)
if (die_on_error_ == True) or (self.die_on_error == True):
if die_on_error_ or self.die_on_error:
exit(1)
return None
def jsonPost(self, show_error=True, debug=False, die_on_error_=False, response_json_list_=None):
return self.json_post(show_error=show_error, debug=debug, die_on_error_=die_on_error_, response_json_list_=response_json_list_)
return self.json_post(show_error=show_error, debug=debug, die_on_error_=die_on_error_,
response_json_list_=response_json_list_)
def json_post(self, show_error=True, debug=False, die_on_error_=False, response_json_list_=None, method_='POST'):
if (debug == False) and (self.debug == True):
if not debug and self.debug:
debug = True
if self.die_on_error:
die_on_error_ = True
@@ -160,14 +159,14 @@ class LFRequest:
opener = urllib.request.build_opener(request.ProxyHandler(self.proxies))
urllib.request.install_opener(opener)
if ((self.post_data != None) and (self.post_data is not self.No_Data)):
if (self.post_data is not None) and (self.post_data is not self.No_Data):
myrequest = request.Request(url=self.requested_url,
method=method_,
data=json.dumps(self.post_data).encode("utf-8"),
headers=self.default_headers)
else:
myrequest = request.Request(url=self.requested_url, headers=self.default_headers)
print("No data for this jsonPost?")
self.logger.error("No data for this jsonPost?")
myrequest.headers['Content-type'] = 'application/json'
@@ -176,23 +175,24 @@ class LFRequest:
try:
resp = urllib.request.urlopen(myrequest)
resp_data = resp.read().decode('utf-8')
if (debug and die_on_error_):
print("----- LFRequest::json_post:128 debug: --------------------------------------------")
print("URL: %s :%d "% (self.requested_url, resp.status))
if debug and die_on_error_:
self.logger.debug("----- LFRequest::json_post:128 debug: --------------------------------------------")
self.logger.debug("URL: <%s> status: %d " % (self.requested_url, resp.status))
if resp.status != 200:
LFUtils.debug_printer.pprint(resp.getheaders())
print("----- resp_data:128 -------------------------------------------------")
print(resp_data)
print("-------------------------------------------------")
self.logger.debug(pformat(resp.getheaders()))
self.logger.debug("----- resp_data:128 -------------------------------------------------")
self.logger.debug(resp_data)
self.logger.debug("-------------------------------------------------")
responses.append(resp)
if response_json_list_ is not None:
if type(response_json_list_) is not list:
raise ValueError("reponse_json_list_ needs to be type list")
j = json.loads(resp_data)
if debug:
print("----- LFRequest::json_post:140 debug: --------------------------------------------")
LFUtils.debug_printer.pprint(j)
print("-------------------------------------------------")
self.logger.debug(
"----- LFRequest::json_post:140 debug: --------------------------------------------")
self.logger.debug(pformat(j))
self.logger.debug("-------------------------------------------------")
response_json_list_.append(j)
return responses[0]
@@ -210,7 +210,7 @@ class LFRequest:
error_=uerror,
debug_=debug)
if die_on_error_ == True:
if die_on_error_:
exit(1)
return None
@@ -222,17 +222,11 @@ class LFRequest:
method_='PUT')
def json_delete(self, show_error=True, debug=False, die_on_error_=False, response_json_list_=None):
return self.get_as_json(debug_=debug,
die_on_error_=die_on_error_,
method_='DELETE')
return self.get_as_json(method_='DELETE')
def get(self, debug=False, die_on_error_=False, method_='GET'):
if self.debug == True:
debug = True
if self.die_on_error == True:
die_on_error_ = True
if debug:
print("LFUtils.get: url: "+self.requested_url)
def get(self, method_='GET'):
if self.debug:
self.logger.debug("LFUtils.get: url: " + self.requested_url)
# https://stackoverflow.com/a/59635684/11014343
if (self.proxies is not None) and (len(self.proxies) > 0):
@@ -254,7 +248,7 @@ class LFRequest:
responses_=myresponses,
error_=error,
error_list_=self.error_list,
debug_=debug)
debug_=self.debug)
except urllib.error.URLError as uerror:
print_diagnostics(url_=self.requested_url,
@@ -262,26 +256,24 @@ class LFRequest:
responses_=myresponses,
error_=uerror,
error_list_=self.error_list,
debug_=debug)
debug_=self.debug)
if die_on_error_ == True:
if self.die_on_error:
exit(1)
return None
def getAsJson(self, die_on_error_=False, debug_=False):
return self.get_as_json(die_on_error_=die_on_error_, debug_=debug_)
def getAsJson(self):
return self.get_as_json()
def get_as_json(self, die_on_error_=False, debug_=False, method_='GET'):
responses = []
j = self.get(debug=debug_, die_on_error_=die_on_error_, method_=method_)
responses.append(j)
def get_as_json(self, method_='GET'):
responses = list()
responses.append(self.get(method_=method_))
if len(responses) < 1:
if debug_ and self.has_errors():
if self.debug and self.has_errors():
self.print_errors()
return None
if responses[0] == None:
if debug_:
print("No response from "+self.requested_url)
if responses[0] is None:
self.logger.debug("No response from " + self.requested_url)
return None
json_data = json.loads(responses[0].read().decode('utf-8'))
return json_data
@@ -302,10 +294,11 @@ class LFRequest:
def print_errors(self):
if not self.has_errors:
print("---------- no errors ----------")
self.logger.debug("---------- no errors ----------")
return
for err in self.error_list:
print("error: %s" % err)
self.logger.error("error: %s" % err)
def plain_get(url_=None, debug_=False, die_on_error_=False, proxies_=None):
"""
@@ -340,25 +333,25 @@ def plain_get(url_=None, debug_=False, die_on_error_=False, proxies_=None):
error_=uerror,
debug_=debug_)
if die_on_error_ == True:
if die_on_error_:
exit(1)
return None
def print_diagnostics(url_=None, request_=None, responses_=None, error_=None, error_list_=None, debug_=False):
if debug_:
print("LFRequest::print_diagnostics: error_.__class__: %s"%error_.__class__)
LFUtils.debug_printer.pprint(error_)
logger = logging.getLogger(__name__)
# logger.error("LFRequest::print_diagnostics: error_.__class__: %s"%error_.__class__)
# logger.error(pformat(error_))
if url_ is None:
print("WARNING LFRequest::print_diagnostics: url_ is None")
logger.warning("WARNING LFRequest::print_diagnostics: url_ is None")
if request_ is None:
print("WARNING LFRequest::print_diagnostics: request_ is None")
logger.warning("WARNING LFRequest::print_diagnostics: request_ is None")
if error_ is None:
print("WARNING LFRequest::print_diagnostics: error_ is None")
logger.warning("WARNING LFRequest::print_diagnostics: error_ is None")
method = 'NA'
if (hasattr(request_, 'method')):
if hasattr(request_, 'method'):
method = request_.method
err_code = 0
err_reason = 'NA'
@@ -376,52 +369,52 @@ def print_diagnostics(url_=None, request_=None, responses_=None, error_=None, er
if err_code == 404:
xerrors.append("[%s HTTP %s] <%s> : %s" % (method, err_code, err_full_url, err_reason))
else:
if (len(err_headers) > 0):
if len(err_headers) > 0:
for headername in sorted(err_headers.keys()):
if headername.startswith("X-Error-"):
xerrors.append("%s: %s" % (headername, err_headers.get(headername)))
if len(xerrors) > 0:
print(" = = LANforge Error Messages = =")
logger.error(" = = LANforge Error Messages = =")
logger.error(" = = URL: %s" % err_full_url)
for xerr in xerrors:
print(xerr)
logger.error(xerr)
if (error_list_ is not None) and isinstance(error_list_, list):
error_list_.append(xerr)
print(" = = = = = = = = = = = = = = = =")
logger.error(" = = = = = = = = = = = = = = = =")
if (error_.__class__ is urllib.error.HTTPError):
if debug_:
print("----- LFRequest: HTTPError: --------------------------------------------")
print("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))
if error_.__class__ is urllib.error.HTTPError:
logger.debug("----- LFRequest: HTTPError: --------------------------------------------")
logger.debug("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))
if err_code == 404:
if (error_list_ is not None) and isinstance(error_list_, list):
error_list_.append("[%s HTTP %s] <%s> : %s" % (method, err_code, err_full_url, err_reason))
else:
if debug_:
print(" Content-type:[%s] Accept[%s]" % (request_.get_header('Content-type'), request_.get_header('Accept')))
logger.debug(
" Content-type:[%s] Accept[%s]" % (request_.get_header('Content-type'), request_.get_header('Accept')))
if hasattr(request_, "data") and (request_.data is not None):
print(" Data:")
LFUtils.debug_printer.pprint(request_.data)
logger.debug(" Data:")
logger.debug(debug_printer.pformat(request_.data))
elif debug_:
print(" <no request data>")
logger.debug(" <no request data>")
if debug_ and (len(err_headers) > 0):
if len(err_headers) > 0:
# the HTTPError is of type HTTPMessage a subclass of email.message
print(" Response Headers: ")
logger.debug(" Response Headers: ")
for headername in sorted(err_headers.keys()):
print(" %s: %s" % (headername, err_headers.get(headername)))
logger.debug(" %s: %s" % (headername, err_headers.get(headername)))
if len(responses_) > 0:
print("----- Response: --------------------------------------------------------")
LFUtils.debug_printer.pprint(responses_[0].reason)
if debug_:
print("------------------------------------------------------------------------")
logger.debug("----- Response: --------------------------------------------------------")
logger.debug(debug_printer.pformat(responses_[0].reason))
logger.debug("------------------------------------------------------------------------")
return
if (error_.__class__ is urllib.error.URLError):
print("----- LFRequest: URLError: ---------------------------------------------")
print("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))
print("------------------------------------------------------------------------")
if error_.__class__ is urllib.error.URLError:
logger.error("----- LFRequest: URLError: ---------------------------------------------")
logger.error("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))
logger.error("------------------------------------------------------------------------")
# ~LFRequest

View File

@@ -12,15 +12,16 @@ from time import sleep
from random import seed, randint
import re
import ipaddress
import logging
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
Logg = importlib.import_module("lanforge_client.logg") # .Logg
debug_printer = pprint.PrettyPrinter(indent=2)
@@ -30,6 +31,7 @@ ADD_STA_FLAGS_DOWN_WPA2 = 68719477760
REPORT_TIMER_MS_FAST = 1500
REPORT_TIMER_MS_SLOW = 3000
# Used for Speed
def parse_size_bps(size_val):
if isinstance(size_val, str):
@@ -52,6 +54,7 @@ def parse_size_bps(size_val):
else:
return size_val
# Used for Size of file
def parse_size(size_val):
if isinstance(size_val, str):
@@ -80,22 +83,14 @@ class PortEID:
port_id = 0
port_name = ""
def __init__(self, p_resource=1, p_port_id=0, p_port_name=""):
resource = p_resource
port_id = p_port_id
port_name = p_port_name
def __init__(self, json_response):
if json_response == None:
if json_response is None:
raise Exception("No json input")
json_s = json_response
if json_response['interface'] != None:
if json_response['interface'] is not None:
json_s = json_response['interface']
debug_printer(json_s)
resource = json_s['resource']
port_id = json_s['id']
port_name = json_s['name']
# end class PortEID
@@ -103,6 +98,7 @@ class PortEID:
def staNewDownStaRequest(sta_name, resource_id=1, radio="wiphy0", ssid="", passphrase="", debug_on=False):
return sta_new_down_sta_request(sta_name, resource_id, radio, ssid, passphrase, debug_on)
def sta_new_down_sta_request(sta_name, resource_id=1, radio="wiphy0", ssid="", passphrase="", debug_on=False):
"""
For use with add_sta. If you don't want to generate mac addresses via patterns (xx:xx:xx:xx:81:*)
@@ -132,6 +128,7 @@ def sta_new_down_sta_request(sta_name, resource_id=1, radio="wiphy0", ssid="", p
def portSetDhcpDownRequest(resource_id, port_name, debug_on=False):
return port_set_dhcp_down_request(resource_id, port_name, debug_on)
def port_set_dhcp_down_request(resource_id, port_name, debug_on=False):
"""
See http://localhost:8080/help/set_port
@@ -156,6 +153,7 @@ def port_set_dhcp_down_request(resource_id, port_name, debug_on=False):
def portDhcpUpRequest(resource_id, port_name, debug_on=False):
return port_dhcp_up_request(resource_id, port_name, debug_on)
def port_dhcp_up_request(resource_id, port_name, debug_on=False):
"""
See http://localhost:8080/help/set_port
@@ -181,6 +179,7 @@ def port_dhcp_up_request(resource_id, port_name, debug_on=False):
def portUpRequest(resource_id, port_name, debug_on=False):
return port_up_request(resource_id, port_name, debug_on)
def port_up_request(resource_id, port_name, debug_on=False):
"""
See http://localhost:8080/help/set_port
@@ -201,9 +200,11 @@ def port_up_request(resource_id, port_name, debug_on=False):
debug_printer.pprint(data)
return data
def portDownRequest(resource_id, port_name, debug_on=False):
return port_down_request(resource_id, port_name, debug_on)
def port_down_request(resource_id, port_name, debug_on=False):
"""
Does not change the use_dhcp flag
@@ -226,6 +227,7 @@ def port_down_request(resource_id, port_name, debug_on=False):
debug_printer.pprint(data)
return data
def port_reset_request(resource_id, port_name, debug_on=False):
"""
Does not change the use_dhcp flag
@@ -249,6 +251,7 @@ def port_reset_request(resource_id, port_name, debug_on=False):
def generateMac(parent_mac, random_octet, debug=False):
return generate_mac(parent_mac=parent_mac, random_octet=random_octet, debug=debug)
def generate_mac(parent_mac, random_octet, debug=False):
if debug:
print("************ random_octet: %s **************" % (random_octet))
@@ -272,7 +275,8 @@ def portNameSeries(prefix_="sta", start_id_=0, end_id_=1, padding_number_=10000,
:param padding_number_:
:return:
"""
return port_name_series(prefix=prefix_, start_id=start_id_, end_id=end_id_, padding_number=padding_number_, radio=radio)
return port_name_series(prefix=prefix_, start_id=start_id_, end_id=end_id_, padding_number=padding_number_,
radio=radio)
def port_name_series(prefix="sta", start_id=0, end_id=1, padding_number=10000, radio=None):
@@ -312,9 +316,11 @@ def gen_ip_series(ip_addr, netmask, num_ips=None):
chosen_ips.append(ip_list[i])
return chosen_ips
def generateRandomHex():
return generate_random_hex()
# generate random hex if you need it for mac addresses
def generate_random_hex():
# generate a few random numbers and convert them into hex:
@@ -370,6 +376,7 @@ def port_list_to_alias_map(json_list, debug_=False):
return reverse_map
def list_to_alias_map(json_list=None, from_element=None, debug_=False):
reverse_map = {}
if (json_list is None) or (len(json_list) < 1):
@@ -419,9 +426,9 @@ def find_port_eids(resource_id=1, base_url="http://localhost:8080", port_names=(
port_url = "/port/1"
for port_name in port_names:
uri = "%s/%s/%s" % (port_url, resource_id, port_name)
lf_r = LFRequest.LFRequest(base_url, uri)
lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug)
try:
response = lf_r.getAsJson(debug)
response = lf_r.getAsJson()
if response is None:
continue
port_eids.append(PortEID(response))
@@ -443,9 +450,9 @@ def wait_until_ports_admin_down(resource_id=1, base_url="http://localhost:8080",
up_stations = []
for port_name in port_list:
uri = "%s/%s/%s?fields=device,down" % (port_url, resource_id, port_name)
lf_r = LFRequest.LFRequest(base_url, uri)
json_response = lf_r.getAsJson(debug_=False)
if json_response == None:
lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug_)
json_response = lf_r.getAsJson()
if json_response is None:
if debug_:
print("port %s disappeared" % port_name)
continue
@@ -460,6 +467,7 @@ def wait_until_ports_admin_down(resource_id=1, base_url="http://localhost:8080",
def waitUntilPortsAdminUp(resource_id=1, base_url="http://localhost:8080", port_list=()):
return wait_until_ports_admin_up(resource_id=resource_id, base_url=base_url, port_list=port_list)
def wait_until_ports_admin_up(resource_id=1, base_url="http://localhost:8080", port_list=(), debug_=False):
print("Waiting until ports appear admin-up...")
down_stations = port_list.copy()
@@ -470,9 +478,9 @@ def wait_until_ports_admin_up(resource_id=1, base_url="http://localhost:8080", p
down_stations = []
for port_name in port_list:
uri = "%s/%s/%s?fields=device,down" % (port_url, resource_id, port_name)
lf_r = LFRequest.LFRequest(base_url, uri)
json_response = lf_r.getAsJson(debug_=False)
if json_response == None:
lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug_)
json_response = lf_r.getAsJson()
if json_response is None:
if debug_:
print("port %s appeared" % port_name)
continue
@@ -483,9 +491,11 @@ def wait_until_ports_admin_up(resource_id=1, base_url="http://localhost:8080", p
sleep(1)
return None
def waitUntilPortsDisappear(base_url="http://localhost:8080", port_list=(), debug=False):
wait_until_ports_disappear(base_url, port_list, debug)
def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=(), debug=False):
if (port_list is None) or (len(port_list) < 1):
if debug:
@@ -511,7 +521,8 @@ def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=(), d
temp_names_by_resource[resource_id] = []
port_name = eid[2]
temp_names_by_resource[resource_id].append(port_name)
temp_query_by_resource[resource_id] = "%s/%s/%s?fields=alias" % (url, resource_id, ",".join(temp_names_by_resource[resource_id]))
temp_query_by_resource[resource_id] = "%s/%s/%s?fields=alias" % (
url, resource_id, ",".join(temp_names_by_resource[resource_id]))
if debug:
pprint.pprint(("temp_query_by_resource", temp_query_by_resource))
while len(found_stations) > 0:
@@ -523,8 +534,8 @@ def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=(), d
("check_url", check_url),
])
lf_r = LFRequest.LFRequest(base_url, check_url, debug_=debug)
json_response = lf_r.get_as_json(debug_=debug, die_on_error_=False)
if (json_response == None):
json_response = lf_r.get_as_json()
if json_response is None:
print("LFUtils::wait_until_ports_disappear:: Request returned None: [{}]".format(base_url + check_url))
else:
if debug:
@@ -554,15 +565,17 @@ def waitUntilPortsAppear(base_url="http://localhost:8080", port_list=(), debug=F
"""
return wait_until_ports_appear(base_url, port_list, debug=debug)
def name_to_eid(input, non_port=False):
def name_to_eid(eid_input, non_port=False):
rv = [1, 1, "", ""]
info = []
if (input is None) or (input == ""):
raise ValueError("name_to_eid wants eid like 1.1.sta0 but given[%s]" % input)
if type(input) is not str:
raise ValueError("name_to_eid wants string formatted like '1.2.name', not a tuple or list or [%s]" % type(input))
if (eid_input is None) or (eid_input == ""):
raise ValueError("name_to_eid wants eid like 1.1.sta0 but given[%s]" % eid_input)
if type(eid_input) is not str:
raise ValueError(
"name_to_eid wants string formatted like '1.2.name', not a tuple or list or [%s]" % type(eid_input))
info = input.split('.')
info = eid_input.split('.')
if len(info) == 1:
rv[2] = info[0] # just port name
return rv
@@ -603,6 +616,7 @@ def name_to_eid(input, non_port=False):
return rv
def wait_until_ports_appear(base_url="http://localhost:8080", port_list=(), debug=False):
"""
Use this method to pause until the LANforge system has caught up and implemented the
@@ -633,21 +647,22 @@ def wait_until_ports_appear(base_url="http://localhost:8080", port_list=(), debu
port_name = eid[2]
# print("waiting for sta sta "+port_eid)
uri = "%s/%s/%s" % (port_url, resource_id, port_name)
lf_r = LFRequest.LFRequest(base_url, uri)
json_response = lf_r.getAsJson(debug_=False)
if (json_response != None):
lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug)
json_response = lf_r.getAsJson()
if json_response is not None:
found_stations.append(port_name)
else:
lf_r = LFRequest.LFRequest(base_url, ncshow_url)
lf_r = LFRequest.LFRequest(base_url, ncshow_url, debug_=debug)
lf_r.addPostData({"shelf": shelf, "resource": resource_id, "port": port_name, "probe_flags": 5})
lf_r.jsonPost()
if (len(found_stations) < len(port_list)):
if len(found_stations) < len(port_list):
sleep(2)
if debug:
print("These stations appeared: " + ", ".join(found_stations))
return
def wait_until_endps(base_url="http://localhost:8080", endp_list=(), debug=False):
"""
@@ -674,12 +689,12 @@ def wait_until_endps(base_url="http://localhost:8080", endp_list=(), debug=False
port_name = eid[2]
uri = "%s/%s/%s" % (port_url, resource_id, port_name)
lf_r = LFRequest.LFRequest(base_url, uri)
json_response = lf_r.getAsJson(debug_=False)
if (json_response != None):
lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug)
json_response = lf_r.getAsJson()
if json_response is not None:
found_stations.append(port_name)
else:
lf_r = LFRequest.LFRequest(base_url, ncshow_url)
lf_r = LFRequest.LFRequest(base_url, ncshow_url, debug_=debug)
lf_r.addPostData({"shelf": shelf, "resource": resource_id, "port": port_name, "flags": 1})
lf_r.formPost()
if (len(found_stations) < len(endp_list)):
@@ -698,7 +713,7 @@ def remove_port(resource, port_name, baseurl="http://localhost:8080/", debug=Fal
if debug:
print("Removing port %d.%s" % (resource, port_name))
url = "/cli-json/rm_vlan"
lf_r = LFRequest.LFRequest(baseurl, url)
lf_r = LFRequest.LFRequest(baseurl, url, debug_=debug)
lf_r.addPostData({
"shelf": 1,
"resource": resource,
@@ -720,7 +735,7 @@ def remove_cx(baseurl, cx_names, debug=False):
"test_mgr": "all",
"cx_name": name
}
lf_r = LFRequest.LFRequest(baseurl, url)
lf_r = LFRequest.LFRequest(baseurl, url, debug_=debug)
lf_r.addPostData(data)
lf_r.jsonPost(debug)
@@ -733,7 +748,7 @@ def remove_endps(baseurl, endp_names, debug=False):
if debug:
print("Removing endp %s" % ", ".join(endp_names))
url = "/cli-json/rm_endp"
lf_r = LFRequest.LFRequest(baseurl, url)
lf_r = LFRequest.LFRequest(baseurl, url, debug_=debug)
for name in endp_names:
data = {
"endp_name": name

View File

@@ -10,24 +10,33 @@ import random
import string
import datetime
import argparse
import re
import logging
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
debug_printer = pprint.PrettyPrinter(indent=2)
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
Logg = importlib.import_module("lanforge_client.logg")
if os.environ.get("LF_USE_AUTOGEN") == 1:
lanforge_api = importlib.import_module("lanforge_client.lanforge_api")
LFSession = lanforge_api.LFSession
class LFCliBase:
SHOULD_RUN = 0 # indicates normal operation
SHOULD_QUIT = 1 # indicates to quit loops, close files, send SIGQUIT to threads and return
SHOULD_HALT = 2 # indicates to quit loops, send SIGABRT to threads and exit
# - LOGGING -
_logger = logging.getLogger(__name__)
# do not use `super(LFCLiBase,self).__init__(self, host, port, _debug)
# that is py2 era syntax and will force self into the host variable, making you
# very confused.
@@ -37,7 +46,9 @@ class LFCliBase:
_exit_on_fail=False,
_local_realm=None,
_proxy_str=None,
_capture_signal_list=[]):
_capture_signal_list=None):
if _capture_signal_list is None:
_capture_signal_list = []
self.fail_pref = "FAILED: "
self.pass_pref = "PASSED: "
self.lfclient_host = _lfjson_host
@@ -48,7 +59,7 @@ class LFCliBase:
self.proxy = {}
self.adjust_proxy(_proxy_str)
if (_local_realm is not None):
if _local_realm:
self.local_realm = _local_realm
# if (_debug):
@@ -69,7 +80,7 @@ class LFCliBase:
if len(_capture_signal_list) > 0:
for zignal in _capture_signal_list:
self.captured_signal(zignal, self.my_captured_signal)
self.captured_signal(zignal)
#
def _finish(self):
@@ -136,7 +147,8 @@ class LFCliBase:
print("sending signal %s to thread %s" % (signum, name))
# do a thing
def my_captured_signal(self, signum):
@staticmethod
def my_captured_signal(signum):
"""
Override me to process signals, otherwise superclass signal handler is called.
You may use _finish() or _halt() to indicate finishing soon or halting immediately.
@@ -164,6 +176,42 @@ class LFCliBase:
def clear_test_results(self):
self.test_results.clear()
# - LOGGING - we want to remove old logging code
def log_register_method_name(self, method_name=None):
if not method_name:
return
if os.environ.get("LF_USE_AUTOGEN") == 1:
Logg.register_method_name(method_name=method_name)
else:
if method_name not in self._method_name_list:
self._method_name_list.append(method_name)
if method_name not in self._tag_list:
self._tag_list.append(method_name)
def log_register_tag(self, tag=None):
if not tag:
return
if os.environ.get("LF_USE_AUTOGEN") == 1:
Logg.register_tag(tag=tag)
else:
if tag not in self._tag_list:
self._tag_list.append(tag)
self._logger.register_method_name(tag=tag)
def log_enable(self, reserved_tag=None):
if os.environ.get("LF_USE_AUTOGEN") == 1:
Logg.enable(reserved_tag=reserved_tag)
else:
self.log_register_tag(reserved_tag)
@staticmethod
def log_set_filename(filename=None):
if not filename:
return
logging.basicConfig(filename=filename)
# - END LOGGING -
def json_post(self, _req_url, _data, debug_=False, suppress_related_commands_=None, response_json_list_=None):
"""
send json to the LANforge client
@@ -191,7 +239,7 @@ class LFCliBase:
del _data['suppress_postexec_cli']
if 'suppress_postexec_method' in _data:
del _data['suppress_postexec_method']
elif suppress_related_commands_ == False:
elif not suppress_related_commands_:
_data['suppress_preexec_cli'] = False
_data['suppress_preexec_method'] = False
_data['suppress_postexec_cli'] = False
@@ -204,7 +252,7 @@ class LFCliBase:
lf_r.addPostData(_data)
if debug_:
LFUtils.debug_printer.pprint(_data)
debug_printer.pprint(_data)
json_response = lf_r.json_post(show_error=debug_,
debug=debug_,
response_json_list_=response_json_list_,
@@ -242,7 +290,7 @@ class LFCliBase:
die_on_error_=self.exit_on_error)
lf_r.addPostData(_data)
if debug_:
LFUtils.debug_printer.pprint(_data)
debug_printer.pprint(_data)
json_response = lf_r.json_put(show_error=self.debug,
debug=debug_,
response_json_list_=response_json_list_,
@@ -259,23 +307,22 @@ class LFCliBase:
exit(1)
return json_response
def json_get(self, _req_url, debug_=False):
debug_ |= self.debug
def json_get(self, _req_url, debug_=None):
# if debug_:
# print("json_get: "+_req_url)
# print("json_get: proxies:")
# pprint.pprint(self.proxy)
if debug_ is None:
debug_ = self.debug
json_response = None
# print("----- GET ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ")
try:
lf_r = LFRequest.LFRequest(url=self.lfclient_url,
uri=_req_url,
proxies_=self.proxy,
debug_=debug_,
die_on_error_=self.exit_on_error)
json_response = lf_r.get_as_json(debug_=debug_, die_on_error_=False)
#debug_printer.pprint(json_response)
if (json_response is None):
json_response = lf_r.get_as_json()
if json_response is None:
if debug_:
if hasattr(lf_r, 'print_errors'):
lf_r.print_errors()
@@ -352,7 +399,8 @@ class LFCliBase:
return reverse_map
def error(self, exception):
@staticmethod
def error(exception):
# print("lfcli_base error: %s" % exception)
pprint.pprint(exception)
traceback.print_exception(Exception, exception, exception.__traceback__, chain=True)
@@ -465,26 +513,34 @@ class LFCliBase:
# print("lfclibase::self.proxy: ")
# pprint.pprint(self.proxy)
def logg2(self, level="debug", mesg=None):
@staticmethod
def logg2(level="debug", mesg=None):
if (mesg is None) or (mesg == ""):
return
print("[{level}]: {msg}".format(level=level, msg=mesg))
def logg(self,
level=None,
@staticmethod
def logg(level=None,
mesg=None,
filename=None,
scriptname=None):
"""
This method is used by vr_profile2, lf_create_bcast, and shadowed by base_profile.py
:param level:
:param mesg:
:param filename:
:param scriptname:
:return:
"""
if (mesg is None) or (mesg == "") or (level is None):
return
userhome = os.path.expanduser('~')
session = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-h-%M-m-%S-s")).replace(':', '-')
if filename == None:
try:
if filename is None:
if not os.path.isdir("%s/report-data/%s" % (userhome, session)):
if not os.path.isdir('%s/report-data' % userhome):
os.mkdir('%s/report-data' % userhome)
os.mkdir("%s/report-data/%s" % (userhome, session))
except:
pass
filename = ("%s/report-data/%s/%s.log" % (userhome, session, scriptname))
import logging
logging.basicConfig(filename=filename, level=logging.DEBUG)
@@ -547,10 +603,20 @@ class LFCliBase:
parser = argparse.ArgumentParser()
optional = parser.add_argument_group('optional arguments')
required = parser.add_argument_group('required arguments')
optional.add_argument('--mgr', help='hostname for where LANforge GUI is running', default='localhost')
optional.add_argument('--mgr_port', help='port LANforge GUI HTTP service is running on', default=8080)
optional.add_argument('--debug', '-d', help='Enable debugging', default=False, action="store_true")
optional.add_argument('--proxy', nargs='?', default=None, # action=ProxyAction,
optional.add_argument('--mgr',
default='localhost',
help='hostname for where LANforge GUI is running')
optional.add_argument('--mgr_port',
default=8080,
help='port LANforge GUI HTTP service is running on')
optional.add_argument('--debug',
'-d',
default=False,
action="store_true",
help='Enable debugging')
optional.add_argument('--proxy',
nargs='?',
default=None, # action=ProxyAction,
help='Connection proxy like http://proxy.localnet:80 or https://user:pass@proxy.localnet:3128')
return parser
@@ -575,35 +641,73 @@ class LFCliBase:
required = parser.add_argument_group('required arguments')
# Optional Args
optional.add_argument('--mgr', help='hostname for where LANforge GUI is running', default='localhost')
optional.add_argument('--mgr_port', help='port LANforge GUI HTTP service is running on', default=8080)
optional.add_argument('-u', '--upstream_port',
help='non-station port that generates traffic: <resource>.<port>, e.g: 1.eth1',
default='1.eth1')
optional.add_argument('--num_stations', help='Number of stations to create', default=0)
optional.add_argument('--test_id', help='Test ID (intended to use for ws events)', default="webconsole")
optional.add_argument('--debug', help='Enable debugging', default=False, action="store_true")
optional.add_argument('--proxy', nargs='?', default=None,
help='Connection proxy like http://proxy.localnet:80 or https://user:pass@proxy.localnet:3128')
optional.add_argument('--mgr',
default='localhost',
help='hostname for where LANforge GUI is running')
optional.add_argument('--mgr_port',
default=8080,
help='port LANforge GUI HTTP service is running on')
optional.add_argument('-u',
'--upstream_port',
default='1.eth1',
help='non-station port that generates traffic: <resource>.<port>, e.g: 1.eth1')
optional.add_argument('--num_stations',
type=int,
default=0,
help='Number of stations to create')
optional.add_argument('--test_id',
default="webconsole",
help='Test ID (intended to use for ws events)')
optional.add_argument('-d',
'--debug',
action="store_true",
help='Enable debugging')
optional.add_argument('--proxy',
nargs='?',
default=None,
help="Connection proxy like http://proxy.localnet:80 \n"
+ " or https://user:pass@proxy.localnet:3128")
optional.add_argument('--debugging',
nargs="+",
action="append",
help="Indicate what areas you would like express debug output:\n"
+ " - digest - print terse indications of lanforge_api calls\n"
+ " - json - print url and json data\n"
+ " - http - print HTTP headers\n"
+ " - gui - ask the GUI for extra debugging in responses\n"
+ " - method:method_name - enable by_method() debugging (if present)\n"
+ " - tag:tagname - enable matching by_tag() debug output\n"
)
optional.add_argument('--debug_log',
default=None,
help="Specify a file to send debug output to")
if more_optional is not None:
for x in more_optional:
if 'default' in x.keys():
optional.add_argument(x['name'], help=x['help'], default=x['default'])
for argument in more_optional:
if 'default' in argument.keys():
optional.add_argument(argument['name'], help=argument['help'], default=argument['default'])
else:
optional.add_argument(x['name'], help=x['help'])
optional.add_argument(argument['name'], help=argument['help'])
# Required Args
required.add_argument('--radio', help='radio EID, e.g: 1.wiphy2')
required.add_argument('--security', help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >', default="open")
required.add_argument('--ssid', help='WiFi SSID for script objects to associate to')
required.add_argument('--passwd', '--password' ,'--key', help='WiFi passphrase/password/key', default="[BLANK]")
required.add_argument('--radio',
help='radio EID, e.g: 1.wiphy2')
required.add_argument('--security',
default="open",
help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >')
required.add_argument('--ssid',
help='WiFi SSID for script objects to associate to')
required.add_argument('--passwd',
'--password',
'--key',
default="[BLANK]",
help='WiFi passphrase/password/key')
if more_required is not None:
for x in more_required:
if 'default' in x.keys():
required.add_argument(x['name'], help=x['help'], default=x['default'])
for argument in more_required:
if 'default' in argument.keys():
required.add_argument(argument['name'], help=argument['help'], default=argument['default'])
else:
required.add_argument(x['name'], help=x['help'])
required.add_argument(argument['name'], help=argument['help'])
return parser
@@ -622,24 +726,29 @@ class LFCliBase:
}
self.json_post("/cli-json/add_event", data, debug_=debug_)
def read_file(self, filename):
@staticmethod
def read_file(filename):
filename = open(filename, 'r')
return [line.split(',') for line in filename.readlines()]
# Function creates random characters made of letters
def random_chars(self, size, chars=None):
@staticmethod
def random_chars(size, chars=None):
if chars is None:
chars = string.ascii_letters
return ''.join(random.choice(chars) for x in range(size))
def get_milliseconds(self, timestamp):
@staticmethod
def get_milliseconds(timestamp):
return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds() * 1000
def get_seconds(self, timestamp):
@staticmethod
def get_seconds(timestamp):
return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds()
def replace_special_char(self, str):
return str.replace('+', ' ').replace('_', ' ').strip(' ')
@staticmethod
def replace_special_char(special_str):
return special_str.replace('+', ' ').replace('_', ' ').strip(' ')
Help_Mode = """Station WiFi modes: use the number value below:
auto : 0,

View File

@@ -81,7 +81,7 @@ class pandas_extensions:
print(for_loop_df1.at[0, col])
print(for_loop_df2.at[0, col])
if type(for_loop_df1.at[0, col]) == str and type(for_loop_df2.at[0, col]) == str:
if (' ' in for_loop_df1.at[0, col]) == True:
if (' ' in for_loop_df1.at[0, col]):
# do subtraction
new_value = float(for_loop_df1.at[0, col].split(" ")[0]) - float(
for_loop_df2.at[0, col].split(" ")[0])

View File

@@ -7,16 +7,15 @@ if sys.version_info[0] != 3:
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))
lf_json_autogen = importlib.import_module("py-json.LANforge.lf_json_autogen")
LFJsonPost = lf_json_autogen.LFJsonPost
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../")))
if os.environ.get("LF_USE_AUTOGEN") == 1:
set_port_current_flags = LFJsonPost.SetPortCurrentFlags.__members__
set_port_cmd_flags = LFJsonPost.SetPortCmdFlags.__members__
set_port_interest_flags = LFJsonPost.SetPortInterest.__members__
lanforge_api = importlib.import_module("lanforge_client.lanforge_api")
LFJsonCommand = lanforge_api.LFJsonCommand
set_port_current_flags = LFJsonCommand.SetPortCurrentFlags.__members__
set_port_cmd_flags = LFJsonCommand.SetPortCmdFlags.__members__
set_port_interest_flags = LFJsonCommand.SetPortInterest.__members__
else:
set_port_current_flags = {

View File

@@ -2,6 +2,12 @@
# Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python
# Written by Candela Technologies Inc.
# Updated by: Erin Grimes
"""
sample command:
./test_wanlink.py --name my_wanlink4 --latency_A 20 --latency_B 69 --rate 1000 --jitter_A 53 --jitter_B 73 --jitter_freq 6 --drop_A 12 --drop_B 11
"""
import sys
import urllib
import importlib
@@ -13,19 +19,23 @@ import os
from time import sleep
from urllib import error
import pprint
import argparse
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
j_printer = pprint.PrettyPrinter(indent=2)
# todo: this needs to change
resource_id = 1
def main(base_url, args={}):
def main(args):
base_url = 'http://'+args['host']+':8080'
print(base_url)
json_post = ""
json_response = ""
@@ -36,7 +46,7 @@ def main(base_url, args={}):
print(lf_r.get_as_json())
# remove old wanlinks
if (num_wanlinks > 0):
if num_wanlinks > 0:
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_cx")
lf_r.addPostData({
'test_mgr': 'all',
@@ -49,10 +59,10 @@ def main(base_url, args={}):
json_response = lf_r.getAsJson()
LFUtils.debug_printer.pprint(json_response)
for key, value in json_response.items():
if (isinstance(value, dict) and "_links" in value):
if isinstance(value, dict) and "_links" in value:
num_wanlinks = 1
except urllib.error.HTTPError as error:
print("Error code "+error.code)
print("Error code %s" % error.code)
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp")
lf_r.addPostData({
@@ -129,18 +139,18 @@ def main(base_url, args={}):
# start wanlink once we see it
seen = 0
while (seen < 1):
while seen < 1:
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
try:
json_response = lf_r.getAsJson()
if (json_response is None):
if json_response is None:
continue
LFUtils.debug_printer.pprint(json_response)
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
if isinstance(value, dict):
if "_links" in value:
if value["name"] == args['name']:
seen = 1
else:
pass
@@ -152,71 +162,71 @@ def main(base_url, args={}):
# print("value not a dict")
except urllib.error.HTTPError as error:
print("Error code "+error.code)
print("Error code %s " % error.code)
continue
print("starting wanlink:")
# print("the latency is {laten}".format(laten=latency))
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name'],
'cx_state': 'RUNNING'
})
lf_r.jsonPost()
# print("starting wanlink:")
# # print("the latency is {laten}".format(laten=latency))
# lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
# lf_r.addPostData({
# 'test_mgr': 'all',
# 'cx_name': args['name'],
# 'cx_state': 'RUNNING'
# })
# lf_r.jsonPost()
running = 0
while (running < 1):
while running < 1:
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
try:
json_response = lf_r.getAsJson()
if (json_response is None):
if json_response is None:
continue
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
if (value["state"].startswith("Run")):
if isinstance(value, dict):
if "_links" in value:
if value["name"] == args['name']:
if value["state"].startswith("Run"):
LFUtils.debug_printer.pprint(json_response)
running = 1
except urllib.error.HTTPError as error:
print("Error code "+error.code)
print("Error code %s" % error.code)
continue
print("Wanlink is running")
# stop wanlink
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name'],
'cx_state': 'STOPPED'
})
lf_r.jsonPost()
running = 1
while (running > 0):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,eid,state,_links")
LFUtils.debug_printer.pprint(json_response)
try:
json_response = lf_r.getAsJson()
if (json_response is None):
continue
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
if (value["state"].startswith("Stop")):
LFUtils.debug_printer.pprint(json_response)
running = 0
# # stop wanlink
# lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
# lf_r.addPostData({
# 'test_mgr': 'all',
# 'cx_name': args['name'],
# 'cx_state': 'STOPPED'
# })
# lf_r.jsonPost()
# running = 1
# while (running > 0):
# sleep(1)
# lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,eid,state,_links")
# LFUtils.debug_printer.pprint(json_response)
# try:
# json_response = lf_r.getAsJson()
# if (json_response is None):
# continue
# for key, value in json_response.items():
# if (isinstance(value, dict)):
# if ("_links" in value):
# if (value["name"] == args['name']):
# if (value["state"].startswith("Stop")):
# LFUtils.debug_printer.pprint(json_response)
# running = 0
except urllib.error.HTTPError as error:
print("Error code "+error.code)
continue
# except urllib.error.HTTPError as error:
# print("Error code "+error.code)
# continue
print("Wanlink is stopped.")
# print("Wanlink is stopped.")
# print("Wanlink info:")
# lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
@@ -235,4 +245,67 @@ def main(base_url, args={}):
if __name__ == '__main__':
main()
parser = LFCliBase.create_basic_argparse(
prog='create_wanlink.py',
formatter_class=argparse.RawTextHelpFormatter)
for group in parser._action_groups:
if group.title == "required arguments":
required_args = group
break
optional_args = None
for group in parser._action_groups:
if group.title == "optional arguments":
optional_args = group
break
if optional_args is not None:
optional_args.add_argument('--host', help='The resource IP address', default="localhost")
optional_args.add_argument('--port_A', help='Endpoint A', default="eth1")
optional_args.add_argument('--port_B', help='Endpoint B', default="eth2")
optional_args.add_argument('--name', help='The name of the wanlink', default="wl_eg1")
optional_args.add_argument('--rate', help='The maximum rate of transfer at both endpoints (bits/s)', default=1000000)
optional_args.add_argument('--rate_A', help='The max rate of transfer at endpoint A (bits/s)', default=None)
optional_args.add_argument('--rate_B', help='The maximum rate of transfer (bits/s)', default=None)
optional_args.add_argument('--latency', help='The delay of both ports', default=20)
optional_args.add_argument('--latency_A', help='The delay of port A', default=None)
optional_args.add_argument('--latency_B', help='The delay of port B', default=None)
optional_args.add_argument('--jitter', help='The max jitter of both ports (ms)', default=None)
optional_args.add_argument('--jitter_A', help='The max jitter of port A (ms)', default=None)
optional_args.add_argument('--jitter_B', help='The max jitter of port B (ms)', default=None)
optional_args.add_argument('--jitter_freq', help='The jitter frequency of both ports (%%)', default=None)
optional_args.add_argument('--jitter_freq_A', help='The jitter frequency of port A (%%)', default=None)
optional_args.add_argument('--jitter_freq_B', help='The jitter frequency of port B (%%)', default=None)
optional_args.add_argument('--drop', help='The drop frequency of both ports (%%)', default=None)
optional_args.add_argument('--drop_A', help='The drop frequency of port A (%%)', default=None)
optional_args.add_argument('--drop_B', help='The drop frequency of port B (%%)', default=None)
# todo: packet loss A and B
# todo: jitter A and B
for group in parser._action_groups:
if group.title == "optional arguments":
optional_args = group
break
parseargs = parser.parse_args()
args = {
"host": parseargs.mgr,
"port": parseargs.mgr_port,
"name": parseargs.name,
"port_A": parseargs.port_A,
"port_B": parseargs.port_B,
"latency": parseargs.latency,
"latency_A": (parseargs.latency_A if parseargs.latency_A is not None else parseargs.latency),
"latency_B": (parseargs.latency_B if parseargs.latency_B is not None else parseargs.latency),
"rate": parseargs.rate,
"rate_A": (parseargs.rate_A if parseargs.rate_A is not None else parseargs.rate),
"rate_B": (parseargs.rate_B if parseargs.rate_B is not None else parseargs.rate),
"jitter": parseargs.jitter,
"jitter_A": (parseargs.jitter_A if parseargs.jitter_A is not None else parseargs.jitter),
"jitter_B": (parseargs.jitter_B if parseargs.jitter_B is not None else parseargs.jitter),
"jitter_freq": parseargs.jitter,
"jitter_freq_A": (parseargs.jitter_freq_A if parseargs.jitter_freq_A is not None else parseargs.jitter_freq),
"jitter_freq_B": (parseargs.jitter_freq_B if parseargs.jitter_freq_B is not None else parseargs.jitter_freq),
"drop": parseargs.drop,
"drop_A": (parseargs.drop_A if parseargs.drop_A is not None else parseargs.drop),
"drop_B": (parseargs.drop_B if parseargs.drop_B is not None else parseargs.drop),
}
main(args)

View File

@@ -40,9 +40,12 @@ class cv_dut(LFCliBase):
self.lan_port = "[BLANK]"
self.api_id = "0"
self.flags_mask = "NA"
if desired_dut_flags is not None:
if desired_dut_flags:
self.dut_flags = desired_dut_flags
self.dut_flags_mask = desired_dut_flags_mask
if self.dut_flags:
self.flags = self.add_named_flags(self.dut_flags, add_dut_flags)
self.flags_mask = self.add_named_flags(self.dut_flags_mask, add_dut_flags)
def add_named_flags(self, desired_list, command_ref):
if desired_list is None:
@@ -80,11 +83,6 @@ class cv_dut(LFCliBase):
top_left_x="NA",
top_left_y="NA",
):
try:
self.flags = self.add_named_flags(self.dut_flags, add_dut_flags)
self.flags_mask = self.add_named_flags(self.dut_flags_mask, add_dut_flags)
except:
pass
response_json = []
req_url = "/cli-json/add_dut"
data = {
@@ -129,13 +127,13 @@ class cv_dut(LFCliBase):
ssid_flags=0,
ssid_flags_mask=0xFFFFFFFF):
req_url = "/cli-json/add_dut_ssid"
print("name:" + dut_name,
"ssid_idx:" + ssid_idx,
"ssid:" + ssid,
"passwd:" + passwd,
"bssid:" + bssid,
"ssid_flags:" + str(ssid_flags),
"ssid_flags_mask:" + str(ssid_flags_mask))
print("name: %s" % dut_name,
"ssid_idx: %s" % ssid_idx,
"ssid: %s" % ssid,
"passwd: %s" % passwd,
"bssid: %s" % bssid,
"ssid_flags: %s" % ssid_flags,
"ssid_flags_mask: %s" % ssid_flags_mask)
self.json_post(req_url, {
"name": dut_name,

View File

@@ -9,7 +9,6 @@ import importlib
import time
import json
from pprint import pprint
import argparse
if sys.version_info[0] != 3:
print("This script requires Python 3")
@@ -56,9 +55,9 @@ def cv_add_base_parser(parser):
parser.add_argument("-c", "--config_name", type=str, default="cv_dflt_cfg",
help="Config file name")
parser.add_argument("-r", "--pull_report", default=False, action='store_true',
parser.add_argument("-r", "--pull_report", action='store_true',
help="pull reports from lanforge (by default: False)")
parser.add_argument("--load_old_cfg", default=False, action='store_true',
parser.add_argument("--load_old_cfg", action='store_true',
help="Should we first load defaults from previous run of the capacity test? Default is False")
parser.add_argument("--enable", action='append', nargs=1, default=[],
@@ -86,7 +85,7 @@ class cv_test(Realm):
def __init__(self,
lfclient_host="localhost",
lfclient_port=8080,
lf_report_dir=""
lf_report_dir=None
):
super().__init__(lfclient_host=lfclient_host,
lfclient_port=lfclient_port)
@@ -105,7 +104,7 @@ class cv_test(Realm):
print("adding- " + text + " " + "to test config")
rsp = self.json_post(req_url, data)
self.json_post(req_url, data)
# time.sleep(1)
# Tell LANforge GUI Chamber View to launch a test
@@ -138,24 +137,6 @@ class cv_test(Realm):
cmd = "cv click '%s' Cancel" % instance
self.run_cv_cmd(cmd)
# Send chamber view commands to the LANforge GUI
def run_cv_cmd(self, command):
response_json = []
req_url = "/gui-json/cmd"
data = {
"cmd": command
}
debug_par = ""
rsp = self.json_post("/gui-json/cmd%s" % debug_par, data, debug_=False, response_json_list_=response_json)
try:
if response_json[0]["LAST"]["warnings"].startswith("Unknown"):
print("Unknown command?\n");
pprint(response_json)
except:
# Ignore un-handled structs at this point, let calling code deal with it.
pass
return response_json
# For auto save report
def auto_save_report(self, instance):
cmd = "cv click %s 'Auto Save Report'" % instance
@@ -165,16 +146,6 @@ class cv_test(Realm):
def get_report_location(self, instance):
cmd = "cv get %s 'Report Location:'" % instance
location = self.run_cv_cmd(cmd)
var = 1
while var != 0:
try:
data = json.dumps(location[0]["LAST"]["response"])
var = 0
except Exception as e:
var += 1
time.sleep(2)
if var > 5:
break
return location
# To get if test is running or not
@@ -216,7 +187,7 @@ class cv_test(Realm):
if self.get_exists(instance):
print("Waiting %i/60 for test instance: %s to be deleted." % (tries, instance))
tries += 1
if (tries > 60):
if tries > 60:
break
time.sleep(1)
else:
@@ -226,9 +197,9 @@ class cv_test(Realm):
tries = 0
while True:
if not self.get_cv_is_built():
print("Waiting %i/60 for Chamber-View to be built." % (tries))
print("Waiting %i/60 for Chamber-View to be built." % tries)
tries += 1
if (tries > 60):
if tries > 60:
break
time.sleep(1)
else:
@@ -258,17 +229,18 @@ class cv_test(Realm):
"type": "Plugin-Settings",
"name": str(blob_test_name + config_name), # config name
}
rsp = self.json_post(req_url, data)
self.json_post(req_url, data)
def rm_cv_text_blob(self, type="Network-Connectivity", name=None):
def rm_cv_text_blob(self, cv_type="Network-Connectivity", name=None):
req_url = "/cli-json/rm_text_blob"
data = {
"type": type,
"type": cv_type,
"name": name, # config name
}
rsp = self.json_post(req_url, data)
self.json_post(req_url, data)
def apply_cfg_options(self, cfg_options, enables, disables, raw_lines, raw_lines_file):
@staticmethod
def apply_cfg_options(cfg_options, enables, disables, raw_lines, raw_lines_file):
# Read in calibration data and whatever else.
if raw_lines_file != "":
@@ -315,7 +287,7 @@ class cv_test(Realm):
# cv_cmds: Array of raw chamber-view commands, such as "cv click 'button-name'"
# These (and the sets) are applied after the test is created and before it is started.
def create_and_run_test(self, load_old_cfg, test_name, instance_name, config_name, sets,
pull_report, lf_host, lf_user, lf_password, cv_cmds, local_lf_report_dir="", ssh_port=22,
pull_report, lf_host, lf_user, lf_password, cv_cmds, local_lf_report_dir=None, ssh_port=22,
graph_groups_file=None):
load_old = "false"
if load_old_cfg:
@@ -327,7 +299,7 @@ class cv_test(Realm):
if response[0]["LAST"]["response"] == "OK":
break
else:
print("Could not create test, try: %i/60:\n" % (start_try))
print("Could not create test, try: %i/60:\n" % start_try)
pprint(response)
start_try += 1
if start_try > 60:
@@ -349,7 +321,7 @@ class cv_test(Realm):
response = self.start_test(instance_name)
if response[0]["LAST"]["response"].__contains__("Could not find instance:"):
print("ERROR: start_test failed: ", response[0]["LAST"]["response"], "\n");
print("ERROR: start_test failed: ", response[0]["LAST"]["response"], "\n")
# pprint(response)
exit(1)
@@ -357,16 +329,12 @@ class cv_test(Realm):
while True:
cmd = "cv get_and_close_dialog"
dialog = self.run_cv_cmd(cmd)
try:
if dialog[0]["LAST"]["response"] != "NO-DIALOG":
print("Popup Dialog:\n")
print(dialog[0]["LAST"]["response"])
except Exception as e:
print(e)
check = self.get_report_location(instance_name)
location = json.dumps(check[0]["LAST"]["response"])
if location != '\"Report Location:::\"':
print(location)
location = location.replace('\"Report Location:::', '')
@@ -385,24 +353,24 @@ class cv_test(Realm):
self.lf_report_dir = location
if pull_report:
try:
print(lf_host)
print("Pulling report to directory: %s from %s@%s/%s" %
(local_lf_report_dir, lf_user, lf_host, location))
report.pull_reports(hostname=lf_host, username=lf_user, password=lf_password,
port=ssh_port, report_dir=local_lf_report_dir,
report_location=location)
except Exception as e:
print("SCP failed, user %s, password %s, dest %s", (lf_user, lf_password, lf_host))
print("SCP failed, user %s, password %s, dest %s" % (lf_user, lf_password, lf_host))
raise e # Exception("Could not find Reports")
break
else:
print('Not reporting to kpi file')
# Of if test stopped for some reason and could not generate report.
try:
if not self.get_is_running(instance_name):
print("Detected test is not running.")
not_running += 1
if not_running > 5:
break
except Exception as e:
print(e)
time.sleep(1)
self.report_name = self.get_report_location(instance_name)
@@ -411,20 +379,17 @@ class cv_test(Realm):
# Clean up any remaining popups.
while True:
dialog = self.run_cv_cmd(cmd);
dialog = self.run_cv_cmd(cmd)
if dialog[0]["LAST"]["response"] != "NO-DIALOG":
print("Popup Dialog:\n")
print(dialog[0]["LAST"]["response"])
else:
break
def a(self):
pass
# Takes cmd-line args struct or something that looks like it.
# See csv_to_influx.py::influx_add_parser_args for options, or --help.
def check_influx_kpi(self, args):
if self.lf_report_dir == "":
if self.lf_report_dir is None:
# Nothing to report on.
print("Not submitting to influx, no report-dir.\n")
return
@@ -446,12 +411,12 @@ class cv_test(Realm):
# lf_wifi_capacity_test.py may be run / initiated by a remote system against a lanforge
# the local_lf_report_dir is where data is stored, if there is no local_lf_report_dir then the test is run directly on lanforge
if self.local_lf_report_dir == "":
csv_path = "%s/kpi.csv" % (self.lf_report_dir)
if self.lf_report_dir:
csv_path = "%s/kpi.csv" % self.lf_report_dir
else:
kpi_location = self.local_lf_report_dir + "/" + os.path.basename(self.lf_report_dir)
# the local_lf_report_dir is the parent directory, need to get the directory name
csv_path = "%s/kpi.csv" % (kpi_location)
kpi_location = self.lf_report_dir + "/" + os.path.basename(self.lf_report_dir)
# the lf_report_dir is the parent directory, need to get the directory name
csv_path = "%s/kpi.csv" % kpi_location
print("Attempt to submit kpi: ", csv_path)
print("Posting to influx...\n")
@@ -483,7 +448,7 @@ class cv_test(Realm):
"text": text_blob
}
rsp = self.json_post(req_url, data)
self.json_post(req_url, data)
def pass_raw_lines_to_cv(self,
scenario_name="Automation",
@@ -494,7 +459,7 @@ class cv_test(Realm):
"name": scenario_name,
"text": Rawline
}
rsp = self.json_post(req_url, data)
self.json_post(req_url, data)
# This is for chamber view buttons
@@ -520,18 +485,17 @@ class cv_test(Realm):
def run_cv_cmd(self, command): # Send chamber view commands
response_json = []
req_url = "/gui-json/cmd"
data = {
"cmd": command
}
rsp = self.json_post(req_url, data, debug_=False, response_json_list_=response_json)
data = {"cmd": command}
self.json_post(req_url, data, debug_=False, response_json_list_=response_json)
return response_json
def get_response_string(self, response):
@staticmethod
def get_response_string(response):
return response[0]["LAST"]["response"]
def get_popup_info_and_close(self):
cmd = "cv get_and_close_dialog"
dialog = self.run_cv_cmd(cmd);
dialog = self.run_cv_cmd(cmd)
if dialog[0]["LAST"]["response"] != "NO-DIALOG":
print("Popup Dialog:\n")
print(dialog[0]["LAST"]["response"])

View File

@@ -1,9 +1,11 @@
import paramiko
from scp import SCPClient
class lanforge_reports:
def pull_reports(self, hostname="localhost", port=22, username="lanforge", password="lanforge",
@staticmethod
def pull_reports(hostname="localhost", port=22, username="lanforge", password="lanforge",
report_location="/home/lanforge/html-reports/",
report_dir="../../../reports/"):
ssh = paramiko.SSHClient()
@@ -14,4 +16,3 @@ class lanforge_reports:
with SCPClient(ssh.get_transport()) as scp:
scp.get(remote_path=report_location, local_path=report_dir, recursive=True)
scp.close()

View File

@@ -39,7 +39,7 @@ class DataPlaneTest(ChamberViewBase):
def main():
obj = DataPlaneTest(lfclient_host="localhost", lfclient_port=8080, debug_=True)
DataPlaneTest(lfclient_host="localhost", lfclient_port=8080, debug_=True)
if __name__ == '__main__':

View File

@@ -2,7 +2,8 @@
import datetime
import random
import string
from pprint import pprint
from pprint import pformat
class BaseProfile:
def __init__(self, local_realm, debug=False):
@@ -11,9 +12,8 @@ class BaseProfile:
self.debug = debug or local_realm.debug
self.profiles = []
def json_get(self, _req_url, debug_=False):
return self.parent_realm.json_get(_req_url, debug_=False)
return self.parent_realm.json_get(_req_url, debug_=debug_)
def json_post(self, req_url=None, data=None, debug_=False, suppress_related_commands_=None):
return self.parent_realm.json_post(_req_url=req_url,
@@ -34,50 +34,54 @@ class BaseProfile:
return self.parent_realm.rm_cx(cx_name)
def rm_endp(self, ename, debug_=False, suppress_related_commands_=True):
self.parent_realm.rm_endp(ename, debug_=False, suppress_related_commands_=True)
self.parent_realm.rm_endp(ename, debug_=debug_, suppress_related_commands_=suppress_related_commands_)
def name_to_eid(self, eid):
return self.parent_realm.name_to_eid(eid)
def set_endp_tos(self, ename, _tos, debug_=False, suppress_related_commands_=True):
return self.parent_realm.set_endp_tos(ename, _tos, debug_=False, suppress_related_commands_=True)
return self.parent_realm.set_endp_tos(ename, _tos, debug_=debug_, suppress_related_commands_=suppress_related_commands_)
def wait_until_endps_appear(self, these_endp, debug=False):
return self.parent_realm.wait_until_endps_appear(these_endp, debug=False)
return self.parent_realm.wait_until_endps_appear(these_endp, debug=debug)
def wait_until_cxs_appear(self, these_cx, debug=False):
return self.parent_realm.wait_until_cxs_appear(these_cx, debug=False)
return self.parent_realm.wait_until_cxs_appear(these_cx, debug=debug)
def logg(self, message=None, audit_list=None):
if audit_list is None:
self.parent_realm.logg(message)
for item in audit_list:
if (item is None):
if item is None:
continue
message += ("\n" + pprint.pformat(item, indent=4))
message += ("\n" + pformat(item, indent=4))
self.parent_realm.logg(message)
def replace_special_char(self, str):
return str.replace('+', ' ').replace('_', ' ').strip(' ')
@staticmethod
def replace_special_char(original):
return original.replace('+', ' ').replace('_', ' ').strip(' ')
# @deprecate me
def get_milliseconds(self, timestamp):
@staticmethod
def get_milliseconds(timestamp):
return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds() * 1000
# @deprecate me
def get_seconds(self, timestamp):
@staticmethod
def get_seconds(timestamp):
return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds()
def read_file(self, filename):
@staticmethod
def read_file(filename):
filename = open(filename, 'r')
return [line.split(',') for line in filename.readlines()]
# Function to create random characters made of letters
def random_chars(self, size, chars=None):
@staticmethod
def random_chars(size, chars=None):
if chars is None:
chars = string.ascii_letters
return ''.join(random.choice(chars) for x in range(size))
return ''.join(random.choice(chars) for _ in range(size))
# --------------- create file path / find file path code - to be put into functions
# #Find file path to save data/csv to:
@@ -118,5 +122,3 @@ class BaseProfile:
# exit(1)
# else:
# compared_rept=args.compared_report

View File

@@ -611,9 +611,9 @@ class L3CXProfile2(BaseProfile):
raise ValueError("L3CXProfile::monitor wants duration_sec > 1 second")
if (duration_sec <= monitor_interval_ms):
raise ValueError("L3CXProfile::monitor wants duration_sec > monitor_interval")
if report_file == None:
if report_file is None:
raise ValueError("Monitor requires an output file to be defined")
if created_cx == None:
if created_cx is None:
raise ValueError("Monitor needs a list of Layer 3 connections")
if (monitor_interval_ms is None) or (monitor_interval_ms < 1):
raise ValueError("L3CXProfile::monitor wants monitor_interval >= 1 second")

View File

@@ -74,7 +74,7 @@ class VRProfile(BaseProfile):
def vr_eid_to_url(self, eid_str=None, debug=False):
debug |= self.debug
if (eid_str is None) or ("" == eid_str) or (eid_str.index(".") < 1):
if (eid_str is None) or (eid_str == "") or (eid_str.index(".") < 1):
raise ValueError("vr_eid_to_url cannot read eid[%s]" % eid_str)
hunks = eid_str.split(".")
if len(hunks) > 3:
@@ -111,7 +111,7 @@ class VRProfile(BaseProfile):
resource=1,
debug=False):
debug |= self.debug
if (resource is None) or (resource == 0) or ("" == resource):
if (resource is None) or (resource == 0) or (resource == ""):
raise ValueError("resource needs to be a number greater than 1")
router_map = self.router_list(resource=resource, debug=debug)
@@ -334,9 +334,9 @@ class VRProfile(BaseProfile):
:return: True if area is inside listed virtual router(s)
"""
debug |= self.debug
if (resource is None) or (resource == 0) or ("" == resource):
if (resource is None) or (resource == 0) or (resource == ""):
raise ValueError("resource needs to be a number greater than 1")
if (vrcx_rect is None) or type(vrcx_rect ) or ("" == resource):
if (vrcx_rect is None) or type(vrcx_rect) or (resource == ""):
raise ValueError("resource needs to be a number greater than 1")
router_list = self.router_list(resource=resource, debug=debug)
#router_list = self.json_get("/vr/1/%s/%s?fields=eid,x,y,height,width")

View File

@@ -50,10 +50,10 @@ class DUTProfile(LFCliBase):
self.append = []
def set_param(self, name, value):
if (name in self.__dict__):
if name in self.__dict__:
self.__dict__[name] = value
def create(self, name=None, param_=None, flags=None, flags_mask=None, notes=None):
def create(self, name=None, flags=None, flags_mask=None):
data = {}
if (name is not None) and (name != ""):
data["name"] = name
@@ -63,7 +63,7 @@ class DUTProfile(LFCliBase):
raise ValueError("cannot create/update DUT record lacking a name")
for param in add_dut.dut_params:
if (param.name in self.__dict__):
if param.name in self.__dict__:
if (self.__dict__[param.name] is not None) \
and (self.__dict__[param.name] != "NA"):
data[param.name] = self.__dict__[param.name]
@@ -97,7 +97,6 @@ class DUTProfile(LFCliBase):
"dut": self.name,
"text": "[BLANK]"
}, self.debug)
notebytes = None
for line in self.notes:
notebytes = base64.b64encode(line.encode('ascii'))
if self.debug:
@@ -110,7 +109,6 @@ class DUTProfile(LFCliBase):
"text-64": notebytes.decode('ascii')
}, self.debug)
if (self.append is not None) and (len(self.append) > 0):
notebytes = None
for line in self.append:
notebytes = base64.b64encode(line.encode('ascii'))
if self.debug:

View File

@@ -121,7 +121,9 @@ class FIOEndpProfile(LFCliBase):
self.json_post(req_url, data)
# pprint(data)
def create(self, ports=[], connections_per_port=1, sleep_time=.5, debug_=False, suppress_related_commands_=None):
def create(self, ports=None, connections_per_port=1, sleep_time=.5, debug_=False, suppress_related_commands_=None):
if ports is None:
ports = []
cx_post_data = []
for port_name in ports:
for num_connection in range(connections_per_port):
@@ -173,8 +175,6 @@ class FIOEndpProfile(LFCliBase):
self.local_realm.json_post("/cli-json/nc_show_endpoints", {"endpoint": "all"})
for port_name in ports:
for num_connection in range(connections_per_port):
shelf = self.local_realm.name_to_eid(port_name)[0]
resource = self.local_realm.name_to_eid(port_name)[1]
name = self.local_realm.name_to_eid(port_name)[2]
endp_data = {

View File

@@ -9,7 +9,6 @@ import time
import datetime
import json
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -49,7 +48,7 @@ class GenCXProfile(LFCliBase):
if self.cmd == "":
raise ValueError("Please ensure cmd has been set correctly")
elif self.type == "speedtest":
self.cmd = "vrf_exec.bash %s speedtest-cli --json --share" % (sta_name)
self.cmd = "vrf_exec.bash %s speedtest-cli --json --share" % sta_name
elif self.type == "iperf3" and self.dest is not None:
self.cmd = "iperf3 --forceflush --format k --precision 4 -c %s -t 60 --tos 0 -b 1K --bind_dev %s -i 1 " \
"--pidfile /tmp/lf_helper_iperf3_%s.pid" % (self.dest, sta_name, gen_name)
@@ -132,7 +131,7 @@ class GenCXProfile(LFCliBase):
if self.cmd == "":
raise ValueError("Please ensure cmd has been set correctly")
elif self.type == "speedtest":
self.cmd = "vrf_exec.bash %s speedtest-cli --json --share" % (sta_name)
self.cmd = "vrf_exec.bash %s speedtest-cli --json --share" % sta_name
elif self.type == "iperf3" and self.dest is not None:
self.cmd = "iperf3 --forceflush --format k --precision 4 -c %s -t 60 --tos 0 -b 1K --bind_dev %s -i 1 " \
"--pidfile /tmp/lf_helper_iperf3_test.pid" % (self.dest, sta_name)
@@ -176,7 +175,7 @@ class GenCXProfile(LFCliBase):
resource = port_info[1]
shelf = port_info[0]
name = port_info[2]
except:
except ValueError:
raise ValueError("Unexpected name for port_name %s" % port_name)
# this naming convention follows what you see when you use
@@ -278,7 +277,9 @@ class GenCXProfile(LFCliBase):
})
time.sleep(sleep_time)
def create(self, ports=[], sleep_time=.5, debug_=False, suppress_related_commands_=None):
def create(self, ports=None, sleep_time=.5, debug_=False, suppress_related_commands_=None):
if ports is None:
ports = []
if self.debug:
debug_ = True
post_data = []
@@ -401,7 +402,6 @@ class GenCXProfile(LFCliBase):
# for k,v in name.items():
exit(1)
def choose_speedtest_command(self):
gen_results = self.json_get("generic/list?fields=name,last+results", debug_=self.debug)
if gen_results['endpoints'] is not None:
@@ -418,7 +418,7 @@ class GenCXProfile(LFCliBase):
def choose_generic_command(self):
gen_results = self.json_get("generic/list?fields=name,last+results", debug_=self.debug)
if (gen_results['endpoints'] is not None):
if gen_results['endpoints'] is not None:
for name in gen_results['endpoints']:
for k, v in name.items():
if v['name'] in self.created_endp and not v['name'].endswith('1'):
@@ -444,10 +444,10 @@ class GenCXProfile(LFCliBase):
debug=False):
try:
duration_sec = self.parse_time(duration_sec).seconds
except:
except ValueError:
if (duration_sec is None) or (duration_sec <= 1):
raise ValueError("GenCXProfile::monitor wants duration_sec > 1 second")
if (duration_sec <= monitor_interval_ms):
if duration_sec <= monitor_interval_ms:
raise ValueError("GenCXProfile::monitor wants duration_sec > monitor_interval")
if report_file is None:
raise ValueError("Monitor requires an output file to be defined")
@@ -602,7 +602,9 @@ class GenCXProfile(LFCliBase):
exit(1)
# append compared df to created one
if output_format.lower() != 'csv':
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format,
save_path=report_file)
else:
if output_format.lower() != 'csv':
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format,
save_path=report_file)

View File

@@ -4,7 +4,6 @@ import os
import importlib
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
port_utils = importlib.import_module("py-json.port_utils")
@@ -27,6 +26,7 @@ class HTTPProfile(LFCliBase):
self.dest = "/dev/null"
self.port_util = PortUtils(self.local_realm)
self.max_speed = 0 # infinity
self.quiesce_after = 0 # infinity
def check_errors(self, debug=False):
fields_list = ["!conn", "acc.+denied", "bad-proto", "bad-url", "other-err", "total-err", "rslv-p", "rslv-h",
@@ -91,7 +91,9 @@ class HTTPProfile(LFCliBase):
self.json_post(req_url, data)
# pprint(data)
def map_sta_ips(self, sta_list=[]):
def map_sta_ips(self, sta_list=None):
if sta_list is None:
sta_list = []
for sta_eid in sta_list:
eid = self.local_realm.name_to_eid(sta_eid)
sta_list = self.json_get("/port/%s/%s/%s?fields=alias,ip" %
@@ -99,8 +101,11 @@ class HTTPProfile(LFCliBase):
if sta_list['interface'] is not None:
self.ip_map[sta_list['interface']['alias']] = sta_list['interface']['ip']
def create(self, ports=[], sleep_time=.5, debug_=False, suppress_related_commands_=None, http=False, ftp=False,
https=False, user=None, passwd=None, source=None, ftp_ip=None, upload_name=None, http_ip=None, https_ip=None):
def create(self, ports=None, sleep_time=.5, debug_=False, suppress_related_commands_=None, http=False, ftp=False,
https=False, user=None, passwd=None, source=None, ftp_ip=None, upload_name=None, http_ip=None,
https_ip=None):
if ports is None:
ports = []
cx_post_data = []
self.map_sta_ips(ports)
print("Create CXs...")
@@ -121,7 +126,7 @@ class HTTPProfile(LFCliBase):
resource = self.local_realm.name_to_eid(port_name)[1]
name = self.local_realm.name_to_eid(port_name)[2]
if upload_name != None:
if upload_name is not None:
name = upload_name
if http:
@@ -154,7 +159,7 @@ class HTTPProfile(LFCliBase):
if (url is None) or (url == ""):
raise ValueError("HTTPProfile::create: url unset")
if upload_name ==None:
if upload_name is None:
endp_data = {
"alias": name + "_l4",
"shelf": shelf,
@@ -164,7 +169,9 @@ class HTTPProfile(LFCliBase):
"timeout": 10,
"url_rate": self.requests_per_ten,
"url": url,
"proxy_auth_type": 0x200
"proxy_auth_type": 0x200,
"quiesce_after": self.quiesce_after,
"max_speed": self.max_speed
}
else:
endp_data = {
@@ -179,7 +186,8 @@ class HTTPProfile(LFCliBase):
"ssl_cert_fname": "ca-bundle.crt",
"proxy_port": 0,
"max_speed": self.max_speed,
"proxy_auth_type": 0x200
"proxy_auth_type": 0x200,
"quiesce_after": self.quiesce_after
}
url = "cli-json/add_l4_endp"
self.local_realm.json_post(url, endp_data, debug_=debug_,

View File

@@ -2,18 +2,18 @@
import sys
import os
import importlib
from pprint import pprint
import csv
import pprint
import pandas as pd
import time
import datetime
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
pandas_extensions = importlib.import_module("py-json.LANforge.pandas_extensions")
port_probe = importlib.import_module("py-json.port_probe")
ProbePort = port_probe.ProbePort
class L3CXProfile(LFCliBase):
@@ -21,8 +21,8 @@ class L3CXProfile(LFCliBase):
lfclient_host,
lfclient_port,
local_realm,
side_a_min_bps=None,
side_b_min_bps=None,
side_a_min_bps=256000,
side_b_min_bps=256000,
side_a_max_bps=0,
side_b_max_bps=0,
side_a_min_pdu=-1,
@@ -76,10 +76,10 @@ class L3CXProfile(LFCliBase):
return self.created_cx.keys()
def get_cx_report(self):
self.data = {}
data = dict()
for cx_name in self.get_cx_names():
self.data[cx_name] = self.json_get("/cx/" + cx_name).get(cx_name)
return self.data
data[cx_name] = self.json_get("/cx/" + cx_name).get(cx_name)
return data
def __get_rx_values(self):
cx_list = self.json_get("endp?fields=name,rx+bytes")
@@ -95,7 +95,8 @@ class L3CXProfile(LFCliBase):
cx_rx_map[item] = value_rx
return cx_rx_map
def __compare_vals(self, old_list, new_list):
@staticmethod
def __compare_vals(old_list, new_list):
passes = 0
expected_passes = 0
if len(old_list) == len(new_list):
@@ -121,7 +122,6 @@ class L3CXProfile(LFCliBase):
layer3_cols=None,
port_mgr_cols=None,
created_cx=None,
monitor=True,
report_file=None,
systeminfopath=None,
output_format=None,
@@ -129,24 +129,23 @@ class L3CXProfile(LFCliBase):
arguments=None,
compared_report=None,
debug=False):
try:
if duration_sec:
duration_sec = self.parse_time(duration_sec).seconds
except:
if (duration_sec is None) or (duration_sec <= 1):
else:
raise ValueError("L3CXProfile::monitor wants duration_sec > 1 second")
if (duration_sec <= monitor_interval_ms):
if duration_sec <= monitor_interval_ms:
raise ValueError("L3CXProfile::monitor wants duration_sec > monitor_interval")
if report_file == None:
if report_file is None:
raise ValueError("Monitor requires an output file to be defined")
if systeminfopath == None:
if systeminfopath is None:
raise ValueError("Monitor requires a system info path to be defined")
if created_cx == None:
if created_cx is None:
raise ValueError("Monitor needs a list of Layer 3 connections")
if (monitor_interval_ms is None) or (monitor_interval_ms < 1):
raise ValueError("L3CXProfile::monitor wants monitor_interval >= 1 second")
if layer3_cols is None:
raise ValueError("L3CXProfile::monitor wants a list of column names to monitor")
if output_format is not None:
if output_format:
if output_format.lower() != report_file.split('.')[-1]:
raise ValueError('Filename %s has an extension that does not match output format %s .' % (
report_file, output_format))
@@ -163,13 +162,16 @@ class L3CXProfile(LFCliBase):
layer3_fields = ",".join(layer3_cols)
default_cols = ['Timestamp', 'Timestamp milliseconds epoch', 'Timestamp seconds epoch', 'Duration elapsed']
default_cols.extend(layer3_cols)
if port_mgr_cols is not None:
# append alias to port_mgr_cols if not present needed later
if port_mgr_cols:
if 'alias' not in port_mgr_cols:
port_mgr_cols.append('alias')
if port_mgr_cols:
default_cols.extend(port_mgr_cols)
header_row = default_cols
# csvwriter.writerow([systeminfo['VersionInfo']['BuildVersion'], script_name, str(arguments)])
if port_mgr_cols is not None:
if port_mgr_cols:
port_mgr_cols = [self.replace_special_char(x) for x in port_mgr_cols]
port_mgr_cols_labelled = []
for col_name in port_mgr_cols:
@@ -193,59 +195,25 @@ class L3CXProfile(LFCliBase):
expected_passes = 0
old_cx_rx_values = self.__get_rx_values()
# instantiate csv file here, add specified column headers
csvfile = open(str(report_file), 'w')
csvwriter = csv.writer(csvfile, delimiter=",")
csvwriter.writerow(header_row)
# wait 10 seconds to get proper port data
time.sleep(10)
# for x in range(0,int(round(iterations,0))):
initial_starttime = datetime.datetime.now()
timestamp_data = list()
while datetime.datetime.now() < end_time:
t = datetime.datetime.now()
timestamp = t.strftime("%m/%d/%Y %I:%M:%S")
t_to_millisec_epoch = int(self.get_milliseconds(t))
t_to_sec_epoch = int(self.get_seconds(t))
time_elapsed = int(self.get_seconds(t)) - int(self.get_seconds(initial_starttime))
basecolumns = [timestamp, t_to_millisec_epoch, t_to_sec_epoch, time_elapsed]
layer_3_response = self.json_get("/endp/%s?fields=%s" % (created_cx, layer3_fields))
if port_mgr_cols is not None:
port_mgr_response = self.json_get("/port/1/1/%s?fields=%s" % (sta_list, port_mgr_fields))
# get info from port manager with list of values from cx_a_side_list
if "endpoint" not in layer_3_response or layer_3_response is None:
print(layer_3_response)
raise ValueError("Cannot find columns requested to be searched. Exiting script, please retry.")
if debug:
print("Json layer_3_response from LANforge... " + str(layer_3_response))
if port_mgr_cols is not None:
if "interfaces" not in port_mgr_response or port_mgr_response is None:
print(port_mgr_response)
raise ValueError("Cannot find columns requested to be searched. Exiting script, please retry.")
if debug:
print("Json port_mgr_response from LANforge... " + str(port_mgr_response))
stations = [station.split('.')[-1] for station in sta_list]
stations = ','.join(stations)
for endpoint in layer_3_response["endpoint"]: # each endpoint is a dictionary
endp_values = list(endpoint.values())[0]
temp_list = basecolumns
for columnname in header_row[len(basecolumns):]:
temp_list.append(endp_values[columnname])
if port_mgr_cols is not None:
for sta_name in sta_list_edit:
if sta_name in current_sta:
for interface in port_mgr_response["interfaces"]:
if sta_name in list(interface.keys())[0]:
merge = temp_endp_values.copy()
# rename keys (separate port mgr 'rx bytes' from layer3 'rx bytes')
port_mgr_values_dict = list(interface.values())[0]
renamed_port_cols = {}
for key in port_mgr_values_dict.keys():
renamed_port_cols['port mgr - ' + key] = port_mgr_values_dict[key]
merge.update(renamed_port_cols)
for name in port_mgr_cols:
temp_list.append(merge[name])
csvwriter.writerow(temp_list)
if port_mgr_cols:
port_mgr_response = self.json_get("/port/1/1/%s?fields=%s" % (stations, port_mgr_fields))
layer_3_response = self.json_get("/endp/%s?fields=%s" % (created_cx, layer3_fields))
new_cx_rx_values = self.__get_rx_values()
if debug:
@@ -258,37 +226,129 @@ class L3CXProfile(LFCliBase):
passes += 1
else:
self.fail("FAIL: Not all stations increased traffic")
self.exit_fail()
try:
cx_data = self.json_get("/cx/all")
cx_data.pop("handler")
cx_data.pop("uri")
for i in self.created_cx.keys():
endp_a_data = self.json_get("/endp/"+ cx_data[i]['endpoints'][0])
endp_b_data = self.json_get("/endp/" + cx_data[i]['endpoints'][1])
print("cx name:", i, "\n",
" bps tx a :", endp_a_data['endpoint']['tx rate'], " --> ",
" bps rx b : ", endp_b_data['endpoint']['rx rate'],
" rx drop % b : ", cx_data[i]['rx drop % b'], "\n"
" tx bytes a : ", endp_a_data['endpoint']['tx bytes'], " --> "
" rx bytes b", endp_b_data['endpoint']['rx bytes'], "\n"
" tx bytes b : ", endp_b_data['endpoint']['tx bytes'], " --> "
" rx bytes a", endp_a_data['endpoint']['rx bytes'], "\n"
" bps tx b :", endp_b_data['endpoint']['tx rate'], " --> "
" bps rx a : ", endp_a_data['endpoint']['rx rate'],
" rx drop % a :", cx_data[i]['rx drop % a'], "\n"
" pkt rx a :", cx_data[i]['pkt rx a'], " pkt rx b : ", cx_data[i]['pkt rx b'],
)
print("\n\n\n")
except Exception as e:
print(e)
result = dict() # create dataframe from layer 3 results
if type(layer_3_response) is dict:
for dictionary in layer_3_response['endpoint']:
# if debug:
print('layer_3_data: %s' % dictionary)
result.update(dictionary)
else:
pass
layer3 = pd.DataFrame(result.values())
layer3.columns = ['l3-' + x for x in layer3.columns]
if port_mgr_cols: # create dataframe from port mgr results
result = dict()
if type(port_mgr_response) is dict:
print("port_mgr_response {pmr}".format(pmr=port_mgr_response))
if 'interfaces' in port_mgr_response:
for dictionary in port_mgr_response['interfaces']:
if debug:
print('port mgr data: %s' % dictionary)
result.update(dictionary)
elif 'interface' in port_mgr_response:
dict_update = {port_mgr_response['interface']['alias']: port_mgr_response['interface']}
if debug:
print(dict_update)
result.update(dict_update)
if debug:
print(result)
else:
print('interfaces and interface not in port_mgr_response')
exit(1)
portdata_df = pd.DataFrame(result.values())
print("portdata_df {pd}".format(pd=portdata_df))
portdata_df.columns = ['port-' + x for x in portdata_df.columns]
portdata_df['alias'] = portdata_df['port-alias']
layer3_alias = list() # Add alias to layer 3 dataframe
for cross_connect in layer3['l3-name']:
for port in portdata_df['port-alias']:
if port in cross_connect:
layer3_alias.append(port)
if len(layer3_alias) == layer3.shape[0]:
layer3['alias'] = layer3_alias
else:
raise ValueError("The Stations or Connection on LANforge did not match expected, \
Check if LANForge initial state correct or delete/cleanup corrects")
timestamp_df = pd.merge(layer3, portdata_df, on='alias')
else:
timestamp_df = layer3
probe_port_df_list = list()
for station in sta_list:
probe_port = ProbePort(lfhost=self.lfclient_host,
lfport=self.lfclient_port,
eid_str=station,
debug=self.debug)
probe_results = dict()
probe_port.refreshProbe()
probe_results['Signal Avg Combined'] = probe_port.getSignalAvgCombined()
probe_results['Signal Avg per Chain'] = probe_port.getSignalAvgPerChain()
probe_results['Signal Combined'] = probe_port.getSignalCombined()
probe_results['Signal per Chain'] = probe_port.getSignalPerChain()
if 'Beacon Av Signal' in probe_results.keys():
probe_results['Beacon Avg Signal'] = probe_port.getBeaconSignalAvg()
else:
probe_results['Beacon Avg Signal'] = "0"
# probe_results['HE status'] = probe_port.he
probe_results['TX Bitrate'] = probe_port.tx_bitrate
probe_results['TX Mbps'] = probe_port.tx_mbit
probe_results['TX MCS ACTUAL'] = probe_port.tx_mcs
if probe_port.tx_mcs:
probe_results['TX MCS'] = int(probe_port.tx_mcs) % 8
else:
probe_results['TX MCS'] = probe_port.tx_mcs
probe_results['TX NSS'] = probe_port.tx_nss
probe_results['TX MHz'] = probe_port.tx_mhz
if probe_port.tx_gi:
probe_results['TX GI ns'] = (probe_port.tx_gi * 10**9)
else:
probe_results['TX GI ns'] = probe_port.tx_gi
probe_results['TX Mbps Calc'] = probe_port.tx_mbit_calc
probe_results['TX GI'] = probe_port.tx_gi
probe_results['TX Mbps short GI'] = probe_port.tx_data_rate_gi_short_Mbps
probe_results['TX Mbps long GI'] = probe_port.tx_data_rate_gi_long_Mbps
probe_results['RX Bitrate'] = probe_port.rx_bitrate
probe_results['RX Mbps'] = probe_port.rx_mbit
probe_results['RX MCS ACTUAL'] = probe_port.rx_mcs
if probe_port.rx_mcs:
probe_results['RX MCS'] = int(probe_port.rx_mcs) % 8
else:
probe_results['RX MCS'] = probe_port.rx_mcs
probe_results['RX NSS'] = probe_port.rx_nss
probe_results['RX MHz'] = probe_port.rx_mhz
if probe_port.rx_gi:
probe_results['RX GI ns'] = (probe_port.rx_gi * 10**9)
else:
probe_results['RX GI ns'] = probe_port.rx_gi
probe_results['RX Mbps Calc'] = probe_port.rx_mbit_calc
probe_results['RX GI'] = probe_port.rx_gi
probe_results['RX Mbps short GI'] = probe_port.rx_data_rate_gi_short_Mbps
probe_results['RX Mbps long GI'] = probe_port.rx_data_rate_gi_long_Mbps
probe_df_initial = pd.DataFrame(probe_results.values()).transpose()
probe_df_initial.columns = probe_results.keys()
probe_df_initial.columns = ['probe ' + x for x in probe_df_initial.columns]
probe_df_initial['alias'] = station.split('.')[-1]
probe_port_df_list.append(probe_df_initial)
probe_port_df = pd.concat(probe_port_df_list)
timestamp_df = pd.merge(timestamp_df, probe_port_df, on='alias')
timestamp_df['Timestamp'] = timestamp
timestamp_df['Timestamp milliseconds epoch'] = t_to_millisec_epoch
timestamp_df['Timestamp seconds epoch'] = t_to_sec_epoch
timestamp_df['Duration elapsed'] = time_elapsed
timestamp_data.append(timestamp_df)
time.sleep(monitor_interval_ms)
csvfile.close()
df = pd.concat(timestamp_data)
df = df.drop('alias', axis=1)
df.to_csv(str(report_file), index=False)
# comparison to last report / report inputted
if compared_report is not None:
compared_df = pandas_extensions.compare_two_df(dataframe_one=pandas_extensions.file_to_df(report_file),
if compared_report:
pandas_extensions.compare_two_df(dataframe_one=pandas_extensions.file_to_df(report_file),
dataframe_two=pandas_extensions.file_to_df(compared_report))
exit(1)
# append compared df to created one
@@ -310,7 +370,7 @@ class L3CXProfile(LFCliBase):
print("Starting CXs...")
for cx_name in self.created_cx.keys():
if self.debug:
print("cx-name: %s" % (cx_name))
print("cx-name: %s" % cx_name)
self.json_post("/cli-json/set_cx_state", {
"test_mgr": "default_tm",
"cx_name": cx_name,
@@ -336,13 +396,13 @@ class L3CXProfile(LFCliBase):
if len(self.created_cx) != 0:
for cx_name in self.created_cx.keys():
if self.debug:
print("Cleaning cx: %s" % (cx_name))
print("Cleaning cx: %s" % cx_name)
self.local_realm.rm_cx(cx_name)
for side in range(len(self.created_cx[cx_name])):
ename = self.created_cx[cx_name][side]
if self.debug:
print("Cleaning endpoint: %s" % (ename))
print("Cleaning endpoint: %s" % ename)
self.local_realm.rm_endp(self.created_cx[cx_name][side])
self.clean_cx_lists()
@@ -382,8 +442,6 @@ class L3CXProfile(LFCliBase):
side_a_info = self.local_realm.name_to_eid(port_name, debug=debug_)
side_a_shelf = side_a_info[0]
side_a_resource = side_a_info[1]
if port_name.find('.') < 0:
port_name = "%d.%s" % (side_a_info[1], port_name)
cx_name = "%s%s-%i" % (self.name_prefix, side_a_info[2], len(self.created_cx))
@@ -456,7 +514,7 @@ class L3CXProfile(LFCliBase):
self.local_realm.json_post(url, data, debug_=debug_,
suppress_related_commands_=suppress_related_commands)
if tos != None:
if tos:
self.local_realm.set_endp_tos(endp_a_name, tos)
self.local_realm.set_endp_tos(endp_b_name, tos)
@@ -485,7 +543,6 @@ class L3CXProfile(LFCliBase):
side_b_info = self.local_realm.name_to_eid(port_name, debug=debug_)
side_b_shelf = side_b_info[0]
side_b_resource = side_b_info[1]
side_b_name = side_b_info[2]
cx_name = "%s%s-%i" % (self.name_prefix, port_name, len(self.created_cx))
endp_a_name = cx_name + "-A"

View File

@@ -8,7 +8,6 @@ import time
import datetime
import ast
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -74,7 +73,8 @@ class L4CXProfile(LFCliBase):
print(".", end='')
print("")
def compare_vals(self, old_list, new_list):
@staticmethod
def compare_vals(old_list, new_list):
passes = 0
expected_passes = 0
if len(old_list) == len(new_list):
@@ -139,10 +139,16 @@ class L4CXProfile(LFCliBase):
self.json_post(req_url, data)
# pprint(data)
def create(self, ports=[], sleep_time=.5, debug_=False, suppress_related_commands_=None):
def create(self, ports=None, sleep_time=.5, debug_=False, suppress_related_commands_=None):
if ports is None:
ports = []
cx_post_data = []
for port_name in ports:
print("port_name: {} len: {} self.local_realm.name_to_eid(port_name): {}".format(port_name, len(self.local_realm.name_to_eid(port_name)), self.local_realm.name_to_eid(port_name)))
print("port_name: {} len: {} self.local_realm.name_to_eid(port_name): {}".format(port_name,
len(self.local_realm.name_to_eid(
port_name)),
self.local_realm.name_to_eid(
port_name)))
shelf = self.local_realm.name_to_eid(port_name)[0]
resource = self.local_realm.name_to_eid(port_name)[1]
name = self.local_realm.name_to_eid(port_name)[2]
@@ -189,16 +195,16 @@ class L4CXProfile(LFCliBase):
arguments=None,
iterations=0,
debug=False):
try:
if duration_sec:
duration_sec = LFCliBase.parse_time(duration_sec).seconds
except:
else:
if (duration_sec is None) or (duration_sec <= 1):
raise ValueError("L4CXProfile::monitor wants duration_sec > 1 second")
if (duration_sec <= monitor_interval):
if duration_sec <= monitor_interval:
raise ValueError("L4CXProfile::monitor wants duration_sec > monitor_interval")
if report_file == None:
if report_file is None:
raise ValueError("Monitor requires an output file to be defined")
if created_cx == None:
if created_cx is None:
raise ValueError("Monitor needs a list of Layer 4 connections")
if (monitor_interval is None) or (monitor_interval < 1):
raise ValueError("L4CXProfile::monitor wants monitor_interval >= 1 second")
@@ -287,7 +293,6 @@ class L4CXProfile(LFCliBase):
endpoint_data["Timestamp"] = test_timestamp
full_test_data_list.append(endpoint_data)
header_row.append("Timestamp")
header_row.append('Timestamp milliseconds')
df = pd.DataFrame(full_test_data_list)
@@ -299,7 +304,8 @@ class L4CXProfile(LFCliBase):
df = df[["Timestamp", "Timestamp milliseconds", *header_row[:-2]]]
# compare previous data to current data
systeminfo = ast.literal_eval(requests.get('http://'+str(self.lfclient_host)+':'+str(self.lfclient_port)).text)
systeminfo = ast.literal_eval(
requests.get('http://' + str(self.lfclient_host) + ':' + str(self.lfclient_port)).text)
if output_format == 'hdf':
df.to_hdf(report_file, 'table', append=True)

View File

@@ -27,15 +27,15 @@ class ChamberViewBase(LFCliBase):
def remove_text_blobs(self):
pass
def add_text_blobs(self, type="", name="", data="", debug=False):
data = {'type': type,
def add_text_blobs(self, text_type="", name="", data="", debug=False):
data = {'type': text_type,
'name': name,
"text": data
}
self.json_post("/cli-json/add_text_blob/", data, debug_=debug)
def get_text_blob(self, type="", name="", debug=False):
data = {'type': type,
def get_text_blob(self, text_type="", name="", debug=False):
data = {'type': text_type,
'name': name,
}
return self.json_post("/cli-json/show_text_blob/", data, debug_=debug)

View File

@@ -22,22 +22,24 @@ class LFDataCollection:
self.debug = debug or local_realm.debug
def json_get(self, _req_url, debug_=False):
return self.parent_realm.json_get(_req_url, debug_=False)
return self.parent_realm.json_get(_req_url, debug_=debug_)
def check_json_validity(self, keyword=None, json_response=None):
@staticmethod
def check_json_validity(keyword=None, json_response=None):
if json_response is None:
raise ValueError("Cannot find columns requested to be searched in port manager. Exiting script, please retry.")
raise ValueError(
"Cannot find columns requested to be searched in port manager. Exiting script, please retry.")
if keyword is not None and keyword not in json_response:
raise ValueError("Cannot find proper information from json. Please check your json request. Exiting script, please retry.")
raise ValueError(
"Cannot find proper information from json. Please check your json request. Exiting script, please retry.")
def get_milliseconds(self,
timestamp):
@staticmethod
def get_milliseconds(timestamp):
return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds() * 1000
def get_seconds(self,
timestamp):
return (timestamp - datetime.datetime(1970,1,1)).total_seconds()
@staticmethod
def get_seconds(timestamp):
return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds()
# only for ipv4_variable_time at the moment
def monitor_interval(self, header_row_=None,
@@ -54,7 +56,8 @@ class LFDataCollection:
# get responses from json
layer_3_response = self.json_get("/endp/%s?fields=%s" % (created_cx_, layer3_fields_), debug_=self.debug)
if port_mgr_fields_ is not None:
port_mgr_response=self.json_get("/port/1/1/%s?fields=%s" % (sta_list_, port_mgr_fields_), debug_=self.debug)
port_mgr_response = self.json_get("/port/1/1/%s?fields=%s" % (sta_list_, port_mgr_fields_),
debug_=self.debug)
# check json response validity
self.check_json_validity(keyword="endpoint", json_response=layer_3_response)
@@ -86,6 +89,4 @@ class LFDataCollection:
temp_list.append(merge[name])
return temp_list
# class WebSocket():

View File

@@ -101,17 +101,17 @@ class MACVLANProfile(LFCliBase):
pprint(set_port.set_port_current_flags)
pprint(set_port.set_port_interest_flags)
return
if (param_name in set_port.set_port_cmd_flags):
if param_name in set_port.set_port_cmd_flags:
if (value == 1) and (param_name not in self.desired_set_port_cmd_flags):
self.desired_set_port_cmd_flags.append(param_name)
elif value == 0:
self.desired_set_port_cmd_flags.remove(param_name)
elif (param_name in set_port.set_port_current_flags):
elif param_name in set_port.set_port_current_flags:
if (value == 1) and (param_name not in self.desired_set_port_current_flags):
self.desired_set_port_current_flags.append(param_name)
elif value == 0:
self.desired_set_port_current_flags.remove(param_name)
elif (param_name in set_port.set_port_interest_flags):
elif param_name in set_port.set_port_interest_flags:
if (value == 1) and (param_name not in self.desired_set_port_interest_flags):
self.desired_set_port_interest_flags.append(param_name)
elif value == 0:

View File

@@ -2,8 +2,7 @@
import sys
import os
import importlib
from pprint import pprint
import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
@@ -36,7 +35,7 @@ class MULTICASTProfile(LFCliBase):
# Clean out our local lists, this by itself does NOT remove anything from LANforge manager.
# but, if you are trying to modify existing connections, then clearing these arrays and
# re-calling 'create' will do the trick.
created_mc = {}
self.created_mc = {}
def get_mc_names(self):
return self.created_mc.keys()
@@ -52,7 +51,7 @@ class MULTICASTProfile(LFCliBase):
debug_ = True
for endp_name in self.get_mc_names():
print("Starting mcast endpoint: %s" % (endp_name))
print("Starting mcast endpoint: %s" % endp_name)
json_data = {
"endp_name": endp_name
}
@@ -96,8 +95,6 @@ class MULTICASTProfile(LFCliBase):
side_tx_port = side_tx_info[2]
side_tx_name = "%smtx-%s-%i" % (self.name_prefix, side_tx_port, len(self.created_mc))
json_data = []
# add_endp mcast-xmit-sta 1 1 side_tx mc_udp -1 NO 4000000 0 NO 1472 0 INCREASING NO 32 0 0
json_data = {
'alias': side_tx_name,

View File

@@ -203,7 +203,7 @@ def main():
url = base_url+"/port/1/%s/list?fields=alias" % (resource_id)
lf_r = LFRequest.LFRequest(url)
json_response = lf_r.getAsJson()
if json_response == None:
if json_response is None:
raise Exception("no reponse to: "+url)
port_map = LFUtils.portListToAliasMap(json_response)
#LFUtils.debug_printer.pprint(port_map)

View File

@@ -136,7 +136,7 @@ def main():
# Now lets do some cli-socket scripting
gui_telnet = pexpect.spawn('telnet %s %s'%(host, clisock))
if (gui_telnet == None):
if gui_telnet is None:
print ("Unable to telnet to %s:%s"%(host,clisock));
exit(1)

708
py-json/port_probe.py Normal file
View File

@@ -0,0 +1,708 @@
#!/usr/bin/env python3
import importlib
from time import sleep
# import pandas as pd
import sys
import os
from pprint import pprint
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
# Probe data can change frequently. It is recommended to update
class ProbePort(LFCliBase):
    """Query a LANforge port probe endpoint and derive PHY data rates.

    Issues a probe request for a single port (EID such as ``1.1.wlan0``),
    parses the text blob returned under ``probe results`` and extracts the
    signal levels plus tx/rx bitrate details (MCS, NSS, bandwidth).  From
    those it computes the theoretical 802.11 data rate for both the short
    (0.4 us) and long (0.8 us) guard intervals and records whichever is
    closest to the rate the radio actually reported.
    """

    # Coding rate R and coded bits per subcarrier N_bpscs, keyed by MCS.
    # Keys 0-7 cover HT (per-stream index, i.e. MCS modulo 8) and the first
    # eight VHT/HE indices; 8-9 are the extra 256-QAM VHT/HE entries.
    # An unknown MCS deliberately yields (0, 0) -> a computed rate of 0,
    # matching the original fall-through behaviour.
    _MCS_CODING = {
        0: (1 / 2, 1),  # BPSK
        1: (1 / 2, 2),  # QPSK
        2: (3 / 4, 2),  # QPSK
        3: (1 / 2, 4),  # 16-QAM
        4: (3 / 4, 4),  # 16-QAM
        5: (2 / 3, 6),  # 64-QAM
        6: (3 / 4, 6),  # 64-QAM
        7: (5 / 6, 6),  # 64-QAM
        8: (3 / 4, 8),  # 256-QAM (VHT/HE only)
        9: (5 / 6, 8),  # 256-QAM (VHT/HE only)
    }

    # Number of data subcarriers N_sd, keyed by channel bandwidth in MHz.
    _N_SD_BY_BW = {20: 52, 40: 108, 80: 234, 160: 468}

    def __init__(self,
                 lfhost=None,
                 lfport='8080',
                 debug=False,
                 eid_str=None):
        """
        :param lfhost: LANforge manager host for the JSON API
        :param lfport: LANforge manager port (default '8080')
        :param debug: when True, print the probe path and raw response
        :param eid_str: port EID, e.g. '1.1.wlan0'; the last two components
                        (resource, port) form the probe URL
        """
        super().__init__(_lfjson_host=lfhost,
                         _lfjson_port=lfport,
                         _debug=debug)
        hunks = eid_str.split(".")
        self.eid_str = eid_str
        # Probe path is built from the last two EID components: resource/port.
        self.probepath = "/probe/1/%s/%s" % (hunks[-2], hunks[-1])
        self.response = None
        self.signals = None
        self.ofdma = False

        # tx-side parse/derivation results (populated by refreshProbe)
        self.tx_bitrate = None
        self.tx_mcs = None
        self.tx_nss = None
        self.tx_mbit = None
        self.tx_mhz = None
        self.tx_gi = None
        self.tx_duration = None
        self.tx_mbit_calc = None
        self.tx_data_rate_gi_short_Mbps = None
        self.tx_data_rate_gi_long_Mbps = None

        # rx-side parse/derivation results (populated by refreshProbe)
        self.rx_bitrate = None
        self.rx_mcs = None
        self.rx_nss = None
        self.rx_mbit = None
        self.rx_mhz = None
        self.rx_gi = None
        self.rx_duration = None
        self.rx_mbit_calc = None
        self.rx_data_rate_gi_short_Mbps = None
        self.rx_data_rate_gi_long_Mbps = None

        self.data_rate = None

    def refreshProbe(self):
        """Re-run the probe and re-parse the signal and tx/rx bitrate data.

        Posts to the probe endpoint, waits briefly for the probe to run,
        then fetches and parses the text report.
        """
        self.json_post(self.probepath, {})
        sleep(0.2)
        self.response = self.json_get(self.probepath)
        if self.debug:
            print("probepath (eid): {probepath}".format(probepath=self.probepath))
            pprint("Probe response: {response}".format(response=self.response))
        text = self.response['probe-results'][0][self.eid_str]['probe results'].split('\n')

        # Lines such as "signal: -40 dBm" become {'signal': '-40', ...}.
        signal_lines = [x.strip('\t').split('\t') for x in text if 'signal' in x]
        keys = [x[0].strip(' ').strip(':') for x in signal_lines]
        values = [x[1].strip('dBm').strip(' ') for x in signal_lines]
        print("signals keys: {keys}".format(keys=keys))
        print("signals values: {values}".format(values=values))
        self.signals = dict(zip(keys, values))

        self._parse_direction(text, 'tx')
        self._parse_direction(text, 'rx')

    def _parse_direction(self, text, direction):
        """Parse the '<direction> bitrate' report line and derive PHY fields.

        Extracts bitrate, bandwidth (MHz), MCS, NSS and Mbit/s for the given
        *direction* ('tx' or 'rx'), storing them on the matching
        ``<direction>_*`` attributes, then dispatches to the HE/VHT/HT
        data-rate calculation.  Legacy frames (no MCS field) stop after the
        bitrate/MHz parse.
        """
        label = '%s bitrate' % direction
        bitrate_line = [x for x in text if label in x][0].replace('\t', ' ')
        print("{d}_bitrate {line}".format(d=direction, line=bitrate_line))
        bitrate = bitrate_line.split(':')[-1].strip(' ')
        setattr(self, '%s_bitrate' % direction, bitrate)
        print("self.{d}_bitrate {b}".format(d=direction, b=bitrate))

        raw = [x.strip('\t') for x in text if label in x][0]
        # Legacy encodings (e.g. "6.0MBit/s") carry no MHz field; the 2.4 GHz
        # band reports 20 MHz, so assume 20 when the field is absent.
        if 'MHz' in bitrate_line:
            mhz = raw.split('MHz')[0].rsplit(' ')[-1].strip(' ')
        else:
            mhz = 20
        setattr(self, '%s_mhz' % direction, mhz)
        print("{d}_mhz {mhz}".format(d=direction, mhz=mhz))

        mcs_field = raw.split(':')[1].strip('\t')
        if 'MCS' not in mcs_field:
            # Legacy frame: no MCS, nothing further to derive.
            print("No {d} MCS value:{line}".format(d=direction, line=bitrate_line))
            return
        mcs = int(mcs_field.split('MCS')[1].strip(' ').split(' ')[0])
        setattr(self, '%s_mcs' % direction, mcs)
        print("self.{d}_mcs {mcs}".format(d=direction, mcs=mcs))

        # BUG FIX: the original tested "'NSS' in text" where *text* is a list
        # of lines -- that is a whole-line equality test and was effectively
        # always False, so NSS was always re-derived from the MCS index.
        # Test the bitrate line itself instead.
        if 'NSS' in bitrate_line:
            # take only the first token after 'NSS' (the stream count)
            nss = raw.split('NSS')[1].strip(' ').split(' ')[0]
        else:
            # HT encodes the stream count in the MCS index: 8 MCS per stream.
            nss = mcs // 8 + 1
        setattr(self, '%s_nss' % direction, nss)
        print("{d}_nss {nss}".format(d=direction, nss=nss))

        mbit = float(bitrate.split(' ')[0])
        setattr(self, '%s_mbit' % direction, mbit)
        print("{d}_mbit {mbit}".format(d=direction, mbit=mbit))

        if 'HE' in bitrate_line:
            getattr(self, 'calculated_data_rate_%s_HE' % direction)()
        elif 'VHT' in bitrate_line:
            getattr(self, 'calculated_data_rate_%s_VHT' % direction)()
        else:
            getattr(self, 'calculated_data_rate_%s_HT' % direction)()

    def getSignalAvgCombined(self):
        """Return the combined 'signal avg' value (dBm, first field)."""
        return self.signals['signal avg'].split(' ')[0]

    def getSignalAvgPerChain(self):
        """Return the per-chain 'signal avg' values (everything after the first field)."""
        return ' '.join(self.signals['signal avg'].split(' ')[1:])

    def getSignalCombined(self):
        """Return the combined 'signal' value (dBm, first field)."""
        return self.signals['signal'].split(' ')[0]

    def getSignalPerChain(self):
        """Return the per-chain 'signal' values (everything after the first field)."""
        return ' '.join(self.signals['signal'].split(' ')[1:])

    def getBeaconSignalAvg(self):
        """Return 'beacon signal avg' with all spaces removed.

        (The original joined the string's characters with spaces and then
        stripped every space; removing the spaces directly is equivalent.)
        """
        return self.signals['beacon signal avg'].replace(' ', '')

    def _calculated_data_rate(self, direction, ht=False):
        """Compute short- and long-GI data rates (Mbps) for *direction*.

        Reads the ``<direction>_mhz/_mcs/_nss/_mbit`` attributes, computes
        rate = (N_sd * N_bpscs * R * N_ss) / (T_dft + T_gi) for both guard
        intervals, stores both on ``<direction>_data_rate_gi_*_Mbps`` and
        records whichever is closest to the radio-reported Mbit value in
        ``<direction>_mbit_calc`` / ``<direction>_gi``.

        :param direction: 'tx' or 'rx'
        :param ht: True for HT, where the 8-entry coding table repeats per
                   spatial stream (MCS modulo 8); False for VHT/HE
        """
        T_dft = 3.2 * 10 ** -6       # OFDM symbol duration used throughout
        T_gi_short = .4 * 10 ** -6   # short guard interval
        T_gi_long = .8 * 10 ** -6    # long guard interval
        # The actual GI in use is not reported, so both are computed and the
        # one closest to the reported rate is kept.
        mhz = getattr(self, '%s_mhz' % direction)
        mcs = getattr(self, '%s_mcs' % direction)
        nss = getattr(self, '%s_nss' % direction)
        mbit = getattr(self, '%s_mbit' % direction)

        bw = int(mhz)
        print("Mhz {Mhz}".format(Mhz=mhz))
        N_sd = self._N_SD_BY_BW.get(bw)
        if N_sd is None:
            print("For HT if cannot be read bw is assumed to be 20")
            N_sd = 52
            setattr(self, '%s_mhz' % direction, 20)

        # HT repeats the same 8 codings for every spatial stream, so fold
        # the index; VHT/HE index the table directly.
        R, N_bpscs = self._MCS_CODING.get(mcs % 8 if ht else mcs, (0, 0))

        print(
            "{d}: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
                d=direction, mcs=mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=nss, T_dft=T_dft,
                T_gi_short=T_gi_short))
        rate_gi_short = ((N_sd * N_bpscs * R * float(nss)) / (T_dft + T_gi_short)) / 1000000
        setattr(self, '%s_data_rate_gi_short_Mbps' % direction, rate_gi_short)
        print("{d}_data_rate gi_short {data_rate} Mbit/s".format(d=direction, data_rate=rate_gi_short))

        print(
            "{d}: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
                d=direction, mcs=mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=nss, T_dft=T_dft,
                T_gi_long=T_gi_long))
        rate_gi_long = ((N_sd * N_bpscs * R * float(nss)) / (T_dft + T_gi_long)) / 1000000
        setattr(self, '%s_data_rate_gi_long_Mbps' % direction, rate_gi_long)
        print("{d}_data_rate gi_long {data_rate} Mbps".format(d=direction, data_rate=rate_gi_long))

        # Keep whichever guard-interval rate best matches the reported rate.
        if abs(mbit - rate_gi_short) <= abs(mbit - rate_gi_long):
            setattr(self, '%s_mbit_calc' % direction, rate_gi_short)
            setattr(self, '%s_gi' % direction, T_gi_short)
        else:
            setattr(self, '%s_mbit_calc' % direction, rate_gi_long)
            setattr(self, '%s_gi' % direction, T_gi_long)

    def calculated_data_rate_tx_HT(self):
        """Derive the tx data rate for an HT (802.11n) encoding."""
        print("calculated_data_rate_tx_HT")
        self._calculated_data_rate('tx', ht=True)

    def calculated_data_rate_rx_HT(self):
        """Derive the rx data rate for an HT (802.11n) encoding."""
        print("calculated_data_rate_rx_HT")
        self._calculated_data_rate('rx', ht=True)

    def calculated_data_rate_tx_VHT(self):
        """Derive the tx data rate for a VHT (802.11ac) encoding."""
        print("calculated_data_rate_tx_VHT")
        self._calculated_data_rate('tx', ht=False)

    def calculated_data_rate_rx_VHT(self):
        """Derive the rx data rate for a VHT (802.11ac) encoding."""
        print("calculated_data_rate_rx_VHT")
        self._calculated_data_rate('rx', ht=False)

    ##########################################
    #
    # HE no OFDMA - changes the calculations
    #
    ###########################################
    # NOTE(review): these HE paths reuse the VHT constants (3.2 us symbol,
    # MCS table up to 9), exactly as the original did.  802.11ax nominally
    # uses 12.8 us symbols and MCS up to 11 -- TODO confirm the intended
    # behaviour before relying on HE numbers.
    def calculated_data_rate_tx_HE(self):
        """Derive the tx data rate for an HE (802.11ax) encoding."""
        print("calculated_data_rate_tx_HE")
        self._calculated_data_rate('tx', ht=False)

    def calculated_data_rate_rx_HE(self):
        """Derive the rx data rate for an HE (802.11ax) encoding."""
        print("calculated_data_rate_rx_HE")
        self._calculated_data_rate('rx', ht=False)

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
class PortUtils():
class PortUtils:
def __init__(self, local_realm):
self.local_realm = local_realm

View File

@@ -5,13 +5,13 @@ import importlib
from pprint import pprint
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
set_port = importlib.import_module("py-json.LANforge.set_port")
class QVLANProfile(LFCliBase):
@@ -19,7 +19,6 @@ class QVLANProfile(LFCliBase):
local_realm,
qvlan_parent="eth1",
num_qvlans=1,
admin_down=False,
dhcp=False,
debug_=False):
super().__init__(lfclient_host, lfclient_port, debug_)
@@ -98,17 +97,17 @@ class QVLANProfile(LFCliBase):
pprint(set_port.set_port_current_flags)
pprint(set_port.set_port_interest_flags)
return
if (param_name in set_port.set_port_cmd_flags):
if param_name in set_port.set_port_cmd_flags:
if (value == 1) and (param_name not in self.desired_set_port_cmd_flags):
self.desired_set_port_cmd_flags.append(param_name)
elif value == 0:
self.desired_set_port_cmd_flags.remove(param_name)
elif (param_name in set_port.set_port_current_flags):
elif param_name in set_port.set_port_current_flags:
if (value == 1) and (param_name not in self.desired_set_port_current_flags):
self.desired_set_port_current_flags.append(param_name)
elif value == 0:
self.desired_set_port_current_flags.remove(param_name)
elif (param_name in set_port.set_port_interest_flags):
elif param_name in set_port.set_port_interest_flags:
if (value == 1) and (param_name not in self.desired_set_port_interest_flags):
self.desired_set_port_interest_flags.append(param_name)
elif value == 0:
@@ -116,11 +115,11 @@ class QVLANProfile(LFCliBase):
else:
raise ValueError("Unknown param name: " + param_name)
def create(self, admin_down=False, debug=False, sleep_time=1):
def create(self, sleep_time=1):
print("Creating qvlans...")
req_url = "/cli-json/add_vlan"
if not self.dhcp and self.first_ip_addr is not None and self.netmask is not None and self.gateway is not None:
if not self.dhcp and self.first_ip_addr and self.netmask and self.gateway:
self.desired_set_port_interest_flags.append("ip_address")
self.desired_set_port_interest_flags.append("ip_Mask")
self.desired_set_port_interest_flags.append("ip_gateway")
@@ -146,8 +145,7 @@ class QVLANProfile(LFCliBase):
"vid": i + 1
}
self.created_qvlans.append("%s.%s.%s#%d" % (self.shelf, self.resource,
self.qvlan_parent, int(
self.desired_qvlans[i][self.desired_qvlans[i].index('#') + 1:])))
self.qvlan_parent, int(self.desired_qvlans[i][self.desired_qvlans[i].index('#') + 1:])))
self.local_realm.json_post(req_url, data)
time.sleep(sleep_time)

View File

@@ -28,8 +28,6 @@ LFCliBase = lfcli_base.LFCliBase
l3_cxprofile = importlib.import_module("py-json.l3_cxprofile")
L3CXProfile = l3_cxprofile.L3CXProfile
l3_cxprofile2 = importlib.import_module("py-json.l3_cxprofile2")
L3CXProfile2 = l3_cxprofile2.L3CXProfile2
l4_cxprofile = importlib.import_module("py-json.l4_cxprofile")
L4CXProfile = l4_cxprofile.L4CXProfile
lf_attenmod = importlib.import_module("py-json.lf_attenmod")
@@ -96,7 +94,7 @@ class Realm(LFCliBase):
_exit_on_error=False,
_exit_on_fail=False,
_proxy_str=None,
_capture_signal_list=[]):
_capture_signal_list=None):
super().__init__(_lfjson_host=lfclient_host,
_lfjson_port=lfclient_port,
_debug=debug_,
@@ -105,6 +103,8 @@ class Realm(LFCliBase):
_proxy_str=_proxy_str,
_capture_signal_list=_capture_signal_list)
if _capture_signal_list is None:
_capture_signal_list = []
self.debug = debug_
# if debug_:
# print("Realm _proxy_str: %s" % _proxy_str)
@@ -112,7 +112,6 @@ class Realm(LFCliBase):
self.check_connect()
self.chan_to_freq = {}
self.freq_to_chan = {}
freq = 0
chan = 1
for freq in range(2412, 2472, 5):
self.freq_to_chan[freq] = chan
@@ -223,14 +222,15 @@ class Realm(LFCliBase):
port_list=sta_list,
debug=debug_)
def rm_port(self, port_eid, check_exists=True, debug_=False):
def rm_port(self, port_eid, check_exists=True, debug_=None):
if port_eid is None:
raise ValueError("realm.rm_port: want a port eid like 1.1.eth1")
debug_ |= self.debug
if debug_ is None:
debug_ = self.debug
req_url = "/cli-json/rm_vlan"
eid = self.name_to_eid(port_eid)
if check_exists:
if not self.port_exists(port_eid):
if not self.port_exists(port_eid, debug=debug_):
return False
data = {
@@ -238,20 +238,22 @@ class Realm(LFCliBase):
"resource": eid[1],
"port": eid[2]
}
rsp = self.json_post(req_url, data, debug_=debug_)
self.json_post(req_url, data, debug_=debug_)
return True
def port_exists(self, port_eid):
def port_exists(self, port_eid, debug=None):
if debug is None:
debug = self.debug
eid = self.name_to_eid(port_eid)
current_stations = self.json_get("/port/%s/%s/%s?fields=alias" % (eid[0], eid[1], eid[2]))
if not current_stations is None:
current_stations = self.json_get("/port/%s/%s/%s?fields=alias" % (eid[0], eid[1], eid[2]),
debug_=debug)
if current_stations:
return True
return False
def admin_up(self, port_eid):
# print("186 admin_up port_eid: "+port_eid)
eid = self.name_to_eid(port_eid)
shelf = eid[0]
resource = eid[1]
port = eid[2]
request = LFUtils.port_up_request(resource_id=resource, port_name=port)
@@ -261,7 +263,6 @@ class Realm(LFCliBase):
def admin_down(self, port_eid):
eid = self.name_to_eid(port_eid)
shelf = eid[0]
resource = eid[1]
port = eid[2]
request = LFUtils.port_down_request(resource_id=resource, port_name=port)
@@ -269,7 +270,6 @@ class Realm(LFCliBase):
def reset_port(self, port_eid):
eid = self.name_to_eid(port_eid)
shelf = eid[0]
resource = eid[1]
port = eid[2]
request = LFUtils.port_reset_request(resource_id=resource, port_name=port)
@@ -317,13 +317,13 @@ class Realm(LFCliBase):
def cleanup_cxe_prefix(self, prefix):
cx_list = self.cx_list()
if cx_list is not None:
if cx_list:
for cx_name in cx_list:
if cx_name.startswith(prefix):
self.rm_cx(cx_name)
endp_list = self.json_get("/endp/list")
if endp_list is not None:
if endp_list:
if 'endpoint' in endp_list:
endp_list = list(endp_list['endpoint'])
for idx in range(len(endp_list)):
@@ -351,12 +351,11 @@ class Realm(LFCliBase):
if debug_:
dbg_param = "?__debug=1"
while (last_response != "YES"):
response = self.json_post("/gui-json/cmd%s" % dbg_param, data, debug_=debug_,
response_json_list_=response_json)
while last_response != "YES":
self.json_post("/gui-json/cmd%s" % dbg_param, data, debug_=debug_, response_json_list_=response_json)
# LFUtils.debug_printer.pprint(response_json)
last_response = response_json[0]["LAST"]["response"]
if (last_response != "YES"):
if last_response != "YES":
last_response = None
response_json = []
time.sleep(1)
@@ -397,22 +396,23 @@ class Realm(LFCliBase):
found_endps = {}
if debug:
print("Waiting on endpoint endp_list {}".format(endp_list))
if (endp_list is not None) and ("items" not in endp_list):
if endp_list and ("items" not in endp_list):
try:
endp_list = list(endp_list['endpoint'])
for idx in range(len(endp_list)):
name = list(endp_list[idx])[0]
found_endps[name] = name
except:
print("non-fatal exception endp_list = list(endp_list['endpoint'] did not exist, will wait some more")
print(
"non-fatal exception endp_list = list(endp_list['endpoint'] did not exist, will wait some more")
for req in these_endp:
if not req in found_endps:
if req not in found_endps:
if debug:
print("Waiting on endpoint: %s" % (req))
print("Waiting on endpoint: %s" % req)
wait_more = True
count += 1
if (count > 100):
if count > 100:
break
return not wait_more
@@ -429,23 +429,25 @@ class Realm(LFCliBase):
found_cxs = {}
cx_list = self.cx_list()
not_cx = ['warnings', 'errors', 'handler', 'uri', 'items']
if cx_list is not None:
if cx_list:
for cx_name in cx_list:
if cx_name in not_cx:
continue
found_cxs[cx_name] = cx_name
for req in these_cx:
if not req in found_cxs:
if req not in found_cxs:
if debug:
print("Waiting on CX: %s" % (req))
print("Waiting on CX: %s" % req)
wait_more = True
count += 1
if (count > 100):
if count > 100:
break
return not wait_more
# def wait_until_database_loaded(self):
# Returns map of all stations with port+type == WIFI-STATION
# Key is the EID, value is the map of key/values for the port values.
def station_map(self):
@@ -457,7 +459,7 @@ class Realm(LFCliBase):
sta_map = {}
temp_map = LFUtils.portListToAliasMap(response)
for k, v in temp_map.items():
if (v['port type'] == "WIFI-STA"):
if v['port type'] == "WIFI-STA":
sta_map[k] = v
temp_map.clear()
del temp_map
@@ -482,7 +484,6 @@ class Realm(LFCliBase):
# Returns list of all ports
def port_list(self):
sta_list = []
response = super().json_get("/port/list?fields=all")
if (response is None) or ("interfaces" not in response):
print("port_list: incomplete response:")
@@ -522,7 +523,7 @@ class Realm(LFCliBase):
# removes port by eid/eidpn
def remove_vlan_by_eid(self, eid):
if (eid is None) or ("" == eid):
if (eid is None) or (eid == ""):
raise ValueError("removeVlanByEid wants eid like 1.1.sta0 but given[%s]" % eid)
hunks = self.name_to_eid(eid)
# print("- - - - - - - - - - - - - - - - -")
@@ -556,7 +557,7 @@ class Realm(LFCliBase):
if debug_:
print("- prelim - - - - - - - - - - - - - - - - - - -")
pprint(record)
if (record["port type"] == "WIFI-STA"):
if record["port type"] == "WIFI-STA":
prelim_map[name] = record
except Exception as x:
@@ -645,7 +646,7 @@ class Realm(LFCliBase):
for sta_eid in station_list:
if debug:
print("checking sta-eid: %s" % (sta_eid))
print("checking sta-eid: %s" % sta_eid)
eid = self.name_to_eid(sta_eid)
response = super().json_get("/port/%s/%s/%s?fields=alias,ip,port+type,ipv6+address" %
@@ -663,7 +664,7 @@ class Realm(LFCliBase):
if v['ip'] in waiting_states:
wait_more = True
if debug:
print("Waiting for port %s to get IPv4 Address." % (sta_eid))
print("Waiting for port %s to get IPv4 Address." % sta_eid)
else:
if sta_eid not in stas_with_ips:
stas_with_ips[sta_eid] = {'ipv4': v['ip']}
@@ -682,7 +683,7 @@ class Realm(LFCliBase):
else:
wait_more = True
if debug:
print("Waiting for port %s to get IPv6 Address." % (sta_eid))
print("Waiting for port %s to get IPv6 Address." % sta_eid)
if wait_more:
time.sleep(1)
@@ -698,7 +699,7 @@ class Realm(LFCliBase):
raise ValueError("check for num curr ips expects non-empty list of ports")
for sta_eid in station_list:
if debug:
print("checking sta-eid: %s" % (sta_eid))
print("checking sta-eid: %s" % sta_eid)
eid = self.name_to_eid(sta_eid)
response = super().json_get("/port/%s/%s/%s?fields=alias,ip,port+type,ipv6+address" %
(eid[0], eid[1], eid[2]))
@@ -711,9 +712,9 @@ class Realm(LFCliBase):
break
if ipv4:
v = response['interface']
if (v['ip'] in waiting_states):
if v['ip'] in waiting_states:
if debug:
print("Waiting for port %s to get IPv4 Address." % (sta_eid))
print("Waiting for port %s to get IPv4 Address." % sta_eid)
else:
if debug:
print("Found IP: %s on port: %s" % (v['ip'], sta_eid))
@@ -723,9 +724,9 @@ class Realm(LFCliBase):
num_sta_with_ips += 1
if ipv6:
v = response['interface']
if (v['ip'] in waiting_states):
if v['ip'] in waiting_states:
if debug:
print("Waiting for port %s to get IPv6 Address." % (sta_eid))
print("Waiting for port %s to get IPv6 Address." % sta_eid)
else:
if debug:
@@ -736,11 +737,12 @@ class Realm(LFCliBase):
num_sta_with_ips += 1
return num_sta_with_ips
def duration_time_to_seconds(self, time_string):
@staticmethod
def duration_time_to_seconds(time_string):
if isinstance(time_string, str):
pattern = re.compile("^(\d+)([dhms]$)")
td = pattern.match(time_string)
if td is not None:
if td:
dur_time = int(td.group(1))
dur_measure = str(td.group(2))
if dur_measure == "d":
@@ -757,11 +759,10 @@ class Realm(LFCliBase):
raise ValueError("time_string must be of type str. Type %s provided" % type(time_string))
return duration_sec
def remove_all_stations(self, resource):
port_list = self.station_list()
sta_list = []
if sta_list is not None:
if port_list:
print("Removing all stations")
for item in list(port_list):
if "sta" in list(item)[0]:
@@ -780,7 +781,7 @@ class Realm(LFCliBase):
endp_list = self.json_get("/endp/list")
if "items" in endp_list or "empty" in endp_list:
return
if endp_list is not None or endp_list:
if endp_list:
print("Removing all endps")
endp_list = list(endp_list['endpoint'])
for endp_name in range(len(endp_list)):
@@ -796,10 +797,10 @@ class Realm(LFCliBase):
# remove endpoints
# nc show endpoints
# nc show cross connects
try:
if self.cx_list():
cx_list = list(self.cx_list())
not_cx = ['warnings', 'errors', 'handler', 'uri', 'items', 'empty']
if cx_list is not None:
if cx_list:
print("Removing all cxs")
for cx_name in cx_list:
if cx_name in not_cx:
@@ -810,7 +811,7 @@ class Realm(LFCliBase):
"cx_name": cx_name
}
self.json_post(req_url, data)
except:
else:
print("no cxs to remove")
if remove_all_endpoints:
@@ -820,186 +821,93 @@ class Realm(LFCliBase):
"endpoint": "all"
}
self.json_post(req_url, data)
req_url = "cli-json/show_cx"
data = {
"test_mgr": "all",
"cross_connect": "all"
}
def parse_link(self, link):
link = self.lfclient_url + link
info = ()
def new_station_profile(self, ver = 1):
if ver == 1:
station_prof = StationProfile(self.lfclient_url, local_realm=self, debug_=self.debug, up=False)
#elif ver == 2:
# import station_profile2
# station_prof = station_profile2.StationProfile2(self.lfclient_url, local_realm=self, debug_=self.debug, up=False)
return station_prof
def new_station_profile(self):
return StationProfile(self.lfclient_url, local_realm=self, debug_=self.debug, up=False)
def new_multicast_profile(self, ver = 1):
if ver == 1:
multi_prof = MULTICASTProfile(self.lfclient_host, self.lfclient_port,
local_realm=self, debug_=self.debug, report_timer_=3000)
#elif ver == 2:
# import multicast_profile2
# multi_prof = multicast_profile2.MULTICASTProfile2(self.lfclient_host, self.lfclient_port,
# local_realm=self, debug_=self.debug, report_timer_=3000)
return multi_prof
def new_multicast_profile(self):
return MULTICASTProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug, report_timer_=3000)
def new_wifi_monitor_profile(self, resource_=1, debug_=False, up_=False, ver = 1):
if ver == 1:
wifi_mon_prof = WifiMonitor(self.lfclient_url,
def new_wifi_monitor_profile(self, resource_=1, debug_=False, up_=False):
return WifiMonitor(self.lfclient_url,
local_realm=self,
resource_=resource_,
up=up_,
debug_=(self.debug or debug_))
#elif ver == 2:
# import wifi_monitor_profile2
# wifi_mon_prof = wifi_monitor_profile2.WifiMonitor2(self.lfclient_url,
# local_realm=self,
# resource_=resource_,
# up=up_,
# debug_=(self.debug or debug_))
return wifi_mon_prof
def new_l3_cx_profile(self, ver=1):
if ver == 1:
cx_prof = L3CXProfile(self.lfclient_host,
def new_l3_cx_profile(self):
return L3CXProfile(self.lfclient_host,
self.lfclient_port,
local_realm=self,
debug_=self.debug,
report_timer_=3000)
elif ver == 2:
cx_prof = L3CXProfile2(self.lfclient_host,
self.lfclient_port,
local_realm=self,
debug_=self.debug,
report_timer_=3000)
return cx_prof
def new_l4_cx_profile(self, ver=1):
if ver == 1:
cx_prof = L4CXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
#elif ver == 2:
# import l4_cxprofile2
# cx_prof = l4_cxprofile2.L4CXProfile2(self.lfclient_host,
# self.lfclient_port,
# local_realm=self,
# debug_=self.debug,
# report_timer_=3000)
return cx_prof
def new_attenuator_profile(self, ver=1):
if ver == 1:
atten_prof = ATTENUATORProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
return atten_prof
def new_generic_endp_profile(self, ver=1):
if ver == 1 :
endp_prof = GenCXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
#elif ver == 2:
# import gen_cxprofile2
# endp_prof = gen_cxprofile2.GenCXProfile(self.lfclient_host,
# self.lfclient_port,
# local_realm=self,
# debug_=self.debug,
# report_timer_=3000)
return endp_prof
def new_l4_cx_profile(self):
return L4CXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
def new_generic_cx_profile(self, ver=1):
def new_attenuator_profile(self):
return ATTENUATORProfile(self.lfclient_host, self.lfclient_port, debug_=self.debug)
def new_generic_endp_profile(self):
return GenCXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
def new_generic_cx_profile(self):
"""
@deprecated
:return: new GenCXProfile
"""
if ver == 1:
cx_prof = GenCXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
#elif ver == 2:
# import gen_cxprofile2
# cx_prof = gen_cxprofile2.GenCXProfile(self.lfclient_host,
# self.lfclient_port,
# local_realm=self,
# debug_=self.debug,
# report_timer_=3000)
return cx_prof
return GenCXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
def new_vap_profile(self, ver=1):
if ver == 1:
vap_prof = VAPProfile(lfclient_host=self.lfclient_host, lfclient_port=self.lfclient_port, local_realm=self,
def new_vap_profile(self):
return VAPProfile(lfclient_host=self.lfclient_host, lfclient_port=self.lfclient_port, local_realm=self,
debug_=self.debug)
# elif ver == 2:
# import vap_profile2
# vap_prof = vap_profile2.VAPProfile2(lfclient_host=self.lfclient_host, lfclient_port=self.lfclient_port, local_realm=self,
# debug_=self.debug)
return vap_prof
def new_vr_profile(self, ver=2):
if ver == 2:
from vr_profile2 import VRProfile
vap_prof = VRProfile(local_realm=self,
debug=self.debug)
return vap_prof
# def new_vr_profile(self):
# return VRProfile(local_realm=self,
# debug=self.debug)
def new_http_profile(self, ver = 1):
if ver == 1:
http_prof = HTTPProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
# elif ver == 2:
# import http_profile2
# http_prof = http_profile2.HTTPProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
return http_prof
def new_http_profile(self):
return HTTPProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
def new_fio_endp_profile(self, ver = 1):
if ver == 1:
cx_prof = FIOEndpProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
# elif ver == 2:
# import fio_endp_profile2
# cx_prof = fio_endp_profile2.FIOEndpProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
return cx_prof
def new_fio_endp_profile(self):
return FIOEndpProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
def new_dut_profile(self, ver = 1):
if ver == 1:
dut_profile = DUTProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
# elif ver == 2:
# import dut_profile2
# dut_profile = dut_profile2.DUTProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
return dut_profile
def new_dut_profile(self):
return DUTProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
def new_mvlan_profile(self, ver = 1):
if ver == 1:
mac_vlan_profile = MACVLANProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
# elif ver == 2:
# import mac_vlan_profile2
# mac_vlan_profile = mac_vlan_profile2.MACVLANProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
return mac_vlan_profile
def new_mvlan_profile(self):
return MACVLANProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
def new_qvlan_profile(self):
return QVLANProfile(self.host, self.port, local_realm=self, debug_=self.debug)
def new_test_group_profile(self, ver = 1):
if ver == 1:
test_group_profile = TestGroupProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
# elif ver == 2:
# import test_group_profile2
# test_group_profile = test_group_profile2.TestGroupProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
return test_group_profile
def new_test_group_profile(self):
return TestGroupProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
def new_lf_data_collection(self):
return LFDataCollection(local_realm=self)
class PacketFilter():
def get_filter_wlan_assoc_packets(self, ap_mac, sta_mac):
filter = "-T fields -e wlan.fc.type_subtype -e wlan.addr -e wlan.fc.pwrmgt " \
class PacketFilter:
@staticmethod
def get_filter_wlan_assoc_packets(ap_mac, sta_mac):
return "-T fields -e wlan.fc.type_subtype -e wlan.addr -e wlan.fc.pwrmgt " \
"-Y \"(wlan.addr==%s or wlan.addr==%s) and wlan.fc.type_subtype<=3\" " % (ap_mac, sta_mac)
return filter
def get_filter_wlan_null_packets(self, ap_mac, sta_mac):
filter = "-T fields -e wlan.fc.type_subtype -e wlan.addr -e wlan.fc.pwrmgt " \
@staticmethod
def get_filter_wlan_null_packets(ap_mac, sta_mac):
return "-T fields -e wlan.fc.type_subtype -e wlan.addr -e wlan.fc.pwrmgt " \
"-Y \"(wlan.addr==%s or wlan.addr==%s) and wlan.fc.type_subtype==44\" " % (ap_mac, sta_mac)
return filter
def run_filter(self, pcap_file, filter):
@staticmethod
def run_filter(pcap_file, file_filter):
filename = "/tmp/tshark_dump.txt"
cmd = "tshark -r %s %s > %s" % (pcap_file, filter, filename)
cmd = "tshark -r %s %s > %s" % (pcap_file, file_filter, filename)
# print("CMD: ", cmd)
os.system(cmd)
lines = []

View File

@@ -12,7 +12,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
@@ -20,7 +19,6 @@ LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
def main():
url = "http://localhost:8080/port/1/1/list"
timeout = 5 # seconds
lf_r = LFRequest.LFRequest(url)
json_response = lf_r.getAsJson(True)
@@ -30,6 +28,7 @@ def main():
# for record in json_response['interfaces']:
# j_printer.pprint(record)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == "__main__":
main()

View File

@@ -300,17 +300,17 @@ class StationProfile:
pprint(set_port.set_port_current_flags)
pprint(set_port.set_port_interest_flags)
return
if (param_name in set_port.set_port_cmd_flags):
if param_name in set_port.set_port_cmd_flags:
if (value == 1) and (param_name not in self.desired_set_port_cmd_flags):
self.desired_set_port_cmd_flags.append(param_name)
elif value == 0:
self.desired_set_port_cmd_flags.remove(param_name)
elif (param_name in set_port.set_port_current_flags):
elif param_name in set_port.set_port_current_flags:
if (value == 1) and (param_name not in self.desired_set_port_current_flags):
self.desired_set_port_current_flags.append(param_name)
elif value == 0:
self.desired_set_port_current_flags.remove(param_name)
elif (param_name in set_port.set_port_interest_flags):
elif param_name in set_port.set_port_interest_flags:
if (value == 1) and (param_name not in self.desired_set_port_interest_flags):
self.desired_set_port_interest_flags.append(param_name)
elif value == 0:
@@ -359,7 +359,7 @@ class StationProfile:
def cleanup(self, desired_stations=None, delay=0.03, debug_=False):
print("Cleaning up stations")
if (desired_stations is None):
if desired_stations is None:
desired_stations = self.station_names
if len(desired_stations) < 1:
@@ -371,7 +371,9 @@ class StationProfile:
self.local_realm.rm_port(port_eid, check_exists=True, debug_=debug_)
time.sleep(delay)
# And now see if they are gone
LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url, port_list=desired_stations)
LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url,
port_list=desired_stations,
debug=debug_)
# Checks for errors in initialization values and creates specified number of stations using init parameters
def create(self, radio,
@@ -440,7 +442,6 @@ class StationProfile:
set_port_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/set_port", debug_=debug)
wifi_extra_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/set_wifi_extra", debug_=debug)
wifi_txo_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/set_wifi_txo", debug_=debug)
my_sta_names = []
# add radio here
if (num_stations > 0) and (len(sta_names_) < 1):
# print("CREATING MORE STA NAMES == == == == == == == == == == == == == == == == == == == == == == == ==")
@@ -449,7 +450,7 @@ class StationProfile:
else:
my_sta_names = sta_names_
if (len(my_sta_names) >= 15) or (suppress_related_commands_ == True):
if (len(my_sta_names) >= 15) or suppress_related_commands_:
self.add_sta_data["suppress_preexec_cli"] = "yes"
self.add_sta_data["suppress_preexec_method"] = 1
self.set_port_data["suppress_preexec_cli"] = "yes"
@@ -503,13 +504,13 @@ class StationProfile:
continue
# print("- 3264 - ## %s ## add_sta_r.jsonPost - - - - - - - - - - - - - - - - - - "%eidn)
json_response = add_sta_r.jsonPost(debug=self.debug)
add_sta_r.jsonPost(debug=self.debug)
finished_sta.append(eidn)
# print("- ~3264 - %s - add_sta_r.jsonPost - - - - - - - - - - - - - - - - - - "%eidn)
time.sleep(0.01)
set_port_r.addPostData(self.set_port_data)
# print("- 3270 -- %s -- set_port_r.jsonPost - - - - - - - - - - - - - - - - - - "%eidn)
json_response = set_port_r.jsonPost(debug)
set_port_r.jsonPost(debug)
# print("- ~3270 - %s - set_port_r.jsonPost - - - - - - - - - - - - - - - - - - "%eidn)
time.sleep(0.01)
@@ -519,10 +520,10 @@ class StationProfile:
self.wifi_txo_data["port"] = name
if self.wifi_extra_data_modified:
wifi_extra_r.addPostData(self.wifi_extra_data)
json_response = wifi_extra_r.jsonPost(debug)
wifi_extra_r.jsonPost(debug)
if self.wifi_txo_data_modified:
wifi_txo_r.addPostData(self.wifi_txo_data)
json_response = wifi_txo_r.jsonPost(debug)
wifi_txo_r.jsonPost(debug)
# append created stations to self.station_names
self.station_names.append("%s.%s.%s" % (radio_shelf, radio_resource, name))
@@ -534,7 +535,7 @@ class StationProfile:
# and set ports up
if dry_run:
return
if (self.up):
if self.up:
self.admin_up()
# for sta_name in self.station_names:
@@ -551,8 +552,15 @@ class StationProfile:
self.add_sta_data["flags"] = self.add_named_flags(self.desired_add_sta_flags, add_sta.add_sta_flags)
self.add_sta_data["flags_mask"] = self.add_named_flags(self.desired_add_sta_flags_mask,
add_sta.add_sta_flags)
station_eid = self.local_realm.name_to_eid(station)
station_shelf = station_eid[0]
station_resource = station_eid[1]
station_port = station_eid[2]
self.add_sta_data["radio"] = radio
self.add_sta_data["sta_name"] = station
self.add_sta_data["shelf"] = station_shelf
self.add_sta_data["resource"] = station_resource
self.add_sta_data["sta_name"] = station_port
self.add_sta_data["ssid"] = 'NA'
self.add_sta_data["key"] = 'NA'
self.add_sta_data['mac'] = 'NA'
@@ -565,4 +573,4 @@ class StationProfile:
print(self.lfclient_url + "/cli_json/add_sta")
print(self.add_sta_data)
add_sta_r.addPostData(self.add_sta_data)
json_response = add_sta_r.jsonPost(self.debug)
add_sta_r.jsonPost(self.debug)

View File

@@ -65,4 +65,3 @@ class TestBase:
self.stop()
self.report()
self.clean_up()

View File

@@ -14,7 +14,6 @@ import threading
import re
import json
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -28,7 +27,8 @@ updates_path = webconsole_dir + "/web_json/updates.js"
class ClientVisualization(LFCliBase, threading.Thread):
def __init__(self, lfclient_host="localhost", lfclient_port=8080, num_clients= 64, max_data= 120, thread_id=None, _debug_on=False, _exit_on_error=False, _exit_on_fail=False):
def __init__(self, lfclient_host="localhost", lfclient_port=8080, num_clients=64, max_data=120, thread_id=None,
_debug_on=False, _exit_on_error=False, _exit_on_fail=False):
super().__init__(lfclient_host, lfclient_port, _debug=_debug_on, _exit_on_fail=_exit_on_fail)
threading.Thread.__init__(self)
self.num_clients = num_clients
@@ -57,14 +57,14 @@ class ClientVisualization(LFCliBase, threading.Thread):
print(i[j]['port type'])
if i[j]['port type'] == "WIFI-STA" and i[j]['parent dev'] == "wiphy1" and i[j]['alias'] != 'wlan1':
# print(j)
if i[j]['down'] == False and i[j]['phantom'] == False and i[j]['ip'] == '0.0.0.0':
if i[j]['down'] is False and i[j]['phantom'] is False and i[j]['ip'] == '0.0.0.0':
self.scanning += 1
elif i[j]['down'] == False and i[j]['phantom'] == True:
elif i[j]['down'] is False and i[j]['phantom'] is True:
self.phantom += 1
elif i[j]['down'] == True and i[j]['phantom'] == True:
elif i[j]['down'] is True and i[j]['phantom'] is True:
self.phantom += 1
self.client_data['phantom'].append(self.phantom)
elif i[j]['down'] == True and i[j]['phantom'] == False:
elif i[j]['down'] is True and i[j]['phantom'] is False:
self.down += 1
elif i[j]['ip'] != "0.0.0.0":
self.ip += 1
@@ -75,7 +75,6 @@ class ClientVisualization(LFCliBase, threading.Thread):
self.client_data['down'].append(self.down)
self.client_data['ip'].append(self.ip)
for i in self.client_data:
if len(self.client_data[i]) >= self.max_data:
self.client_data[i].pop(0)
@@ -84,8 +83,9 @@ class ClientVisualization(LFCliBase, threading.Thread):
break
class CreateHTML():
def __init__(self, path="", test_name="", time_snap="", dut_ssid="", test_conf_data={}, objective="", test_results={}, chart_data={}, chart_params={}):
class CreateHTML:
def __init__(self, path="", test_name="", time_snap="", dut_ssid="", test_conf_data={}, objective="",
test_results={}, chart_data={}, chart_params={}):
self.head = """
<html>
<head>
@@ -186,9 +186,6 @@ class CreateHTML():
chart_label.append(i)
chart_d.append(chart_data[i])
self.detail_result = """<table width="1000px" border="1" cellpadding="2" cellspacing="0" >
<tr><th colspan="2">Detailed Results</th></tr>
<table width="1000px" border="1" >
@@ -205,9 +202,11 @@ class CreateHTML():
for i in data:
print(data[i])
if str(data[i]).__contains__("PASS"):
self.detail_result = self.detail_result + "<th colspan='2' bgcolor='#90EE90'>" + str(data[i]) + "</th>"
self.detail_result = self.detail_result + "<th colspan='2' bgcolor='#90EE90'>" + str(
data[i]) + "</th>"
elif str(data[i]).__contains__("FAIL"):
self.detail_result = self.detail_result + "<th colspan='2' bgcolor='orange'>" + str(data[i]) + "</th>"
self.detail_result = self.detail_result + "<th colspan='2' bgcolor='orange'>" + str(
data[i]) + "</th>"
else:
self.detail_result = self.detail_result + "<th colspan='2'>" + str(data[i]) + "</th>"
self.detail_result = self.detail_result + "</tr>"
@@ -231,7 +230,6 @@ class CreateHTML():
self.chart = """<img align='center' style='padding:25;margin:5;width:600px;' src="plot.png" border='0' />"""
self.end = """</table>
</table>
</body>
@@ -240,17 +238,16 @@ class CreateHTML():
self.report = self.head + self.test_conf + self.objective + self.summary_results + self.chart + self.detail_result + self.end
class RuntimeUpdates():
class RuntimeUpdates:
def __init__(self, session_id, init_data):
self.session_id = session_id
self.init_data = init_data
f = open(updates_path, 'r+')
data = f.read()
f.close()
obj = data[data.find('{'): data.rfind('}') + 1]
obj = re.sub('[\']', '"', obj)
data = json.loads(obj)
self.obj = data[data.find('{'): data.rfind('}') + 1]
self.obj = re.sub('[\']', '"', self.obj)
data = json.loads(self.obj)
print(data)
data["web_updates"].append({"ID": self.session_id, "data": self.init_data})
print(data)
@@ -264,9 +261,9 @@ class RuntimeUpdates():
f = open(updates_path, 'r+')
data = f.read()
f.close()
obj = data[data.find('{'): data.rfind('}') + 1]
obj = re.sub('[\']', '"', obj)
data = json.loads(obj)
data_obj = data[data.find('{'): data.rfind('}') + 1]
data_obj = re.sub('[\']', '"', data_obj)
data = json.loads(data_obj)
for update in data["web_updates"]:
if update["ID"] == self.session_id:
@@ -314,14 +311,6 @@ class StatusSession(LFCliBase):
return self.json_get(json_uri)['messages']
if __name__ == "__main__":
obj = StatusMsg(lfclient_host="localhost", lfclient_port=8090, session_id="01_18_21_20_04_20")
print(obj.read())

View File

@@ -5,7 +5,6 @@ import importlib
from pprint import pprint
import time
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -97,7 +96,7 @@ class VAPProfile(LFCliBase):
req_json = LFUtils.portUpRequest(resource, None, debug_on=self.debug)
req_json["port"] = self.vap_name
set_port_r.addPostData(req_json)
json_response = set_port_r.jsonPost(self.debug)
set_port_r.jsonPost(self.debug)
time.sleep(0.03)
def admin_down(self, resource):
@@ -105,7 +104,7 @@ class VAPProfile(LFCliBase):
req_json = LFUtils.port_down_request(resource, None, debug_on=self.debug)
req_json["port"] = self.vap_name
set_port_r.addPostData(req_json)
json_response = set_port_r.jsonPost(self.debug)
set_port_r.jsonPost(self.debug)
time.sleep(0.03)
def use_security(self, security_type, ssid=None, passwd=None):
@@ -141,7 +140,7 @@ class VAPProfile(LFCliBase):
print("Command name name [%s] not defined in %s" % (command_name, self.COMMANDS))
return
if command_name == "add_vap":
if (param_name not in add_vap.add_vap_flags):
if param_name not in add_vap.add_vap_flags:
print("Parameter name [%s] not defined in add_vap.py" % param_name)
if self.debug:
pprint(add_vap.add_vap_flags)
@@ -255,7 +254,6 @@ class VAPProfile(LFCliBase):
raise ValueError("No radio %s.%s found" % (resource, radio))
eid = "1.%s.%s" % (resource, radio)
frequency = 0
country = 0
if eid in jr:
country = jr[eid]["country"]
@@ -312,18 +310,18 @@ class VAPProfile(LFCliBase):
pprint(add_vap_r)
print("- ~1502 - - - - - - - - - - - - - - - - - - - ")
json_response = add_vap_r.jsonPost(debug)
add_vap_r.jsonPost(debug)
# time.sleep(0.03)
time.sleep(2)
set_port_r.addPostData(self.set_port_data)
json_response = set_port_r.jsonPost(debug)
set_port_r.jsonPost(debug)
time.sleep(0.03)
self.wifi_extra_data["resource"] = resource
self.wifi_extra_data["port"] = self.vap_name
if self.wifi_extra_data_modified:
wifi_extra_r.addPostData(self.wifi_extra_data)
json_response = wifi_extra_r.jsonPost(debug)
wifi_extra_r.jsonPost(debug)
port_list = self.local_realm.json_get("port/1/1/list")
if port_list is not None:
@@ -354,19 +352,16 @@ class VAPProfile(LFCliBase):
}
self.local_realm.json_post("cli-json/set_port", bridge_set_port)
if (self.up):
if self.up:
self.admin_up(resource)
def cleanup(self, resource, delay=0.03):
print("Cleaning up VAPs")
desired_ports = ["1.%s.%s" % (resource, self.vap_name), "1.%s.br0" % resource]
del_count = len(desired_ports)
# First, request remove on the list.
for port_eid in desired_ports:
self.local_realm.rm_port(port_eid, check_exists=True)
# And now see if they are gone
LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url, port_list=desired_ports)

View File

@@ -73,7 +73,7 @@ class WifiMonitor:
})
def set_flag(self, param_name, value):
if (param_name not in add_monitor.flags):
if param_name not in add_monitor.flags:
raise ValueError("Flag '%s' does not exist for add_monitor, consult add_monitor.py" % param_name)
if (value == 1) and (param_name not in self.flag_names):
self.flag_names.append(param_name)
@@ -97,13 +97,13 @@ class WifiMonitor:
if (existing_ports is None) or ("interfaces" not in existing_ports) or ("interface" not in existing_ports):
print("No monitor names found to delete")
return
if ("interfaces" in existing_ports):
if "interfaces" in existing_ports:
for eid, info in existing_ports["interfaces"].items():
LFUtils.removePort(resource=resource_,
port_name=info["alias"],
baseurl=self.lfclient_url,
debug=self.debug)
if ("interface" in existing_ports):
if "interface" in existing_ports:
for eid, info in existing_ports["interface"].items():
LFUtils.removePort(resource=resource_,
port_name=info["alias"],
@@ -132,4 +132,3 @@ class WifiMonitor:
"duration": duration_sec
}
self.local_realm.json_post("/cli-json/sniff_port", _data=data)

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
'''
"""
Candela Technologies Inc.
Info : Standard Script for WLAN Capaity Calculator
@@ -13,7 +13,7 @@ This Script has three classes :
3. ac11_calculator : It will take all the user input of 802.11ac station,calculate Intermediate values and Theoretical values.
All classes have different functions: input_parameter() that calculates intermediate values and generate theroretical data
'''
"""
import argparse
import json
@@ -22,8 +22,7 @@ import json
# Class to take all user input (802.11a/b/g Standard)
class abg11_calculator():
class abg11_calculator:
def __init__(self, Traffic_Type, PHY_Bit_Rate, Encryption, QoS, MAC_Frame_802_11, Basic_Rate_Set, Preamble,
slot_name, Codec_Type, RTS_CTS_Handshake, CTS_to_self):
@@ -39,9 +38,6 @@ class abg11_calculator():
self.RTS_CTS_Handshake = RTS_CTS_Handshake
self.CTS_to_self = CTS_to_self
# This function is for calculate intermediate values and Theoretical values
@staticmethod
@@ -329,7 +325,7 @@ class abg11_calculator():
# CWmin_str (leave alone for default)
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
CWmin_str = 31
else:
if (
@@ -376,7 +372,7 @@ class abg11_calculator():
# PHY Bit Rate of Control Frames
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
data = 1
else:
data = 6
@@ -406,7 +402,7 @@ class abg11_calculator():
Preamble_1 = float(192)
if (PHY_Bit == 1) or (PHY_Bit == 2) or (PHY_Bit == 5.5) or (PHY_Bit == 11):
Ttxframe = (14 * 8) / PHY_Bit + (Preamble_1)
Ttxframe = (14 * 8) / PHY_Bit + Preamble_1
Ttxframe_new = format(Ttxframe, '.2f')
else:
@@ -415,7 +411,7 @@ class abg11_calculator():
# RTS/CTS Handshake Overhead
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
SIFS_value = float(10)
else:
SIFS_value = float(16)
@@ -425,7 +421,7 @@ class abg11_calculator():
elif "Yes" in self.RTS_CTS_Handshake:
if (PHY_Bit == 1) or (PHY_Bit == 2) or (PHY_Bit == 5.5) or (PHY_Bit == 11):
RTS_CTS_Handshake = ((20 + 14) * 8) / PHY_Bit + (Preamble_1)
RTS_CTS_Handshake = ((20 + 14) * 8) / PHY_Bit + Preamble_1
else:
RTS_CTS_Handshake = int(((20 + 14) * 8 + 22 + PHY_Bit * 4 - 1) / (PHY_Bit * 4)) * 4 + 2 * 20
@@ -441,26 +437,26 @@ class abg11_calculator():
CTS_to_self_Handshake = 0
else:
if (PHY_Bit == 1) or (PHY_Bit == 2) or (PHY_Bit == 5.5) or (PHY_Bit == 11):
CTS_to_self_Handshake = (14 * 8) / PHY_Bit + (Preamble_1) + SIFS_value
CTS_to_self_Handshake = (14 * 8) / PHY_Bit + Preamble_1 + SIFS_value
else:
CTS_to_self_Handshake = int(
(14 * 8 + 22 + PHY_Bit * 4 - 1) / (PHY_Bit * 4)) * 4 + 20 + SIFS_value
# DIFS calulation
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
DIFS_value = 50
elif ("Short" in self.slot_name):
elif "Short" in self.slot_name:
DIFS_value = 34
else:
DIFS_value = 50
# MeanBackoff calculation
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
c4 = (CWmin_str * 20 / 2)
MeanBackoff_value = float(c4)
elif ("Short" in self.slot_name):
elif "Short" in self.slot_name:
d2 = (CWmin_str * 9 / 2)
MeanBackoff_value = float(d2)
else:
@@ -474,7 +470,7 @@ class abg11_calculator():
Nbits_value = (MAC_MPDU_Size * 8)
# Tmac, time for MAC frame and Tplcp, time for MAC PLCP
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
Tmac_value = Nbits_value / yellow_cell
if "Short" in self.Preamble:
Tplcp = float(96)
@@ -597,8 +593,6 @@ class abg11_calculator():
IP_Throughput_C6_new = "N/A"
IP_Throughput_C7_new = "N/A"
Voice_Call = Max_Frame_Rate_C1 / Codec_Frame_rate
Voice_Call_value = round(Voice_Call)
@@ -620,18 +614,18 @@ class abg11_calculator():
self.Estimated_MOS_Score = "N/A"
self.Maximum_Bidirectional_Voice_Calls = "N/A"
else:
if (Voice_Call_value <= 1):
if Voice_Call_value <= 1:
Maximum_Bidirectional_Voice_Calls1 = self.Max_Frame_Rate_C1_round / Codec_Frame_rate
elif (Voice_Call_value <= 2):
elif Voice_Call_value <= 2:
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C2_round / Codec_Frame_rate
elif (Voice_Call_value <= 5):
elif Voice_Call_value <= 5:
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C3_round / Codec_Frame_rate
elif (Voice_Call_value <= 10):
elif Voice_Call_value <= 10:
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C4_round / Codec_Frame_rate
elif (Voice_Call_value <= 20):
elif Voice_Call_value <= 20:
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C5_round / Codec_Frame_rate
elif (Voice_Call_value <= 50):
elif Voice_Call_value <= 50:
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C6_round / Codec_Frame_rate
else:
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C7_round / Codec_Frame_rate
@@ -648,14 +642,13 @@ class abg11_calculator():
100 - self.Maximum_Theoretical_R_value) * 7 * 0.000001
self.Estimated_MOS_Score = round(Estimated_MOS_Score_1, 2)
def get_result(self):
print("\n" + "******************Station : 11abgCalculator*****************************" + "\n")
print("Theoretical Maximum Offered Load" + "\n")
print("1 Client:")
All_theoretical_output = {'Packet Interval(usec)': self.Client_1_new, 'Max Frame Rate(fps)': self.Max_Frame_Rate_C1_round,
All_theoretical_output = {'Packet Interval(usec)': self.Client_1_new,
'Max Frame Rate(fps)': self.Max_Frame_Rate_C1_round,
'Max. Offered Load (802.11)(Mb/s)': self.Max_Offered_Load_C1_new,
'Offered Load Per 802.11 Client(Mb/s)': self.Offered_Load_Per_Client1_new,
'Offered Load (802.3 Side)(Mb/s)': self.Offered_Load_C1_new,
@@ -677,7 +670,8 @@ class n11_calculator(abg11_calculator):
def __init__(self, Traffic_Type, Data_Voice_MCS, Channel_Bandwidth, Guard_Interval_value, Highest_Basic_str,
Encryption, QoS,
IP_Packets_MSDU_str, MAC_Frames_per_A_MPDU_str, BSS_Basic_Rate, MAC_MPDU_Size_Data_Traffic,
Codec_Type, PLCP, CWmin, RTS_CTS_Handshake, CTS_to_self,PHY_Bit_Rate=None,MAC_Frame_802_11=None,Basic_Rate_Set=None,Preamble=None,slot_name=None):
Codec_Type, PLCP, CWmin, RTS_CTS_Handshake, CTS_to_self, PHY_Bit_Rate=None, MAC_Frame_802_11=None,
Basic_Rate_Set=None, Preamble=None, slot_name=None):
super().__init__(Traffic_Type, PHY_Bit_Rate, Encryption, QoS, MAC_Frame_802_11, Basic_Rate_Set, Preamble,
slot_name, Codec_Type, RTS_CTS_Handshake, CTS_to_self)
self.Data_Voice_MCS = Data_Voice_MCS
@@ -691,7 +685,6 @@ class n11_calculator(abg11_calculator):
self.PLCP = PLCP
self.CWmin = CWmin
# This function is for calculate intermediate values and Theoretical values
def calculate(self):
@@ -875,7 +868,7 @@ class n11_calculator(abg11_calculator):
MAC_MPDU_Size = int(self.MAC_MPDU_Size_Data_Traffic)
else:
if ((IP_Packets_MSDU == 0)):
if IP_Packets_MSDU == 0:
MAC_MPDU_Size = (Codec_IP_Packet_Size + 28 + QoS_Hdr + Encrypt_Hdr + 8)
else:
@@ -890,7 +883,7 @@ class n11_calculator(abg11_calculator):
else:
MSDU_1 = (MAC_MPDU_Size - 28 - QoS_Hdr - Encrypt_Hdr - (IP_Packets_MSDU) * (14 + 3))
MSDU_1 = (MAC_MPDU_Size - 28 - QoS_Hdr - Encrypt_Hdr - IP_Packets_MSDU * (14 + 3))
MSDU_final = MSDU_1 / IP_Packets_MSDU
if MSDU_final < 0:
@@ -1076,7 +1069,7 @@ class n11_calculator(abg11_calculator):
# g22 A-MPDU Pad
if ((MAC_Frames_per_A_MPDU == 0)):
if MAC_Frames_per_A_MPDU == 0:
MPDU_Pad = int(0)
else:
@@ -1087,7 +1080,7 @@ class n11_calculator(abg11_calculator):
# c26 Nbits, Bits per MAC PPDU
MAC_Frames_per_A_MPDU_loc = MAC_Frames_per_A_MPDU + 1
if (MAC_Frames_per_A_MPDU == 0):
if MAC_Frames_per_A_MPDU == 0:
Nbits_Bits_per_MAC_PPDU = MAC_MPDU_Size * 8
else:
@@ -1100,7 +1093,7 @@ class n11_calculator(abg11_calculator):
Guard_Interval_1 = 1
elif "800" in self.Guard_Interval_value:
Guard_Interval_1 = 0
calculation = (((Data_Voice_MCS_int > 7 and PLCP_Configuration_int == 2) or PLCP_Configuration_int == 1))
calculation = ((Data_Voice_MCS_int > 7 and PLCP_Configuration_int == 2) or PLCP_Configuration_int == 1)
if (Guard_Interval_1 == 1) and calculation:
Tsymbol_Data_Symbol_Period = 3.60
@@ -1132,7 +1125,7 @@ class n11_calculator(abg11_calculator):
(22 + 32 * 8 + PHY_Bit_Rate_of_Control_Frames * 4 - 1) / (PHY_Bit_Rate_of_Control_Frames * 4)) * 4 + 20
# g20 Use BlockAck
if (MAC_Frames_per_A_MPDU == 0):
if MAC_Frames_per_A_MPDU == 0:
Use_BlockAck = False
else:
@@ -1228,14 +1221,14 @@ class n11_calculator(abg11_calculator):
Client_14_new = format(Max_PPDU_Rate_7, '.2f')
# c44 Max_MAC_MPDU_Rate_1
if (MAC_Frames_per_A_MPDU > 0):
Max_MAC_MPDU_Rate_1 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_1
Max_MAC_MPDU_Rate_2 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_2
Max_MAC_MPDU_Rate_3 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_3
Max_MAC_MPDU_Rate_4 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_4
Max_MAC_MPDU_Rate_5 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_5
Max_MAC_MPDU_Rate_6 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_6
Max_MAC_MPDU_Rate_7 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_7
if MAC_Frames_per_A_MPDU > 0:
Max_MAC_MPDU_Rate_1 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_1
Max_MAC_MPDU_Rate_2 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_2
Max_MAC_MPDU_Rate_3 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_3
Max_MAC_MPDU_Rate_4 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_4
Max_MAC_MPDU_Rate_5 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_5
Max_MAC_MPDU_Rate_6 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_6
Max_MAC_MPDU_Rate_7 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_7
else:
Max_MAC_MPDU_Rate_1 = Max_PPDU_Rate_1
Max_MAC_MPDU_Rate_2 = Max_PPDU_Rate_2
@@ -1255,14 +1248,14 @@ class n11_calculator(abg11_calculator):
# Max MAC MSDU Rate
if (IP_Packets_MSDU > 0):
Max_MAC_MSDU_Rate_1 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_1
Max_MAC_MSDU_Rate_2 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_2
Max_MAC_MSDU_Rate_3 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_3
Max_MAC_MSDU_Rate_4 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_4
Max_MAC_MSDU_Rate_5 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_5
Max_MAC_MSDU_Rate_6 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_6
Max_MAC_MSDU_Rate_7 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_7
if IP_Packets_MSDU > 0:
Max_MAC_MSDU_Rate_1 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_1
Max_MAC_MSDU_Rate_2 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_2
Max_MAC_MSDU_Rate_3 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_3
Max_MAC_MSDU_Rate_4 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_4
Max_MAC_MSDU_Rate_5 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_5
Max_MAC_MSDU_Rate_6 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_6
Max_MAC_MSDU_Rate_7 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_7
else:
Max_MAC_MSDU_Rate_1 = Max_MAC_MPDU_Rate_1
@@ -1458,8 +1451,6 @@ class n11_calculator(abg11_calculator):
else:
self.Maximum_Bidirectional_Voice_Calls = round(Maximum_Bidirectional, 2)
def get_result(self):
print("\n" + "******************Station : 11nCalculator*****************************" + "\n")
@@ -1488,10 +1479,9 @@ class n11_calculator(abg11_calculator):
class ac11_calculator(n11_calculator):
def __init__(self, Traffic_Type, Data_Voice_MCS, spatial, Channel_Bandwidth, Guard_Interval_value,
Highest_Basic_str, Encryption, QoS,IP_Packets_MSDU_str, MAC_Frames_per_A_MPDU_str, BSS_Basic_Rate, MAC_MPDU_Size_Data_Traffic,
Highest_Basic_str, Encryption, QoS, IP_Packets_MSDU_str, MAC_Frames_per_A_MPDU_str, BSS_Basic_Rate,
MAC_MPDU_Size_Data_Traffic,
Codec_Type, CWmin, RTS_CTS, PLCP=None, RTS_CTS_Handshake=None, CTS_to_self=None):
super().__init__(Traffic_Type, Data_Voice_MCS, Channel_Bandwidth, Guard_Interval_value, Highest_Basic_str,
Encryption, QoS,
@@ -1501,7 +1491,6 @@ class ac11_calculator(n11_calculator):
self.spatial = spatial
self.RTS_CTS = RTS_CTS
# This function is for calculate intermediate values and Theoretical values
def calculate(self):
@@ -1695,7 +1684,7 @@ class ac11_calculator(n11_calculator):
MAC_MPDU_Size = int(self.MAC_MPDU_Size_Data_Traffic)
else:
if (IP_Packets_MSDU == 0):
if IP_Packets_MSDU == 0:
MAC_MPDU_Size = (Codec_IP_Packet_Size + 28 + QoS_Hdr + Encrypt_Hdr + 8)
else:
@@ -1711,8 +1700,8 @@ class ac11_calculator(n11_calculator):
else:
MSDU_1 = (MAC_MPDU_Size - 28 - QoS_Hdr - Encrypt_Hdr - (IP_Packets_MSDU) * (14 + 3))
MSDU_final = (int(MSDU_1 / (IP_Packets_MSDU)))
MSDU_1 = (MAC_MPDU_Size - 28 - QoS_Hdr - Encrypt_Hdr - IP_Packets_MSDU * (14 + 3))
MSDU_final = (int(MSDU_1 / IP_Packets_MSDU))
if MSDU_final < 0:
MSDU = MSDU_final - 1
else:
@@ -1775,7 +1764,7 @@ class ac11_calculator(n11_calculator):
elif "800" in self.Guard_Interval_value:
Guard_Interval_1 = 0
calculation = (((Data_Voice_MCS_int > 7 and plcp == 2) or plcp == 1))
calculation = ((Data_Voice_MCS_int > 7 and plcp == 2) or plcp == 1)
if (Guard_Interval_1 == 1) and calculation:
Tsymbol_Data_Symbol_Period = 3.60
@@ -1839,7 +1828,7 @@ class ac11_calculator(n11_calculator):
# Nbits, Bits per MAC PPDU
# A-MPDU Pad
if ((MAC_Frames_per_A_MPDU == 0)):
if MAC_Frames_per_A_MPDU == 0:
MPDU_Pad = int(0)
else:
@@ -1848,7 +1837,7 @@ class ac11_calculator(n11_calculator):
MPDU_Pad = int((y % 4))
MAC_Frames_per_A_MPDU_loc = MAC_Frames_per_A_MPDU + 1
if (MAC_Frames_per_A_MPDU == 0):
if MAC_Frames_per_A_MPDU == 0:
Nbits_Bits_per_MAC_PPDU = MAC_MPDU_Size * 8
else:
@@ -1925,7 +1914,7 @@ class ac11_calculator(n11_calculator):
# c37 Ack Response Overhead
# g20 Use BlockAck
if (MAC_Frames_per_A_MPDU == 0):
if MAC_Frames_per_A_MPDU == 0:
Use_BlockAck = False
else:
Use_BlockAck = True
@@ -1987,14 +1976,14 @@ class ac11_calculator(n11_calculator):
# c44 Max_MAC_MPDU_Rate_1
if (MAC_Frames_per_A_MPDU > 0):
Max_MAC_MPDU_Rate_1 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_1
Max_MAC_MPDU_Rate_2 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_2
Max_MAC_MPDU_Rate_3 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_3
Max_MAC_MPDU_Rate_4 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_4
Max_MAC_MPDU_Rate_5 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_5
Max_MAC_MPDU_Rate_6 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_6
Max_MAC_MPDU_Rate_7 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_7
if MAC_Frames_per_A_MPDU > 0:
Max_MAC_MPDU_Rate_1 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_1
Max_MAC_MPDU_Rate_2 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_2
Max_MAC_MPDU_Rate_3 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_3
Max_MAC_MPDU_Rate_4 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_4
Max_MAC_MPDU_Rate_5 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_5
Max_MAC_MPDU_Rate_6 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_6
Max_MAC_MPDU_Rate_7 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_7
else:
Max_MAC_MPDU_Rate_1 = Max_PPDU_Rate_1
Max_MAC_MPDU_Rate_2 = Max_PPDU_Rate_2
@@ -2014,14 +2003,14 @@ class ac11_calculator(n11_calculator):
# Max MAC MSDU Rate
if (IP_Packets_MSDU > 0):
Max_MAC_MSDU_Rate_1 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_1
Max_MAC_MSDU_Rate_2 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_2
Max_MAC_MSDU_Rate_3 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_3
Max_MAC_MSDU_Rate_4 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_4
Max_MAC_MSDU_Rate_5 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_5
Max_MAC_MSDU_Rate_6 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_6
Max_MAC_MSDU_Rate_7 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_7
if IP_Packets_MSDU > 0:
Max_MAC_MSDU_Rate_1 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_1
Max_MAC_MSDU_Rate_2 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_2
Max_MAC_MSDU_Rate_3 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_3
Max_MAC_MSDU_Rate_4 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_4
Max_MAC_MSDU_Rate_5 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_5
Max_MAC_MSDU_Rate_6 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_6
Max_MAC_MSDU_Rate_7 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_7
else:
Max_MAC_MSDU_Rate_1 = Max_MAC_MPDU_Rate_1
@@ -2162,8 +2151,10 @@ class ac11_calculator(n11_calculator):
if self.Maximum_Theoretical_R_value > 100:
self.Estimated_MOS_Score = 4.5
else:
Estimated_MOS_Score_1 = (1 + 0.035 * self.Maximum_Theoretical_R_value + self.Maximum_Theoretical_R_value * (
self.Maximum_Theoretical_R_value - 60) * (100 - self.Maximum_Theoretical_R_value) * 7 * 0.000001)
Estimated_MOS_Score_1 = (
1 + 0.035 * self.Maximum_Theoretical_R_value + self.Maximum_Theoretical_R_value * (
self.Maximum_Theoretical_R_value - 60) * (
100 - self.Maximum_Theoretical_R_value) * 7 * 0.000001)
self.Estimated_MOS_Score = format(Estimated_MOS_Score_1, '.2f')
# Voice_Call_Range
@@ -2207,7 +2198,6 @@ class ac11_calculator(n11_calculator):
else:
self.Maximum_Bidirectional_Voice_Calls = round(Maximum_Bidirectional, 2)
def get_result(self):
print("\n" + "******************Station : 11ac Calculator*****************************" + "\n")

View File

@@ -18,6 +18,7 @@ import traceback
from time import sleep
import websocket
import re
try:
import thread
except ImportError:
@@ -27,12 +28,10 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit()
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
cre = {
"phy": re.compile(r'^(1\.\d+):\s+(\S+)\s+\(phy', re.I),
"ifname": re.compile(r'(1\.\d+):\s+IFNAME=(\S+)\s+', re.I),
@@ -75,6 +74,7 @@ host = "localhost"
base_url = None
port = 8081
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def usage():
print("""Example: __file__ --host 192.168.1.101 --port 8081\n""")
@@ -94,7 +94,7 @@ def main():
base_url = "unset"
try:
args = parser.parse_args()
if (args.host is None):
if args.host is None:
host = "localhost"
elif (type(args) is tuple) or (type(args) is list):
host = args.host[0]
@@ -125,79 +125,74 @@ def sock_filter(wsock, text):
resource = None
for test in ignore:
if (test in text):
if (debug):
if test in text:
if debug:
print(" ignoring ", text)
return;
return
try:
message = json.loads(text)
except Exception as ex:
print ("Json Exception: ", repr(ex))
traceback.print_exc()
try:
# big generic filter for wifi-message or details keys
try:
if ("details" in message.keys()):
if "details" in message.keys():
for test in ignore:
if (test in message["details"]):
return;
if test in message["details"]:
return
except KeyError:
print("Message lacks key 'details'")
try:
if ("wifi-event" in message.keys()):
if "wifi-event" in message.keys():
for test in ignore:
# print (" is ",test, " in ", message["wifi-event"])
if (test in message["wifi-event"]):
return;
if test in message["wifi-event"]:
return
except KeyError:
print("Message lacks key 'wifi-event'")
if (("time" in message.keys()) and ("timestamp" in message.keys())):
if ("time" in message.keys()) and ("timestamp" in message.keys()):
return
if ("name" in message.keys()):
if "name" in message.keys():
station_name = message["name"]
if ("resource" in message.keys()):
if "resource" in message.keys():
resource = "1.", message["resource"]
if ("event_type" in message.keys()):
if "event_type" in message.keys():
match_result = cre["port"].match(message["details"])
if (match_result is not None):
if match_result is not None:
station_name = match_result.group(1)
if (message["is_alert"]):
if message["is_alert"]:
print("alert: ", message["details"])
# LFUtils.debug_printer.pprint(message)
return
else:
# LFUtils.debug_printer.pprint(message)
if (" IP change from " in message["details"]):
if (" to 0.0.0.0" in messsage["details"]):
if " IP change from " in message["details"]:
if " to 0.0.0.0" in message["details"]:
print("e: %s.%s lost IP address", [resource, station_name])
else:
print("e: %s.%s gained IP address", [resource, station_name])
if ("Link DOWN" in message["details"]):
if "Link DOWN" in message["details"]:
return # duplicates alert
print("event: ", message["details"])
return
if ("wifi-event" in message.keys()):
if ("CTRL-EVENT-CONNECTED" in message["wifi-event"]):
if "wifi-event" in message.keys():
if "CTRL-EVENT-CONNECTED" in message["wifi-event"]:
# redunant
return
if (("CTRL-EVENT-CONNECTED - Connection to " in message["wifi-event"]) and (
" complete" in message["wifi-event"])):
return;
if ((": assoc " in message["wifi-event"]) and ("status: 0: Successful" in message["wifi-event"])):
return
if ((station_name is None) or (resource is None)):
if (": assoc " in message["wifi-event"]) and ("status: 0: Successful" in message["wifi-event"]):
return
if (station_name is None) or (resource is None):
try:
match_result = cre["phy"].match(message["wifi-event"])
if (match_result is not None):
if match_result is not None:
# LFUtils.debug_printer.pprint(match_result)
# LFUtils.debug_printer.pprint(match_result.groups())
resource = match_result.group(1)
@@ -206,7 +201,7 @@ def sock_filter(wsock, text):
match_result = cre["ifname"].match(message["wifi-event"])
# LFUtils.debug_printer.pprint(match_result)
# LFUtils.debug_printer.pprint(match_result.groups())
if (match_result is not None):
if match_result is not None:
resource = match_result.group(1)
station_name = match_result.group(2)
else:
@@ -221,9 +216,9 @@ def sock_filter(wsock, text):
sleep(1)
# print ("Determined station name: as %s.%s"%(resource, station_name))
if ((": auth ") and ("status: 0: Successful" in message["wifi-event"])):
if ": auth " and ("status: 0: Successful" in message["wifi-event"]):
match_result = cre["auth"].match(message["wifi-event"])
if (match_result and match_result.groups()):
if match_result and match_result.groups():
bssid = match_result.group(1)
print("station %s.%s auth with %s" % (resource, station_name, bssid))
return
@@ -231,9 +226,9 @@ def sock_filter(wsock, text):
print("station %s.%s auth with ??" % (resource, station_name))
LFUtils.debug_printer.pprint(match_result)
if ("Associated with " in message["wifi-event"]):
if "Associated with " in message["wifi-event"]:
match_result = cre["associated"].match(message["wifi-event"])
if (match_result and match_result.groups()):
if match_result and match_result.groups():
bssid = match_result.group(1)
print("station %s.%s assocated with %s" % (resource, station_name, bssid))
return
@@ -241,9 +236,9 @@ def sock_filter(wsock, text):
print("station %s.%s assocated with ??" % (resource, station_name))
LFUtils.debug_printer.pprint(match_result)
if (" - Connection to " in message["wifi-event"]):
if " - Connection to " in message["wifi-event"]:
match_result = cre["connected"].match(message["wifi-event"])
if (match_result and match_result.groups()):
if match_result and match_result.groups():
bssid = match_result.group(1)
print("station %s.%s connected to %s" % (resource, station_name, bssid))
return
@@ -251,14 +246,14 @@ def sock_filter(wsock, text):
print("station %s.%s connected to ??" % (resource, station_name))
LFUtils.debug_printer.pprint(match_result)
if ("disconnected" in message["wifi-event"]):
if "disconnected" in message["wifi-event"]:
print("Station %s.%s down" % (resource, station_name))
return
if ("Trying to associate with " in message["wifi-event"]):
if "Trying to associate with " in message["wifi-event"]:
match_result = cre["associating"].match(message["wifi-event"])
if (match_result and match_result.groups()):
if match_result and match_result.groups():
bssid = match_result.group(1)
print("station %s.%s associating with %s" % (resource, station_name, bssid))
return
@@ -266,10 +261,10 @@ def sock_filter(wsock, text):
print("station %s.%s associating with ??" % (resource, station_name))
LFUtils.debug_printer.pprint(match_result)
if ("Trying to authenticate" in message["wifi-event"]):
if "Trying to authenticate" in message["wifi-event"]:
match_result = cre["authenticating"].match(message["wifi-event"])
if (match_result and match_result.groups()):
if match_result and match_result.groups():
bssid = match_result.group(1)
print("station %s.%s authenticating with %s" % (resource, station_name, bssid))
return
@@ -277,10 +272,10 @@ def sock_filter(wsock, text):
print("station %s.%s authenticating with ??" % (resource, station_name))
LFUtils.debug_printer.pprint(match_result)
if ("Authenticated" in message["wifi-event"]):
if "Authenticated" in message["wifi-event"]:
match_result = cre["authenticed"].match(message["wifi-event"])
LFUtils.debug_printer.pprint(match_result)
if (match_result and match_result.groups()):
if match_result and match_result.groups():
bssid = match_result.group(1)
print("station %s.%s authenticated with %s" % (resource, station_name, bssid))
else:
@@ -291,6 +286,10 @@ def sock_filter(wsock, text):
print("\nUnhandled: ")
LFUtils.debug_printer.pprint(message)
except Exception as ex:
traceback.print_exc()
raise ("Json Exception: ", repr(ex))
except KeyError as kerr:
print("# ----- Bad Key: ----- ----- ----- ----- ----- ----- ----- ----- ----- -----")
print("input: ", text)
@@ -317,6 +316,7 @@ def sock_filter(wsock, text):
sleep(1)
return
# ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
def m_error(wsock, err):
print("# ----- Error: ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----\n")
@@ -355,7 +355,6 @@ def start_websocket(uri, websock):
if __name__ == '__main__':
main()
####
####
####

View File

@@ -13,13 +13,11 @@ WS_Listener has three arguments in general : lfclient_host, _scriptname, _callba
"""
class WS_Listener():
class WS_Listener:
def __init__(self, lfclient_host="localhost", _scriptname=None, _callback=None):
import websocket
self.scriptname = _scriptname
websocket.enableTrace(True)
self.ws = websocket.WebSocketApp("ws://" + lfclient_host + ":8081", on_message=_callback)
self.ws.run_forever()

View File

@@ -368,8 +368,8 @@ td.scriptdetails span.copybtn {
}
td.scriptdetails:hover span.copybtn {
display: inline-block;
padding: 5px;
font-size: 12px;
padding: 2px;
font-size:10px;
float: left;
color: #050;
background: white;

179
py-scripts/asus_ap.py Normal file
View File

@@ -0,0 +1,179 @@
#!/usr/bin/python3
'''
NAME:
asus_ap.py
PURPOSE:
Generic AP library that will work for the ASUS ap's
EXAMPLE:
./asus_ap.py --ap_port '/dev/ttyUSB0' --ap_baud '115200' --ap_cmd "wl -i wl1 bs_data"
./asus_ap.py --ap_port '/dev/ttyUSB0' --ap_baud '115200' --ap_cmd "wl -i wl1 bs_data" --ap_file 'ap_file.txt'
NOTES:
'''
import sys
if sys.version_info[0] != 3:
print("This script requires Python3")
exit()
import argparse
import pexpect
import serial
from pexpect_serial import SerialSpawn
# see https://stackoverflow.com/a/13306095/11014343
class FileAdapter(object):
def __init__(self, logger):
self.logger = logger
def write(self, data):
# NOTE: data can be a partial line, multiple lines
data = data.strip() # ignore leading/trailing whitespace
if data: # non-blank
self.logger.info(data)
def flush(self):
pass # leave it to logging to flush properly
class lf_ap():
def __init__(self,
_ap_test_mode = False,
_ap_2G_interface = "wl0",
_ap_5G_interface = "wl1",
_ap_6G_interface = "wl2",
_ap_scheme = 'serial',
_ap_serial_port = '/dev/ttyUSB0',
_ap_ssh_port = "22",
_ap_telnet_port = "23",
_ap_serial_baud = '115200',
_ap_report_dir = "",
_ap_log_file = ""):
self.ap_test_mode = _ap_test_mode
self.ap_2G_interface = _ap_2G_interface
self.ap_5G_interface = _ap_5G_interface
self.ap_6G_interface = _ap_6G_interface
self.ap_scheme = _ap_scheme
self.ap_serial_port = _ap_serial_port
self.ap_telnet_port = _ap_ssh_port
self.ap_telnet = _ap_telnet_port
self.ap_serial_baud = _ap_serial_baud
self.ap_report_dir = _ap_report_dir
self.ap_log_file = _ap_log_file
def ap_action(self):
print("ap_cmd: {}".format(self.ap_cmd))
try:
ser = serial.Serial(self.ap_port, int(self.ap_baud), timeout=5)
ss = SerialSpawn(ser)
ss.sendline(str(self.ap_cmd))
ss.expect([pexpect.TIMEOUT], timeout=2) # do not detete line, waits for output
ap_results = ss.before.decode('utf-8','ignore')
print("ap_results {}".format(ap_results))
except:
ap_results = "exception on accessing {} Command: {}\r\n".format(self.ap_port,self.ap_cmd)
print("{}".format(ap_results))
if(self.ap_file != None):
ap_file = open(str(self.ap_file),"a")
ap_file.write(ap_results)
ap_file.close()
print("ap file written {}".format(str(self.ap_file)))
# ASUS
def ap_clear_stats(self,band):
pass
# ASUS bs_data
def ap_ul_data(self,band):
pass
# ASUS rx_report
def ap_dl_data(self,band):
pass
# ASUS chanel info (channel utilization)
def ap_chanim(self,band):
pass
def ap_ul_stats(self,band):
pass
def ap_dl_stats(self,band):
pass
def ap_store_dl_scheduler_stats(self,band):
if band is "6G":
pass
def ap_store_ul_scheduler_stats(self,band):
pass
def ap_ofdma_stats(self,band):
pass
def main():
parser = argparse.ArgumentParser(
prog='lf_ap.py',
#formatter_class=argparse.RawDescriptionHelpFormatter,
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
Useful Information:
1. Useful Information goes here
''',
description='''\
lf_ap.py:
--------------------
Summary :
----------
This file is used for verification
Commands: (wl2 == 6GHz wl1 == 5GHz , wl0 == 24ghz)
read ap data:: 'wl -i wl1 bs_data'
reset scheduler's counters:: 'wl -i wl1 dump_clear'
UL scheduler statistics:: 'wl -i wl1 dump umsched'
DL scheduler statistics:: 'wl -i wl1 dump msched'
Generic command layout:
-----------------------
''')
parser.add_argument('--ap_test_mode', help='--ap_mode ',default=True)
parser.add_argument('--ap_port', help='--ap_port \'/dev/ttyUSB0\'',default='/dev/ttyUSB0')
parser.add_argument('--ap_baud', help='--ap_baud \'115200\'',default='115200')
parser.add_argument('--ap_cmd', help='--ap_cmd \'wl -i wl1 bs_data\'',default='wl -i wl1 bs_data')
parser.add_argument('--ap_file', help='--ap_file \'ap_file.txt\'')
args = parser.parse_args()
__ap_port = args.ap_port
__ap_baud = args.ap_baud
__ap_cmd = args.ap_cmd
__ap_file = args.ap_file
ap_dut = lf_ap(
_ap_port = __ap_port,
_ap_baud = __ap_baud,
_ap_cmd = __ap_cmd ,
_ap_file = __ap_file)
ap_dut.ap_action()
if __name__ == '__main__':
main()

View File

@@ -5,13 +5,6 @@ import pandas as pd
import argparse
def get_tag(x, tag):
try:
return x[tag]
except:
return False
def main():
parser = argparse.ArgumentParser(
prog="check_argparse.py",
@@ -29,7 +22,7 @@ def main():
text = open(os.path.join(args.path, file)).read()
results_file = dict()
results_file['argparse'] = 'argparse.' in text
if results_file['argparse'] is True:
if results_file['argparse']:
results_file['create_basic'] = 'create_basic_argparse' in text
results_file['create_bare'] = 'create_bare_argparse' in text
results_file['prog'] = 'prog=' in text
@@ -48,7 +41,9 @@ def main():
'description',
'epilog',
'usage']:
df[tag] = [get_tag(x, tag) for x in df['results']]
for result in df['results']:
if tag in result:
df[tag] = df['results'][tag]
df['details'] = df['description'] + df['epilog'] + df['usage']
df.to_csv(args.output + '.csv', index=False)

View File

@@ -58,7 +58,8 @@ class CreateBond(LFCliBase):
"resource": self.resource,
"port": "bond0000",
"current_flags": 0x80000000,
"interest": 0x4000 # (0x2 + 0x4000 + 0x800000) # current, dhcp, down
# (0x2 + 0x4000 + 0x800000) # current, dhcp, down
"interest": 0x4000
}
self.json_post("cli-json/set_port", bond_set_port)

View File

@@ -13,7 +13,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -54,7 +53,6 @@ class CreateBridge(Realm):
pprint.pprint(self.sta_list)
print("---- ~bridge List ----- ----- ----- ----- ----- ----- \n")
def build(self):
# Build bridges
@@ -71,13 +69,12 @@ class CreateBridge(Realm):
"resource": self.resource,
"port": "br0",
"current_flags": 0x80000000,
"interest": 0x4000 # (0x2 + 0x4000 + 0x800000) # current, dhcp, down
# (0x2 + 0x4000 + 0x800000) # current, dhcp, down
"interest": 0x4000
}
self.json_post("cli-json/set_port", bridge_set_port)
def main():
parser = LFCliBase.create_basic_argparse(
prog='create_bridge.py',
@@ -101,16 +98,18 @@ Command example:
--debug
''')
required = parser.add_argument_group('required arguments')
required.add_argument('--target_device', help='Where the bridges should be connecting', required=True)
required.add_argument(
'--target_device', help='Where the bridges should be connecting', required=True)
# required.add_argument('--security', help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >', required=True)
optional = parser.add_argument_group('optional arguments')
optional.add_argument('--num_bridges', help='Number of bridges to Create', required=False)
optional.add_argument(
'--num_bridges', help='Number of bridges to Create', required=False)
args = parser.parse_args()
# if args.debug:
# pprint.pprint(args)
# time.sleep(5)
if (args.radio is None):
if args.radio is None:
raise ValueError("--radio required")
num_bridge = 2
@@ -139,5 +138,6 @@ Command example:
create_bridge.build()
print('Created %s bridges' % num_bridge)
if __name__ == "__main__":
main()

View File

@@ -41,7 +41,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
@@ -60,13 +59,16 @@ class CreateChamberview(cv):
self.lfmgr = lfmgr
self.port = port
def clean_cv_scenario(self,type="Network-Connectivity",scenario_name=None):
self.rm_cv_text_blob(type,scenario_name)
def clean_cv_scenario(
self,
cv_type="Network-Connectivity",
scenario_name=None):
self.rm_cv_text_blob(cv_type, scenario_name)
def setup(self,
create_scenario="",
line="",
raw_line=[]):
raw_line=None):
if raw_line:
print("creating %s scenario" % create_scenario)
@@ -88,44 +90,39 @@ class CreateChamberview(cv):
Freq = "-1"
VLAN = ""
for i in range(len(line)):
if " " in line[i][0]:
line[i][0] = (re.split(' ', line[i][0]))
elif "," in line[i][0]:
line[i][0] = (re.split(',', line[i][0]))
elif ", " in line[i][0]:
line[i][0] = (re.split(',', line[i][0]))
elif " ," in line[i][0]:
line[i][0] = (re.split(',', line[i][0]))
for item in line:
if " " in item[0]:
item[0] = (re.split(' ', item[0]))
elif "," in item[0]:
item[0] = (re.split(',', item[0]))
else:
print("Wrong arguments entered !")
exit(1)
print("creating %s scenario" % scenario_name)
for j in range(len(line[i][0])):
line[i][0][j] = line[i][0][j].split("=")
for k in range(len(line[i][0][j])):
name = line[i][0][j][k]
if str(name) == "Resource" or str(name) == "Res" or str(name) == "R":
Resource = line[i][0][j][k + 1]
elif str(name) == "Profile" or str(name) == "Prof" or str(name) == "P":
Profile = line[i][0][j][k + 1]
elif str(name) == "Amount" or str(name) == "Sta" or str(name) == "A":
Amount = line[i][0][j][k + 1]
elif str(name) == "Uses-1" or str(name) == "U1" or str(name) == "U-1":
Uses1 = line[i][0][j][k + 1]
elif str(name) == "Uses-2" or str(name) == "U2" or str(name) == "U-2":
Uses2 = line[i][0][j][k + 1]
elif str(name) == "Freq" or str(name) == "Freq" or str(name) == "F":
Freq = line[i][0][j][k + 1]
elif str(name) == "DUT" or str(name) == "dut" or str(name) == "D":
DUT = line[i][0][j][k + 1]
elif str(name) == "DUT_Radio" or str(name) == "dr" or str(name) == "DR":
DUT_Radio = line[i][0][j][k + 1]
elif str(name) == "Traffic" or str(name) == "Traf" or str(name) == "T":
Traffic = line[i][0][j][k + 1]
elif str(name) == "VLAN" or str(name) == "Vlan" or str(name) == "V":
VLAN = line[i][0][j][k + 1]
for sub_item in item[0]:
sub_item = sub_item.split("=")
if sub_item[0] == "Resource" or str(
sub_item[0]) == "Res" or sub_item[0] == "R":
Resource = sub_item[1]
elif sub_item[0] == "Profile" or sub_item[0] == "Prof" or sub_item[0] == "P":
Profile = sub_item[1]
elif sub_item[0] == "Amount" or sub_item[0] == "Sta" or sub_item[0] == "A":
Amount = sub_item[1]
elif sub_item[0] == "Uses-1" or sub_item[0] == "U1" or sub_item[0] == "U-1":
Uses1 = sub_item[1]
elif sub_item[0] == "Uses-2" or sub_item[0] == "U2" or sub_item[0] == "U-2":
Uses2 = sub_item[1]
elif sub_item[0] == "Freq" or sub_item[0] == "Freq" or sub_item[0] == "F":
Freq = sub_item[1]
elif sub_item[0] == "DUT" or sub_item[0] == "dut" or sub_item[0] == "D":
DUT = sub_item[1]
elif sub_item[0] == "DUT_Radio" or sub_item[0] == "dr" or sub_item[0] == "DR":
DUT_Radio = sub_item[1]
elif sub_item[0] == "Traffic" or sub_item[0] == "Traf" or sub_item[0] == "T":
Traffic = sub_item[1]
elif sub_item[0] == "VLAN" or sub_item[0] == "Vlan" or sub_item[0] == "V":
VLAN = sub_item[1]
else:
continue
@@ -140,7 +137,7 @@ class CreateChamberview(cv):
Traffic,
Freq,
VLAN
); # To manage scenario
) # To manage scenario
if not line and not raw_line:
raise Exception("scenario creation failed")
@@ -154,14 +151,14 @@ class CreateChamberview(cv):
self.apply_cv_scenario(scenario_name) # Apply scenario
self.build_cv_scenario() # build scenario
tries = 0
while (True):
while True:
self.get_popup_info_and_close()
if not self.get_cv_is_built():
# It can take a while to build a large scenario, so wait-time
# is currently max of 5 minutes.
print("Waiting %i/300 for Chamber-View to be built." % (tries))
print("Waiting %i/300 for Chamber-View to be built." % tries)
tries += 1
if (tries > 300):
if tries > 300:
break
time.sleep(1)
else:
@@ -188,17 +185,32 @@ def main():
--raw_line "profile_link 1.1 upstream 1 'DUT: temp Radio-1' tcp-dl-6m-vi eth1,AUTO -1"
""")
parser.add_argument("-m", "--lfmgr", type=str,
parser.add_argument(
"-m",
"--lfmgr",
type=str,
help="address of the LANforge GUI machine (localhost is default)")
parser.add_argument("-o", "--port", type=int, default=8080,
parser.add_argument(
"-o",
"--port",
type=int,
default=8080,
help="IP Port the LANforge GUI is listening on (8080 is default)")
parser.add_argument("-cs", "--create_scenario", "--create_lf_scenario", type=str,
parser.add_argument(
"-cs",
"--create_scenario",
"--create_lf_scenario",
type=str,
help="name of scenario to be created")
parser.add_argument("-l", "--line", action='append', nargs='+',
help="line number", default=[])
parser.add_argument("-rl", "--raw_line", action='append', nargs=1,
help="raw lines", default=[])
parser.add_argument("-ds", "--delete_scenario", default=False, action='store_true',
parser.add_argument(
"-ds",
"--delete_scenario",
default=False,
action='store_true',
help="delete scenario (by default: False)")
args = parser.parse_args()
@@ -206,7 +218,9 @@ def main():
port=args.port,
)
if args.delete_scenario:
Create_Chamberview.clean_cv_scenario(type="Network-Connectivity", scenario_name=args.create_scenario)
Create_Chamberview.clean_cv_scenario(
cv_type="Network-Connectivity",
scenario_name=args.create_scenario)
Create_Chamberview.setup(create_scenario=args.create_scenario,
line=args.line,

View File

@@ -54,7 +54,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
# from cv_dut_profile import cv_dut as dut
@@ -70,7 +69,7 @@ class DUT(dut):
lfmgr="localhost",
port="8080",
dut_name="DUT",
ssid=[],
ssid=None,
sw_version="NA",
hw_version="NA",
serial_num="NA",
@@ -87,12 +86,13 @@ class DUT(dut):
desired_dut_flags=dut_flags,
desired_dut_flags_mask=dut_flags
)
if ssid is None:
ssid = []
self.cv_dut_name = dut_name
self.cv_test = cvtest(lfmgr, port)
self.dut_name = dut_name
self.ssid = ssid
def setup(self):
self.create_dut()
@@ -115,16 +115,14 @@ class DUT(dut):
d[item[0].lower()] = item[1]
self.ssid[j] = d
self.ssid[j]['flag'] = []
self.ssid[j].keys
flag = 0x0
if 'security' in self.ssid[j].keys():
self.ssid[j]['security'] = self.ssid[j]['security'].split('|')
self.ssid[j]['security'] = self.ssid[j]['security'].split(
'|')
for security in self.ssid[j]['security']:
try:
if security.lower() in flags:
flag |= flags[security.lower()]
except:
pass
self.ssid[j]['flag'] = flag
if 'bssid' not in self.ssid[j].keys():
@@ -152,20 +150,41 @@ def main():
--ssid "ssid_idx=0 ssid=NET1 security=WPA|WEP|11r|EAP-PEAP bssid=78:d2:94:bf:16:41"
--ssid "ssid_idx=1 ssid=NET1 security=WPA password=test bssid=78:d2:94:bf:16:40"
""")
parser.add_argument("-m", "--lfmgr", type=str, default="localhost",
parser.add_argument(
"-m",
"--lfmgr",
type=str,
default="localhost",
help="address of the LANforge GUI machine (localhost is default)")
parser.add_argument("-o", "--port", type=str, default="8080",
parser.add_argument(
"-o",
"--port",
type=str,
default="8080",
help="IP Port the LANforge GUI is listening on (8080 is default)")
parser.add_argument("-d", "--dut_name", type=str, default="DUT",
help="set dut name")
parser.add_argument("-s", "--ssid", action='append', nargs=1,
help="SSID", default=[])
parser.add_argument("--sw_version", default="NA", help="DUT Software version.")
parser.add_argument("--hw_version", default="NA", help="DUT Hardware version.")
parser.add_argument("--serial_num", default="NA", help="DUT Serial number.")
parser.add_argument(
"--sw_version",
default="NA",
help="DUT Software version.")
parser.add_argument(
"--hw_version",
default="NA",
help="DUT Hardware version.")
parser.add_argument(
"--serial_num",
default="NA",
help="DUT Serial number.")
parser.add_argument("--model_num", default="NA", help="DUT Model Number.")
parser.add_argument('--dut_flag', help='station flags to add', default=None, action='append')
parser.add_argument(
'--dut_flag',
help='station flags to add',
default=None,
action='append')
args = parser.parse_args()
new_dut = DUT(lfmgr=args.lfmgr,

View File

@@ -16,7 +16,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LANforge = importlib.import_module("py-json.LANforge")
@@ -46,10 +45,12 @@ class CreateL3(Realm):
self.endp_a = endp_a
self.mode = mode
self.name_prefix = name_prefix
self.station_profile = self.new_station_profile()
# self.station_profile = self.new_station_profile()
# self.station_profile.lfclient_url = self.lfclient_url
# self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0,
# end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user
# defined variable from terminal.
self.cx_profile = self.new_l3_cx_profile()
self.station_profile.lfclient_url = self.lfclient_url
self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
self.cx_profile.host = self.host
self.cx_profile.port = self.port
self.cx_profile.name_prefix = self.name_prefix
@@ -66,47 +67,16 @@ class CreateL3(Realm):
side_a=self.endp_a,
side_b=self.endp_b,
sleep_time=0)
# self.cx_profile.start_cx()
self._pass("PASS: Cross-connect build finished")
def main():
parser = LFCliBase.create_basic_argparse(
prog='create_l3.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
Generate traffic between ports
''',
description='''\
''')
def main(args):
required_args = None
for group in parser._action_groups:
if group.title == "required arguments":
required_args = group
break
if required_args is not None:
required_args.add_argument('--min_rate_a', help='--min_rate_a bps rate minimum for side_a', default=56000)
required_args.add_argument('--min_rate_b', help='--min_rate_b bps rate minimum for side_b', default=56000)
required_args.add_argument('--endp_a', help='--endp_a station list', default=["eth1"], action="append")
required_args.add_argument('--endp_b', help='--upstream port', default="eth2")
num_sta = 0
# if (args.num_stations is not None) and (int(args.num_stations) > 0):
# num_sta = int(args.num_stations)
optional_args = None
for group in parser._action_groups:
if group.title == "optional arguments":
optional_args = group
break;
if optional_args is not None:
optional_args.add_argument('--mode', help='Used to force mode of stations', default=0)
optional_args.add_argument('--ap', help='Used to force a connection to a particular AP')
optional_args.add_argument('--number_template', help='Start the station numbering with a particular number. Default is 0000', default=0000)
args = parser.parse_args()
num_sta = 2
if (args.num_stations is not None) and (int(args.num_stations) > 0):
num_sta = int(args.num_stations)
# station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=int(args.number_template), end_id_=num_sta+int(args.number_template) - 1, padding_number_=10000,
# radio=args.radio)
ip_var_test = CreateL3(host=args.mgr,
port=args.mgr_port,
name_prefix="VT",
@@ -117,13 +87,53 @@ def main():
mode=args.mode,
_debug_on=args.debug)
ip_var_test.pre_cleanup()
# ip_var_test.pre_cleanup()
ip_var_test.build()
if not ip_var_test.passes():
print(ip_var_test.get_fail_message())
ip_var_test.exit_fail()
print('Created %s stations and connections' % num_sta)
print(f'Created {num_sta} stations and connections')
if __name__ == "__main__":
main()
parser = LFCliBase.create_basic_argparse(
prog='create_l3.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
Generate traffic between ports
''',
description='''\
''')
parser.add_argument(
'--min_rate_a',
help='--min_rate_a bps rate minimum for side_a',
default=56000)
parser.add_argument(
'--min_rate_b',
help='--min_rate_b bps rate minimum for side_b',
default=56000)
parser.add_argument(
'--endp_a',
help='--endp_a station list',
default=[],
action="append",
required=True)
parser.add_argument(
'--endp_b',
help='--upstream port',
default="eth2",
required=True)
parser.add_argument(
'--mode',
help='Used to force mode of stations',
default=0)
parser.add_argument(
'--ap',
help='Used to force a connection to a particular AP')
parser.add_argument(
'--number_template',
help='Start the station numbering with a particular number. Default is 0000',
default=0000)
args = parser.parse_args()
main(args)

View File

@@ -5,6 +5,8 @@
Example script:
'./create_l3_stations.py --radio wiphy0 --ssid lanforge --password password --security wpa2'
'./create_l3_stations.py --station_list sta00,sta01 --radio wiphy0 --ssid lanforge --password password --security wpa2'
'./create_l3_stations.py --station_list sta00 sta01 --radio wiphy0 --ssid lanforge --password password --security wpa2'
"""
import sys
@@ -24,12 +26,25 @@ from realm import Realm
class CreateL3(Realm):
def __init__(self,
ssid, security, password, sta_list, name_prefix, upstream, radio,
host="localhost", port=8080, mode=0, ap=None,
side_a_min_rate=56, side_a_max_rate=0,
side_b_min_rate=56, side_b_max_rate=0,
number_template="00000", use_ht160=False,
def __init__(
self,
ssid,
security,
password,
sta_list,
name_prefix,
upstream,
radio,
host="localhost",
port=8080,
mode=0,
ap=None,
side_a_min_rate=56,
side_a_max_rate=0,
side_b_min_rate=56,
side_b_max_rate=0,
number_template="00000",
use_ht160=False,
_debug_on=False,
_exit_on_error=False,
_exit_on_fail=False):
@@ -61,7 +76,9 @@ class CreateL3(Realm):
self.station_profile.mode = mode
if self.ap is not None:
self.station_profile.set_command_param("add_sta", "ap", self.ap)
# self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
# self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0,
# end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user
# defined variable from terminal.
self.cx_profile.host = self.host
self.cx_profile.port = self.port
@@ -74,7 +91,7 @@ class CreateL3(Realm):
def pre_cleanup(self):
self.cx_profile.cleanup_prefix()
for sta in self.sta_list:
self.rm_port(sta, check_exists=True)
self.rm_port(sta, check_exists=True, debug_=False)
def build(self):
@@ -83,8 +100,10 @@ class CreateL3(Realm):
self.password)
self.station_profile.set_number_template(self.number_template)
print("Creating stations")
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag(
"add_sta", "create_admin_down", 1)
self.station_profile.set_command_param(
"set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
self.station_profile.create(radio=self.radio,
sta_names_=self.sta_list,
@@ -136,39 +155,92 @@ def main():
--ap "00:0e:8e:78:e1:76"
--number_template 0000
--debug
python3 ./create_l3_stations.py
--upstream_port eth1
--radio wiphy0
--station_list sta00,sta01
--security {open|wep|wpa|wpa2|wpa3} \\
--mode 1
{"auto" : "0",
"a" : "1",
"b" : "2",
"g" : "3",
"abg" : "4",
"abgn" : "5",
"bgn" : "6",
"bg" : "7",
"abgnAC" : "8",
"anAC" : "9",
"an" : "10",
"bgnAC" : "11",
"abgnAX" : "12",
"bgnAX" : "13",
--ssid netgear
--password admin123
--a_min 1000
--b_min 1000
--ap "00:0e:8e:78:e1:76"
--number_template 0000
--debug
''')
required_args = None
for group in parser._action_groups:
if group.title == "required arguments":
required_args = group
break;
break
if required_args is not None:
required_args.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
required_args.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
required_args.add_argument(
'--a_min',
help='--a_min bps rate minimum for side_a',
default=256000)
required_args.add_argument(
'--b_min',
help='--b_min bps rate minimum for side_b',
default=256000)
optional_args = None
for group in parser._action_groups:
if group.title == "optional arguments":
optional_args = group
break;
if optional_args is not None:
optional_args.add_argument('--mode', help='Used to force mode of stations')
optional_args.add_argument('--ap', help='Used to force a connection to a particular AP')
optional_args.add_argument('--number_template', help='Start the station numbering with a particular number. Default is 0000', default=0000)
optional_args.add_argument('--station_list', help='Optional: User defined station names', action='append',default=None)
break
if optional_args:
optional_args.add_argument(
'--mode', help='Used to force mode of stations')
optional_args.add_argument(
'--ap', help='Used to force a connection to a particular AP')
optional_args.add_argument(
'--number_template',
help='Start the station numbering with a particular number. Default is 0000',
default=0000)
optional_args.add_argument(
'--station_list',
help='Optional: User defined station names, can be a comma or space separated list',
nargs='+',
default=None)
optional_args.add_argument(
'--no_cleanup',
help="Optional: Don't cleanup existing stations",
action='store_true')
args = parser.parse_args()
num_sta = 2
if (args.num_stations is not None) and (int(args.num_stations) > 0):
num_sta = int(args.num_stations)
if args.station_list is None:
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=int(args.number_template), end_id_=num_sta+int(args.number_template) - 1, padding_number_=10000,
radio=args.radio)
if not args.station_list:
station_list = LFUtils.portNameSeries(
prefix_="sta", start_id_=int(
args.number_template), end_id_=num_sta + int(
args.number_template) - 1, padding_number_=10000, radio=args.radio)
else:
if ',' in args.station_list[0]:
station_list = args.station_list[0].split(',')
elif ' ' in args.station_list[0]:
station_list = args.station_list[0].split()
else:
station_list = args.station_list
ip_var_test = CreateL3(host=args.mgr,
port=args.mgr_port,
number_template=str(args.number_template),
@@ -186,6 +258,7 @@ def main():
ap=args.ap,
_debug_on=args.debug)
if not args.no_cleanup:
ip_var_test.pre_cleanup()
ip_var_test.build()
if not ip_var_test.passes():

View File

@@ -13,7 +13,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -25,12 +24,25 @@ TestGroupProfile = realm.TestGroupProfile
class CreateL4(Realm):
def __init__(self,
ssid, security, password, sta_list, name_prefix, upstream, radio,
host="localhost", port=8080, mode = 0, ap=None,
side_a_min_rate=56, side_a_max_rate=0,
side_b_min_rate=56, side_b_max_rate=0,
number_template="00000", use_ht160=False,
def __init__(
self,
ssid,
security,
password,
sta_list,
name_prefix,
upstream,
radio,
host="localhost",
port=8080,
mode=0,
ap=None,
side_a_min_rate=56,
side_a_max_rate=0,
side_b_min_rate=56,
side_b_max_rate=0,
number_template="00000",
use_ht160=False,
_debug_on=False,
_exit_on_error=False,
_exit_on_fail=False):
@@ -62,8 +74,9 @@ class CreateL4(Realm):
self.station_profile.mode = mode
if self.ap is not None:
self.station_profile.set_command_param("add_sta", "ap", self.ap)
#self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
# self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0,
# end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user
# defined variable from terminal.
self.cx_profile.host = self.host
self.cx_profile.port = self.port
@@ -76,22 +89,34 @@ class CreateL4(Realm):
def cleanup(self):
self.cx_profile.cleanup()
self.station_profile.cleanup()
LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url,
LFUtils.wait_until_ports_disappear(
base_url=self.lfclient_url,
port_list=self.station_profile.station_names,
debug=self.debug)
def build(self):
# Build stations
self.station_profile.use_security(self.security, self.ssid, self.password)
self.station_profile.use_security(
self.security, self.ssid, self.password)
self.station_profile.set_number_template(self.number_template)
print("Creating stations")
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag(
"add_sta", "create_admin_down", 1)
self.station_profile.set_command_param(
"set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
self.station_profile.create(
radio=self.radio,
sta_names_=self.sta_list,
debug=self.debug)
self._pass("PASS: Station build finished")
self.cx_profile.create(ports=self.station_profile.station_names, sleep_time=.5, debug_=self.debug, suppress_related_commands_=True)
self.cx_profile.create(
ports=self.station_profile.station_names,
sleep_time=.5,
debug_=self.debug,
suppress_related_commands_=True)
def main():
parser = LFCliBase.create_basic_argparse(
@@ -138,26 +163,41 @@ python3 ./layer4.py
for group in parser._action_groups:
if group.title == "required arguments":
required_args = group
break;
break
if required_args is not None:
required_args.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
required_args.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
required_args.add_argument(
'--a_min',
help='--a_min bps rate minimum for side_a',
default=256000)
required_args.add_argument(
'--b_min',
help='--b_min bps rate minimum for side_b',
default=256000)
optional_args = None
for group in parser._action_groups:
if group.title == "optional arguments":
optional_args = group
break;
break
if optional_args is not None:
optional_args.add_argument('--mode',help='Used to force mode of stations', default=0)
optional_args.add_argument('--ap',help='Used to force a connection to a particular AP')
optional_args.add_argument(
'--mode',
help='Used to force mode of stations',
default=0)
optional_args.add_argument(
'--ap', help='Used to force a connection to a particular AP')
args = parser.parse_args()
num_sta = 2
if (args.num_stations is not None) and (int(args.num_stations) > 0):
num_sta = int(args.num_stations)
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta-1, padding_number_=10000, radio=args.radio)
station_list = LFUtils.portNameSeries(
prefix_="sta",
start_id_=0,
end_id_=num_sta - 1,
padding_number_=10000,
radio=args.radio)
ip_var_test = CreateL4(host=args.mgr,
port=args.mgr_port,
number_template="0000",

View File

@@ -8,7 +8,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -29,7 +28,7 @@ class CreateMacVlan(Realm):
netmask=None,
gateway=None,
dhcp=True,
port_list=[],
port_list=None,
ip_list=None,
connections_per_port=1,
_debug_on=False,
@@ -51,7 +50,6 @@ class CreateMacVlan(Realm):
self.mvlan_profile = self.new_mvlan_profile()
self.mvlan_profile.num_macvlans = int(num_ports)
self.mvlan_profile.desired_macvlans = self.port_list
self.mvlan_profile.macvlan_parent = self.macvlan_parent
@@ -65,10 +63,14 @@ class CreateMacVlan(Realm):
def build(self):
# Build stations
print("Creating MACVLANs")
self.mvlan_profile.create(admin_down=False, sleep_time=.5, debug=self.debug)
self.mvlan_profile.create(
admin_down=False,
sleep_time=.5,
debug=self.debug)
self._pass("PASS: MACVLAN build finished")
self.created_ports += self.mvlan_profile.created_macvlans
def main():
parser = LFCliBase.create_bare_argparse(
prog='create_macvlan.py',
@@ -97,25 +99,58 @@ Generic command layout:
--use_ports eth1#0=10.40.3.103,eth1#1,eth1#2 --connections_per_port 2
--netmask 255.255.240.0 --gateway 10.40.0.1
You can only add MAC-VLANs to Ethernet, Bonding, Redir, and 802.1Q VLAN devices.
''')
parser.add_argument('--num_stations', help='Number of stations to create', default=0)
parser.add_argument(
'--num_stations',
help='Number of stations to create',
default=0)
parser.add_argument('--radio', help='radio EID, e.g: 1.wiphy2')
parser.add_argument('-u', '--upstream_port',
parser.add_argument(
'-u',
'--upstream_port',
help='non-station port that generates traffic: <resource>.<port>, e.g: 1.eth1',
default='1.eth1')
parser.add_argument('--macvlan_parent', help='specifies parent port for macvlan creation', default=None)
parser.add_argument('--first_port', help='specifies name of first port to be used', default=None)
parser.add_argument('--num_ports', help='number of ports to create', default=1)
parser.add_argument('--connections_per_port', help='specifies number of connections to be used per port', default=1,
parser.add_argument(
'--macvlan_parent',
help='specifies parent port for macvlan creation',
required=True)
parser.add_argument(
'--first_port',
help='specifies name of first port to be used',
default=None)
parser.add_argument(
'--num_ports',
help='number of ports to create',
default=1)
parser.add_argument(
'--connections_per_port',
help='specifies number of connections to be used per port',
default=1,
type=int)
parser.add_argument('--use_ports', help='list of comma separated ports to use with ips, \'=\' separates name and ip'
parser.add_argument(
'--use_ports',
help='list of comma separated ports to use with ips, \'=\' separates name and ip'
'{ port_name1=ip_addr1,port_name1=ip_addr2 }. '
'Ports without ips will be left alone', default=None)
parser.add_argument('--first_mvlan_ip', help='specifies first static ip address to be used or dhcp', default=None)
parser.add_argument('--netmask', help='specifies netmask to be used with static ip addresses', default=None)
parser.add_argument('--gateway', help='specifies default gateway to be used with static addressing', default=None)
parser.add_argument('--cxs', help='list of cxs to add/remove depending on use of --add_to_group or --del_from_group'
, default=None)
'Ports without ips will be left alone',
default=None)
parser.add_argument(
'--first_mvlan_ip',
help='specifies first static ip address to be used or dhcp',
default=None)
parser.add_argument(
'--netmask',
help='specifies netmask to be used with static ip addresses',
default=None)
parser.add_argument(
'--gateway',
help='specifies default gateway to be used with static addressing',
default=None)
parser.add_argument(
'--cxs',
help='list of cxs to add/remove depending on use of --add_to_group or --del_from_group',
default=None)
args = parser.parse_args()
port_list = []
@@ -125,26 +160,37 @@ Generic command layout:
if (args.num_ports is not None) and (int(args.num_ports) > 0):
start_num = int(args.first_port[3:])
num_ports = int(args.num_ports)
port_list = LFUtils.port_name_series(prefix="sta", start_id=start_num, end_id=start_num + num_ports - 1,
port_list = LFUtils.port_name_series(
prefix="sta",
start_id=start_num,
end_id=start_num + num_ports - 1,
padding_number=10000,
radio=args.radio)
else:
if (args.num_ports is not None) and args.macvlan_parent is not None and (int(args.num_ports) > 0) \
and args.macvlan_parent in args.first_port:
start_num = int(args.first_port[args.first_port.index('#') + 1:])
if (args.num_ports is not None) and args.macvlan_parent is not None and (
int(args.num_ports) > 0) and args.macvlan_parent in args.first_port:
start_num = int(
args.first_port[args.first_port.index('#') + 1:])
num_ports = int(args.num_ports)
port_list = LFUtils.port_name_series(prefix=args.macvlan_parent + "#", start_id=start_num,
end_id=start_num + num_ports - 1, padding_number=100000,
port_list = LFUtils.port_name_series(
prefix=args.macvlan_parent + "#",
start_id=start_num,
end_id=start_num + num_ports - 1,
padding_number=100000,
radio=args.radio)
else:
raise ValueError("Invalid values for num_ports [%s], macvlan_parent [%s], and/or first_port [%s].\n"
"first_port must contain parent port and num_ports must be greater than 0"
% (args.num_ports, args.macvlan_parent, args.first_port))
raise ValueError(
"Invalid values for num_ports [%s], macvlan_parent [%s], and/or first_port [%s].\n"
"first_port must contain parent port and num_ports must be greater than 0" %
(args.num_ports, args.macvlan_parent, args.first_port))
else:
if args.use_ports is None:
num_ports = int(args.num_ports)
port_list = LFUtils.port_name_series(prefix=args.macvlan_parent + "#", start_id=0,
end_id=num_ports - 1, padding_number=100000,
port_list = LFUtils.port_name_series(
prefix=args.macvlan_parent + "#",
start_id=0,
end_id=num_ports - 1,
padding_number=100000,
radio=args.radio)
else:
temp_list = args.use_ports.split(',')
@@ -156,7 +202,8 @@ Generic command layout:
ip_list.append(0)
if len(port_list) != len(ip_list):
raise ValueError(temp_list, " ports must have matching ip addresses!")
raise ValueError(
temp_list, " ports must have matching ip addresses!")
if args.first_mvlan_ip is not None:
if args.first_mvlan_ip.lower() == "dhcp":

View File

@@ -29,11 +29,15 @@ class CreateQVlan(Realm):
netmask=None,
first_qvlan_ip=None,
gateway=None,
port_list=[],
ip_list=[],
port_list=None,
ip_list=None,
exit_on_error=False,
debug=False):
super().__init__(host, port)
if port_list is None:
port_list = []
if ip_list is None:
ip_list = []
self.host = host
self.port = port
self.qvlan_parent = qvlan_parent
@@ -54,7 +58,8 @@ class CreateQVlan(Realm):
def build(self):
print("Creating QVLAN stations")
self.qvlan_profile.create(admin_down=False, sleep_time=.5, debug=self.debug)
self.qvlan_profile.create(
sleep_time=.5)
def main():
@@ -68,21 +73,50 @@ def main():
---------------------
Generic command ''')
parser.add_argument('--radio', help='radio EID, e.g: 1.wiphy2')
parser.add_argument('--qvlan_parent', help='specifies parent port for qvlan creation', default=None)
parser.add_argument('--first_port', help='specifies name of first port to be used', default=None)
parser.add_argument('--num_ports', help='number of ports to create', default=1)
parser.add_argument('--first_qvlan_ip', help='specifies first static ip address to be used or dhcp', default=None)
parser.add_argument('--netmask', help='specifies netmask to be used with static ip addresses', default=None)
parser.add_argument('--gateway', help='specifies default gateway to be used with static addressing', default=None)
parser.add_argument('--use_ports',
parser.add_argument(
'--qvlan_parent',
help='specifies parent port for qvlan creation',
default=None,
required=True)
parser.add_argument(
'--first_port',
help='specifies name of first port to be used',
default=None)
parser.add_argument(
'--num_ports',
type=int,
help='number of ports to create',
default=1)
parser.add_argument(
'--first_qvlan_ip',
help='specifies first static ip address to be used or dhcp',
default=None)
parser.add_argument(
'--netmask',
help='specifies netmask to be used with static ip addresses',
default=None)
parser.add_argument(
'--gateway',
help='specifies default gateway to be used with static addressing',
default=None)
parser.add_argument(
'--use_ports',
help='list of comma separated ports to use with ips, \'=\' separates name and ip { port_name1=ip_addr1,port_name1=ip_addr2 }. Ports without ips will be left alone',
default=None)
tg_group = parser.add_mutually_exclusive_group()
tg_group.add_argument('--add_to_group', help='name of test group to add cxs to', default=None)
parser.add_argument('--cxs', help='list of cxs to add/remove depending on use of --add_to_group or --del_from_group'
, default=None)
parser.add_argument('--use_qvlans', help='will create qvlans', action='store_true', default=False)
tg_group.add_argument(
'--add_to_group',
help='name of test group to add cxs to',
default=None)
parser.add_argument(
'--cxs',
help='list of cxs to add/remove depending on use of --add_to_group or --del_from_group',
default=None)
parser.add_argument(
'--use_qvlans',
help='will create qvlans',
action='store_true',
default=False)
args = parser.parse_args()
@@ -100,33 +134,43 @@ def main():
update_group_args['cxs'] = args.cxs
port_list = []
ip_list = []
if args.first_port is not None and args.use_ports is not None:
if args.first_port and args.use_ports:
if args.first_port.startswith("sta"):
if (args.num_ports is not None) and (int(args.num_ports) > 0):
if args.num_ports and args.num_ports > 0:
start_num = int(args.first_port[3:])
num_ports = int(args.num_ports)
port_list = LFUtils.port_name_series(prefix="sta", start_id=start_num, end_id=start_num + num_ports - 1,
port_list = LFUtils.port_name_series(
prefix="sta",
start_id=start_num,
end_id=start_num + args.num_ports - 1,
padding_number=10000,
radio=args.radio)
print(1)
else:
if (args.num_ports is not None) and args.qvlan_parent is not None and (int(args.num_ports) > 0) \
and args.qvlan_parent in args.first_port:
start_num = int(args.first_port[args.first_port.index('#') + 1:])
num_ports = int(args.num_ports)
port_list = LFUtils.port_name_series(prefix=args.qvlan_parent + "#", start_id=start_num,
end_id=start_num + num_ports - 1, padding_number=10000,
if args.num_ports and args.qvlan_parent and (args.num_ports > 0) and args.qvlan_parent in args.first_port:
start_num = int(
args.first_port[args.first_port.index('#') + 1:])
port_list = LFUtils.port_name_series(
prefix=str(
args.qvlan_parent) + "#",
start_id=start_num,
end_id=start_num + args.num_ports - 1,
padding_number=10000,
radio=args.radio)
print(2)
else:
raise ValueError("Invalid values for num_ports [%s], qvlan_parent [%s], and/or first_port [%s].\n"
"first_port must contain parent port and num_ports must be greater than 0"
% (args.num_ports, args.qvlan_parent, args.first_port))
raise ValueError(
"Invalid values for num_ports [%s], qvlan_parent [%s], and/or first_port [%s].\n"
"first_port must contain parent port and num_ports must be greater than 0" %
(args.num_ports, args.qvlan_parent, args.first_port))
else:
if args.use_ports is None:
if not args.use_ports:
num_ports = int(args.num_ports)
port_list = LFUtils.port_name_series(prefix=args.qvlan_parent + "#", start_id=1,
end_id=num_ports, padding_number=10000,
port_list = LFUtils.port_name_series(
prefix=str(
args.qvlan_parent) + "#",
start_id=1,
end_id=num_ports,
padding_number=10000,
radio=args.radio)
print(3)
else:
@@ -139,7 +183,8 @@ def main():
ip_list.append(0)
if len(port_list) != len(ip_list):
raise ValueError(temp_list, " ports must have matching ip addresses!")
raise ValueError(
temp_list, " ports must have matching ip addresses!")
print(port_list)
print(ip_list)
@@ -155,7 +200,8 @@ def main():
ip_list=ip_list,
debug=args.debug)
create_qvlan.build()
print('Created %s QVLAN stations' % num_ports)
print('Created %s QVLAN stations' % args.num_ports)
if __name__ == "__main__":
main()

View File

@@ -74,23 +74,31 @@ class CreateStation(Realm):
def build(self):
# Build stations
self.station_profile.use_security(self.security, self.ssid, self.password)
self.station_profile.use_security(
self.security, self.ssid, self.password)
self.station_profile.set_number_template(self.number_template)
print("Creating stations")
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag(
"add_sta", "create_admin_down", 1)
self.station_profile.set_command_param(
"set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
if self.set_txo_data is not None:
self.station_profile.set_wifi_txo(txo_ena=self.set_txo_data["txo_enable"],
self.station_profile.set_wifi_txo(
txo_ena=self.set_txo_data["txo_enable"],
tx_power=self.set_txo_data["txpower"],
pream=self.set_txo_data["pream"],
mcs=self.set_txo_data["mcs"],
nss=self.set_txo_data["nss"],
bw=self.set_txo_data["bw"],
retries=self.set_txo_data["retries"],
sgi=self.set_txo_data["sgi"], )
self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
sgi=self.set_txo_data["sgi"],
)
self.station_profile.create(
radio=self.radio,
sta_names_=self.sta_list,
debug=self.debug)
if self.up:
self.station_profile.admin_up()
@@ -119,21 +127,32 @@ def main():
--debug
''')
required = parser.add_argument_group('required arguments')
required.add_argument('--start_id', help='--start_id <value> default 0', default=0)
required.add_argument(
'--start_id',
help='--start_id <value> default 0',
default=0)
optional = parser.add_argument_group('Optional arguments')
optional.add_argument('--mode', help='Mode for your station (as a number)',default=0)
optional.add_argument('--station_flag', help='station flags to add', required=False, default=None, action='append')
optional.add_argument(
'--mode',
help='Mode for your station (as a number)',
default=0)
optional.add_argument(
'--station_flag',
help='station flags to add',
required=False,
default=None,
action='append')
args = parser.parse_args()
# if args.debug:
# pprint.pprint(args)
# time.sleep(5)
if (args.radio is None):
if args.radio is None:
raise ValueError("--radio required")
start_id = 0
if (args.start_id != 0):
if args.start_id != 0:
start_id = int(args.start_id)
num_sta = 2
@@ -148,16 +167,6 @@ def main():
radio=args.radio)
print("station_list {}".format(station_list))
set_txo_data={
"txo_enable": 1,
"txpower": 255,
"pream": 0,
"mcs": 0,
"nss": 0,
"bw": 3,
"retries": 1,
"sgi": 0
}
create_station = CreateStation(_host=args.mgr,
_port=args.mgr_port,
@@ -175,5 +184,6 @@ def main():
create_station.build()
print('Created %s stations' % num_sta)
if __name__ == "__main__":
main()

View File

@@ -13,7 +13,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -62,7 +61,6 @@ class CreateStation(Realm):
pprint.pprint(self.sta_list)
print("---- ~Station List ----- ----- ----- ----- ----- ----- \n")
def build(self):
# Build stations
self.station_profile.use_security(self.security, self.ssid, self.password)
@@ -80,8 +78,6 @@ class CreateStation(Realm):
def main():
required=[]
required.append({'name':'--df','help':'Which file you want to build stations off of?'})
parser = LFCliBase.create_basic_argparse(
prog='create_station_from_df.py',
formatter_class=argparse.RawTextHelpFormatter,
@@ -99,9 +95,9 @@ def main():
--ssid netgear
--passwd BLANK
--debug
''',
more_required=required)
''')
required = parser.add_argument_group('required arguments')
required.add_argument('df', help='Which file do you want to build stations off of?', required=True)
args = parser.parse_args()
df = pd.read_csv(args.df)
@@ -109,16 +105,16 @@ def main():
for item in unique.index:
uniquedf = unique.iloc[item]
df1 = df.merge(pd.DataFrame(uniquedf).transpose(), on=['radio', 'ssid', 'passwd', 'security'])
try:
if uniquedf['radio']:
radio = uniquedf['radio']
except:
else:
radio = args.radio
station_list = df1['station']
try:
if uniquedf['ssid']:
ssid = uniquedf['ssid']
passwd = uniquedf['passwd']
security = uniquedf['security']
except:
else:
ssid = args.ssid
passwd = args.passwd
security = args.security
@@ -135,5 +131,6 @@ def main():
create_station.build()
print('Created %s stations' % len(unique.index))
if __name__ == "__main__":
main()

View File

@@ -55,7 +55,10 @@ class CreateVAP(Realm):
self.vap_list = _vap_list
self.resource = _resource
if _vap_flags is None:
self.vap_flags = ["wpa2_enable", "80211u_enable", "create_admin_down"]
self.vap_flags = [
"wpa2_enable",
"80211u_enable",
"create_admin_down"]
else:
self.vap_flags = _vap_flags
self.mode = _mode
@@ -74,8 +77,10 @@ class CreateVAP(Realm):
self.vap_profile.ssid_pass = self.password
self.vap_profile.dhcp = self.dhcp
self.vap_profile.mode = self.mode
self.vap_profile.desired_add_vap_flags = self.vap_flags + ["wpa2_enable", "80211u_enable", "create_admin_down"]
self.vap_profile.desired_add_vap_flags_mask = self.vap_flags + ["wpa2_enable", "80211u_enable", "create_admin_down"]
self.vap_profile.desired_add_vap_flags = self.vap_flags + \
["wpa2_enable", "80211u_enable", "create_admin_down"]
self.vap_profile.desired_add_vap_flags_mask = self.vap_flags + \
["wpa2_enable", "80211u_enable", "create_admin_down"]
if self.debug:
print("----- VAP List ----- ----- ----- ----- ----- ----- \n")
pprint.pprint(self.vap_list)
@@ -83,13 +88,13 @@ class CreateVAP(Realm):
def build(self):
# Build VAPs
self.vap_profile.use_security(self.security, self.ssid, passwd=self.password)
self.vap_profile.use_security(
self.security, self.ssid, passwd=self.password)
print("Creating VAPs")
self.vap_profile.create(resource=self.resource,
radio=self.radio,
channel=self.channel,
country=self.country_code,
up_=True,
debug=False,
use_ht40=True,
@@ -125,10 +130,26 @@ Command example:
''')
optional = parser.add_argument_group('optional arguments')
optional.add_argument('--num_vaps', help='Number of VAPs to Create', required=False, default=1)
optional.add_argument('--vap_flag', help='VAP flags to add', required=False, default=None, action='append')
optional.add_argument('--bridge', help='Create a bridge connecting the VAP to a port', required=False, default=False)
optional.add_argument('--mac', help='Custom mac address', default="xx:xx:xx:xx:*:xx")
optional.add_argument(
'--num_vaps',
help='Number of VAPs to Create',
required=False,
default=1)
optional.add_argument(
'--vap_flag',
help='VAP flags to add',
required=False,
default=None,
action='append')
optional.add_argument(
'--bridge',
help='Create a bridge connecting the VAP to a port',
required=False,
default=False)
optional.add_argument(
'--mac',
help='Custom mac address',
default="xx:xx:xx:xx:*:xx")
optional.add_argument('--mode', default='AUTO')
optional.add_argument('--channel', default=36)
optional.add_argument('--country_code', default=0)
@@ -140,7 +161,7 @@ Command example:
# if args.debug:
# pprint.pprint(args)
# time.sleep(5)
if (args.radio is None):
if args.radio is None:
raise ValueError("--radio required")
num_vap = int(args.num_vaps)
@@ -196,5 +217,6 @@ Command example:
create_vap.build()
if __name__ == "__main__":
main()

View File

@@ -50,7 +50,8 @@ class CreateVR(Realm):
self.vr_profile = self.new_vr_profile()
def clean(self):
if (self.vr_name is None) or (self.vr_profile.vr_eid is None) and (self.vr_profile.vr_eid) == "":
if (self.vr_name is None) or (self.vr_profile.vr_eid is None) and (
self.vr_profile.vr_eid) == "":
print("No vr_eid to clean")
return
self.rm_port("1.1.rd90a", debug_=self.debug)
@@ -84,9 +85,9 @@ class CreateVR(Realm):
"cx_name": "all"
}, debug_=self.debug)
def build(self):
self.vr_profile.apply_netsmith(self.vr_name[1], delay=5, debug=self.debug)
self.vr_profile.apply_netsmith(
self.vr_name[1], delay=5, debug=self.debug)
self.json_post("/cli-json/add_rdd", {
"shelf": 1,
"resource": self.vr_name[1],
@@ -101,10 +102,17 @@ class CreateVR(Realm):
"peer_ifname": "rd90a",
"report_timer": "3000"
})
self.wait_until_ports_appear(sta_list=["1.1.rd90a", "1.1.rd90b"], debug_=self.debug)
self.vr_profile.vrcx_list(resource=self.vr_name[1], do_sync=True) # do_sync
self.wait_until_ports_appear(
sta_list=[
"1.1.rd90a",
"1.1.rd90b"],
debug_=self.debug)
self.vr_profile.vrcx_list(
resource=self.vr_name[1],
do_sync=True) # do_sync
self.vr_profile.create(vr_name=self.vr_name, debug=self.debug)
self.vr_profile.sync_netsmith(resource=self.vr_name[1], debug=self.debug)
self.vr_profile.sync_netsmith(
resource=self.vr_name[1], debug=self.debug)
self._pass("created router")
def start(self):
@@ -113,21 +121,35 @@ class CreateVR(Realm):
:return: void
"""
# move rd90a into router
self.vr_profile.refresh_netsmith(resource=self.vr_name[1], debug=self.debug)
self.vr_profile.refresh_netsmith(
resource=self.vr_name[1], debug=self.debug)
if self.debug:
pprint(("vr_eid", self.vr_name))
self.vr_profile.wait_until_vrcx_appear(resource=self.vr_name[1], name_list=["rd90a", "rd90b"])
self.vr_profile.add_vrcx(vr_eid=self.vr_name, connection_name_list="rd90a", debug=True)
self.vr_profile.wait_until_vrcx_appear(
resource=self.vr_name[1], name_list=[
"rd90a", "rd90b"])
self.vr_profile.add_vrcx(
vr_eid=self.vr_name,
connection_name_list="rd90a",
debug=True)
self.vr_profile.refresh_netsmith(resource=self.vr_name[1], debug=self.debug)
self.vr_profile.refresh_netsmith(
resource=self.vr_name[1], debug=self.debug)
# test to make sure that vrcx is inside vr we expect
self.vr_profile.vrcx_list(resource=self.vr_name[1], do_sync=True)
vr_list = self.vr_profile.router_list(resource=self.vr_name[1], do_refresh=True)
router = self.vr_profile.find_cached_router(resource=self.vr_name[1], router_name=self.vr_name[2])
vr_list = self.vr_profile.router_list(
resource=self.vr_name[1], do_refresh=True)
router = self.vr_profile.find_cached_router(
resource=self.vr_name[1], router_name=self.vr_name[2])
pprint(("cached router 120: ", router))
router_eid = LFUtils.name_to_eid(router["eid"])
pprint(("router eid 122: ", router_eid))
full_router = self.json_get("/vr/1/%s/%s/%s" %(router_eid[0], router_eid[1], self.vr_name[2]), debug_=True)
full_router = self.json_get(
"/vr/1/%s/%s/%s" %
(router_eid[0],
router_eid[1],
self.vr_name[2]),
debug_=True)
pprint(("full router: ", full_router))
time.sleep(5)
if router is None:
@@ -156,7 +178,10 @@ Command example:
optional = parser.add_argument_group('optional arguments')
optional.add_argument('--ports', default=None, required=False,
optional.add_argument(
'--ports',
default=None,
required=False,
help='Comma separated list of ports to add to virtual router')
optional.add_argument('--services', default=None, required=False,
help='Add router services to a port, "br0=nat,dhcp"')
@@ -179,6 +204,7 @@ Command example:
# create_vr.clean()
print('Created Virtual Router')
if __name__ == "__main__":
main()

View File

@@ -1,314 +0,0 @@
#!/usr/bin/python3
"""
Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python
Written by Candela Technologies Inc.
Updated by: Erin Grimes
"""
import sys
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
from time import sleep
import urllib
import pprint
sys.path.append("../py-json")
from LANforge import LFRequest
from LANforge import LFUtils
from LANforge.lfcli_base import LFCliBase
j_printer = pprint.PrettyPrinter(indent=2)
# todo: this needs to change
resource_id = 1
def main():
parser = LFCliBase.create_basic_argparse()
args = parser.parse_args()
base_url = 'http://%s:%s' % (args.mgr, args.mgr_port)
print(base_url)
json_post = ""
json_response = ""
num_wanlinks = -1
# force a refresh on the ports and wanlinks
lf_r = LFRequest.LFRequest(base_url+"/cli-json/nc_show_ports", debug_=True)
lf_r.addPostData({
"shelf": 1,
"resource": 1,
"port": "all",
})
json_response = lf_r.jsonPost(debug=True)
lf_r = LFRequest.LFRequest(base_url+"/cli-json/nc_show_endpoints", debug_=True)
lf_r.addPostData({
"endpoint": "all"
})
json_response = lf_r.jsonPost(debug=True)
sleep(1)
# see if there are old wanlinks to remove
lf_r = LFRequest.LFRequest(base_url+"/wl_ep/list", debug_=True)
json_reponse = lf_r.get_as_json()
endpA = args['name']+"-A"
endpB = args['name']+"-B"
# count the number of wanlink endpoints
if "endpoint" in json_response:
endpoint_map = LFUtils.list_to_alias_map(json_list=json_reponse, from_element="endpoint")
if endpA in endpoint_map:
num_wanlinks += 1
if endpB in endpoint_map:
num_wanlinks += 1
# remove old wanlinks
if (num_wanlinks > 0):
print("Removing old wanlinks...")
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_cx", debug_=True)
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name']
})
lf_r.jsonPost()
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp", debug_=True)
lf_r.addPostData({
'endp_name': endpA
})
lf_r.jsonPost()
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp", debug_=True)
lf_r.addPostData({
'endp_name': endpB
})
lf_r.jsonPost()
sleep(1)
# check to see if we have ports
lf_r = LFRequest.LFRequest(base_url+"/ports/1/1/list", debug_=True)
port_response = lf_r.getAsJson()
if "interfaces" not in port_response:
print("No interfaces in port_response!")
pprint.pprint(port_response)
exit(1)
if "interfaces" in port_response:
port_map = LFUtils.list_to_alias_map(json_list=port_response, from_element="interfaces")
ports_created = 0
if args["port_A"] not in port_map:
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_rdd", debug_=True)
lf_r.addPostData({
"shelf": 1,
"resource": 1,
"port": args["port_A"],
"peer_ifname": args["port_A"]+"b",
})
json_reponse = lf_r.jsonPost(debug=True)
if not json_response:
print("could not create port "+args["port_A"])
exit(1)
sleep(0.1)
ports_created += 1
if args["port_B"] not in port_map:
lf_r.addPostData({
"shelf": 1,
"resource": 1,
"port": args["port_B"],
"peer_ifname": args["port_B"]+"b",
})
json_reponse = lf_r.jsonPost(debug=True)
if not json_response:
print("could not create port " + args["port_B"])
exit(1)
ports_created += 1
sleep(0.1)
if ports_created > 0:
LFUtils.wait_until_ports_appear(base_url=base_url,
port_list=(args["port_A"], args["port_B"]),
debug=True)
print("Created {} ports".format(ports_created))
# create wanlink endpoint A
print("Adding WL Endpoints...", end='')
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp", debug_=True)
lf_r.addPostData({
'alias': endpA,
'shelf': 1,
'resource': '1',
'port': args['port_A'],
'latency': args['latency_A'],
'max_rate': args['rate_A'],
})
json_response = lf_r.jsonPost(debug=True)
if not json_response:
print("Unable to create "+endpA)
else:
print("A, ", end='')
# create wanlink endpoint B
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp", debug_=True)
lf_r.addPostData({
'alias': endpB,
'shelf': 1,
'resource': '1',
'port': args['port_B'],
'latency': args['latency_B'],
'max_rate': args['rate_B'],
})
json_response = lf_r.jsonPost()
if not json_response:
print("Unable to create "+endpB)
else:
print("B")
sleep(1)
# create cx
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_cx", debug_=True)
lf_r.addPostData({
'alias': args['name'],
'test_mgr': 'default_tm',
'tx_endp': endpA,
'rx_endp': endpB
})
lf_r.jsonPost(debug=True)
sleep(0.5)
# modify wanlink endpoint A
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info", debug_=True)
lf_r.addPostData({
'name': endpA,
'max_jitter': args['jitter_A'],
'jitter_freq': args['jitter_freq_A'],
'drop_freq': args['drop_A']
})
lf_r.jsonPost(debug=True)
# modify wanlink endpoint B
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info", debug_=True)
lf_r.addPostData({
'name': endpB,
'max_jitter': args['jitter_B'],
'jitter_freq': args['jitter_freq_B'],
'drop_freq': args['drop_B']
})
lf_r.jsonPost()
# start wanlink once we see it
seen = 0
print("Looking for {} and {}: ".format(endpA, endpB), end='')
while (seen < 2):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl_ep/list?fields=name,eid")
try:
json_response = lf_r.getAsJson()
if json_response is None:
print(".", end="")
continue
LFUtils.debug_printer.pprint(json_response)
if "endpoint" not in json_response:
print("-", end="")
continue
endpoint_map = LFUtils.list_to_alias_map(json_list=json_response["endpoint"],
from_element="endpoint")
if endpA in endpoint_map:
seen += 1
print("+", end="")
if endpB in endpoint_map:
seen += 1
print("+", end="")
except urllib.error.HTTPError as error:
print("Error code {}".format(error.code))
continue
print("")
print("Starting wanlink:")
# print("the latency is {laten}".format(laten=latency))
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name'],
'cx_state': 'RUNNING'
})
lf_r.jsonPost()
running = 0
while (running < 1):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
try:
json_response = lf_r.getAsJson()
if (json_response is None):
continue
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
if (value["state"].startswith("Run")):
LFUtils.debug_printer.pprint(json_response)
running = 1
except urllib.error.HTTPError as error:
print("Error code {}".format(error.code))
continue
print("Wanlink is running")
# stop wanlink
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
lf_r.addPostData({
'test_mgr': 'all',
'cx_name': args['name'],
'cx_state': 'STOPPED'
})
lf_r.jsonPost()
running = 1
while (running > 0):
sleep(1)
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,eid,state,_links")
LFUtils.debug_printer.pprint(json_response)
try:
json_response = lf_r.getAsJson()
if (json_response is None):
continue
for key, value in json_response.items():
if (isinstance(value, dict)):
if ("_links" in value):
if (value["name"] == args['name']):
if (value["state"].startswith("Stop")):
LFUtils.debug_printer.pprint(json_response)
running = 0
except urllib.error.HTTPError as error:
print("Error code {}".format(error.code))
continue
print("Wanlink is stopped.")
# print("Wanlink info:")
# lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-A")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-B")
# json_response = lf_r.getAsJson()
# LFUtils.debug_printer.pprint(json_response)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__':
main()

View File

@@ -23,14 +23,12 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
class CSVParcer():
class CSVParcer:
def __init__(self, csv_infile=None, csv_outfile=None):
idx = 0
i_atten = -1
i_rotation = -1
i_rxbps = -1
@@ -46,20 +44,21 @@ class CSVParcer():
x = line.split(",")
cni = 0
for cn in x:
if (cn == "Attenuation [dB]"):
if cn == "Attenuation [dB]":
i_atten = cni
if (cn == "Position [Deg]"):
if cn == "Position [Deg]":
i_rotation = cni
if (cn == "Throughput [Mbps]"):
if cn == "Throughput [Mbps]":
i_rxbps = cni
if (cn == "Beacon RSSI [dBm]"):
if cn == "Beacon RSSI [dBm]":
i_beacon_rssi = cni
if (cn == "Data RSSI [dBm]"):
if cn == "Data RSSI [dBm]":
i_data_rssi = cni
cni += 1
# Write out out header for the new file.
fpo.write("Test Run,Position [Deg],Attenuation 1 [dB],Pal Stats Endpoint 1 Control Rssi [dBm],Pal Stats Endpoint 1 Data Rssi [dBm]\n")
fpo.write(
"Test Run,Position [Deg],Attenuation 1 [dB],Pal Stats Endpoint 1 Control Rssi [dBm],Pal Stats Endpoint 1 Data Rssi [dBm]\n")
# Read rest of the input lines, processing one at a time. Covert the columns as
# needed, and write out new data to the output file.
@@ -72,8 +71,6 @@ class CSVParcer():
step_i = 0
while line:
x = line.split(",")
#print(x)
#print([test_run, x[i_rotation], x[i_atten], x[i_beacon_rssi], x[i_data_rssi]])
fpo.write("%s,%s,%s,%s,%s" % (test_run, x[i_rotation], x[i_atten], x[i_beacon_rssi], x[i_data_rssi]))
bottom_half += ("%s,%s,%s,%s\n" % (step_i, x[i_rotation], x[i_atten], x[i_rxbps]))
line = fp.readline()
@@ -83,9 +80,8 @@ class CSVParcer():
fpo.write("\n\n# RvRvO Data\n\n")
fpo.write(bottom_half)
def main():
#debug_on = False
def main():
parser = argparse.ArgumentParser(
prog='csv_convert.py',
formatter_class=argparse.RawTextHelpFormatter,
@@ -98,13 +94,12 @@ csv_convert.py:
converts the candela brief csv into the data for specific customer,
''')
# for testing parser.add_argument('-i','--infile', help="input file of csv data", default='text-csv-0-candela.csv')
parser.add_argument('-i', '--infile', help="input file of csv data", required=True)
parser.add_argument('-o', '--outfile', help="output file in .csv format", default='outfile.csv')
args = parser.parse_args()
csv_outfile_name = None
csv_infile_name = None
if args.infile:
csv_infile_name = args.infile
@@ -115,5 +110,6 @@ csv_convert.py:
CSVParcer(csv_infile_name, csv_outfile_name)
if __name__ == "__main__":
main()

View File

@@ -8,7 +8,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
@@ -29,6 +28,7 @@ class CVManager(cv_test):
self.apply_cv_scenario(self.scenario)
self.build_cv_scenario()
def main():
parser = argparse.ArgumentParser(
prog='cv_manager.py',
@@ -45,5 +45,6 @@ def main():
lfclient_host=args.mgr)
manager.apply_and_build_scenario()
if __name__ == "__main__":
main()

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python3
'''
"""
This script loads and builds a Chamber View Scenario, runs WiFi Capacity Test, runs Dataplane Test,
and posts the results to Influx.
There are optional arguments which will create a Grafana dashboard which will import the data posted to
@@ -59,7 +59,7 @@ AP Auto test has the following argument:
DUT syntax is somewhat tricky: DUT-name SSID BSID (bssid-idx), example: linksys-8450 Default-SSID-5gl c4:41:1e:f5:3f:25 (2)
* radio2: Specify 2.4Ghz radio. May be specified multiple times.
* radio5: Specify 5Ghz radio. May be specified multiple times.
'''
"""
import sys
import os
import importlib
@@ -70,20 +70,24 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lf_wifi_capacity_test = importlib.import_module("py-scripts.lf_wifi_capacity_test")
WiFiCapacityTest = lf_wifi_capacity_test.WiFiCapacityTest
cv_test_manager = importlib.import_module("py-scripts.cv_test_manager")
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
create_chamberview = importlib.import_module("py-scripts.create_chamberview")
CreateChamberview = create_chamberview.CreateChamberview
DUT = create_chamberview.DUT
create_chamberview_dut = importlib.import_module("py-scripts.create_chamberview_dut")
DUT = create_chamberview_dut.DUT
lf_dataplane_test = importlib.import_module("py-scripts.lf_dataplane_test")
DataplaneTest = lf_dataplane_test.DataplaneTest
grafana_profile = importlib.import_module("py-scripts.grafana_profile")
UseGrafana = grafana_profile.UseGrafana
lf_ap_auto_test = importlib.import_module("py-scripts.lf_ap_auto_test")
ApAutoTest = lf_ap_auto_test.ApAutoTest
cv_add_base_parser = cv_test_manager.cv_add_base_parser
cv_base_adjust_parser = cv_add_base_parser.cv_base_adjust_parser
def main():
@@ -193,7 +197,8 @@ def main():
parser.add_argument('--scripts', help='Scripts to graph in Grafana', default=None, action='append')
parser.add_argument('--title', help='title of your Grafana Dashboard', default=None)
parser.add_argument('--testbed', help='Which testbed you want to query', default=None)
parser.add_argument('--graph_groups_file', help='File which determines how you want to filter your graphs on your dashboard',
parser.add_argument('--graph_groups_file',
help='File which determines how you want to filter your graphs on your dashboard',
default=None)
parser.add_argument('--kpi', help='KPI file(s) which you want to graph form', action='append', default=None)
parser.add_argument('--datasource', help='Name of Influx database if different from InfluxDB', default='InfluxDB')

View File

@@ -35,7 +35,8 @@ class IPv4Test(LFCliBase):
self.timeout = 120
self.number_template = number_template
self.debug = _debug_on
self.local_realm = realm.Realm(lfclient_host=self.host, lfclient_port=self.port)
self.local_realm = realm.Realm(
lfclient_host=self.host, lfclient_port=self.port)
self.station_profile = self.local_realm.new_station_profile()
self.station_profile.lfclient_url = self.lfclient_url
@@ -51,27 +52,32 @@ class IPv4Test(LFCliBase):
def build(self):
# Build stations
#print("We've gotten into the build stations function")
self.station_profile.use_security(self.security, self.ssid, self.password)
self.station_profile.use_security(
self.security, self.ssid, self.password)
self.station_profile.set_number_template(self.number_template)
print("Creating stations")
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag(
"add_sta", "create_admin_down", 1)
self.station_profile.set_command_param(
"set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
self.station_profile.create(
radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
self.station_profile.admin_up()
if self.local_realm.wait_for_ip(station_list=self.sta_list, debug=self.debug, timeout_sec=30):
self._pass("Station build finished")
self.exit_success()
else:
self._fail("Stations not able to acquire IP. Please check network input.")
self._fail(
"Stations not able to acquire IP. Please check network input.")
self.exit_fail()
def cleanup(self, sta_list):
self.station_profile.cleanup(sta_list)
LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url, port_list=sta_list,
debug=self.debug)
def main():
parser = LFCliBase.create_basic_argparse(
@@ -108,7 +114,8 @@ def main():
optional = agroup
if optional is not None:
optional.add_argument('--mode', help=LFCliBase.Help_Mode)
optional.add_argument('--ap',help='Add BSSID of access point to connect to')
optional.add_argument(
'--ap', help='Add BSSID of access point to connect to')
args = parser.parse_args()
num_sta = 2
@@ -130,5 +137,6 @@ def main():
ip_test.timeout = 60
ip_test.build()
if __name__ == "__main__":
main()

View File

@@ -172,13 +172,13 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
cvtest = cv_test_manager.cv_test
cv_add_base_parser = cv_test_manager.cv_add_base_parser
cv_base_adjust_parser = cv_test_manager.cv_base_adjust_parser
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
class ApAutoTest(cvtest):
@@ -188,10 +188,11 @@ class ApAutoTest(cvtest):
lf_user="lanforge",
lf_password="lanforge",
ssh_port=22,
local_lf_report_dir="",
local_lf_report_dir=None,
lf_report_dir=None,
instance_name="ap_auto_instance",
config_name="ap_auto_config",
upstream="1.1.eth1",
upstream=None,
pull_report=False,
dut5_0="NA",
dut2_0="NA",
@@ -199,17 +200,29 @@ class ApAutoTest(cvtest):
max_stations_2=100,
max_stations_5=100,
max_stations_dual=200,
radio2=[],
radio5=[],
enables=[],
disables=[],
raw_lines=[],
radio2=None,
radio5=None,
enables=None,
disables=None,
raw_lines=None,
raw_lines_file="",
sets=[],
sets=None,
graph_groups=None
):
super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)
if radio2 is None:
radio2 = []
if radio5 is None:
radio5 = []
if enables is None:
enables = []
if disables is None:
disables = []
if raw_lines is None:
raw_lines = []
if sets is None:
sets = []
self.lf_host = lf_host
self.lf_port = lf_port
self.lf_user = lf_user
@@ -234,19 +247,19 @@ class ApAutoTest(cvtest):
self.sets = sets
self.ssh_port = ssh_port
self.graph_groups = graph_groups
self.lf_report_dir = lf_report_dir
self.local_lf_report_dir = local_lf_report_dir
def setup(self):
# Nothing to do at this time.
return
def run(self):
self.sync_cv()
time.sleep(2)
self.sync_cv()
blob_test = "%s-"%(self.test_name)
blob_test = "%s-" % self.test_name
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
self.show_text_blob(None, None, False)
@@ -267,8 +280,8 @@ class ApAutoTest(cvtest):
self.apply_cfg_options(cfg_options, self.enables, self.disables, self.raw_lines, self.raw_lines_file)
# Command line args take precedence.
if self.upstream != "":
cfg_options.append("upstream_port: " + self.upstream)
if self.upstream:
cfg_options.append("upstream-port: %s" % self.upstream)
if self.dut5_0 != "":
cfg_options.append("dut5-0: " + self.dut5_0)
if self.dut2_0 != "":
@@ -294,7 +307,6 @@ class ApAutoTest(cvtest):
def main():
parser = argparse.ArgumentParser(
prog="lf_ap_auto_test.py",
formatter_class=argparse.RawTextHelpFormatter,
@@ -302,28 +314,28 @@ def main():
Open this file in an editor and read the top notes for more details.
Example:
./lf_ap_auto_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \
--instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth2 \
--dut5_0 'linksys-8450 Default-SSID-5gl c4:41:1e:f5:3f:25 (2)' \
--dut2_0 'linksys-8450 Default-SSID-2g c4:41:1e:f5:3f:24 (1)' \
--max_stations_2 100 --max_stations_5 100 --max_stations_dual 200 \
--radio2 1.1.wiphy0 --radio2 1.1.wiphy2 \
--radio5 1.1.wiphy1 --radio5 1.1.wiphy3 --radio5 1.1.wiphy4 \
--radio5 1.1.wiphy5 --radio5 1.1.wiphy6 --radio5 1.1.wiphy7 \
--set 'Basic Client Connectivity' 1 --set 'Multi Band Performance' 1 \
--set 'Skip 2.4Ghz Tests' 1 --set 'Skip 5Ghz Tests' 1 \
--set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Stability' 0 --set 'Band-Steering' 0 \
--set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Long-Term' 0 \
--test_rig Testbed-01 --test_tag ATH10K --pull_report \
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
--influx_bucket ben \
./lf_ap_auto_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \\
--instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth2 \\
--dut5_0 'linksys-8450 Default-SSID-5gl c4:41:1e:f5:3f:25 (2)' \\
--dut2_0 'linksys-8450 Default-SSID-2g c4:41:1e:f5:3f:24 (1)' \\
--max_stations_2 100 --max_stations_5 100 --max_stations_dual 200 \\
--radio2 1.1.wiphy0 --radio2 1.1.wiphy2 \\
--radio5 1.1.wiphy1 --radio5 1.1.wiphy3 --radio5 1.1.wiphy4 \\
--radio5 1.1.wiphy5 --radio5 1.1.wiphy6 --radio5 1.1.wiphy7 \\
--set 'Basic Client Connectivity' 1 --set 'Multi Band Performance' 1 \\
--set 'Skip 2.4Ghz Tests' 1 --set 'Skip 5Ghz Tests' 1 \\
--set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Stability' 0 --set 'Band-Steering' 0 \\
--set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Long-Term' 0 \\
--test_rig Testbed-01 --test_tag ATH10K --pull_report \\
--influx_host c7-graphana --influx_port 8086 --influx_org Candela \\
--influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \\
--influx_bucket ben \\
--influx_tag testbed Ferndale-01
"""
)
cv_add_base_parser(parser) # see cv_test_manager.py
parser.add_argument("-u", "--upstream", type=str, default="",
parser.add_argument("-u", "--upstream", type=str, default=None,
help="Upstream port for wifi capacity test ex. 1.1.eth1")
parser.add_argument("--max_stations_2", type=int, default=-1,
@@ -341,7 +353,12 @@ def main():
help="Specify 2.4Ghz radio. May be specified multiple times.")
parser.add_argument("--radio5", action='append', nargs=1, default=[],
help="Specify 5Ghz radio. May be specified multiple times.")
parser.add_argument("--local_lf_report_dir", help="--local_lf_report_dir <where to pull reports to> default '' put where dataplane script run from",default="")
parser.add_argument("--local_lf_report_dir",
help="--local_lf_report_dir <where to pull reports to> default '' put where dataplane script run from",
default="")
parser.add_argument("--lf_report_dir",
help="--lf_report_dir <where to pull reports from> default '' put where dataplane script run from",
default="")
args = parser.parse_args()
@@ -356,6 +373,7 @@ def main():
upstream=args.upstream,
pull_report=args.pull_report,
local_lf_report_dir=args.local_lf_report_dir,
lf_report_dir=args.lf_report_dir,
dut5_0=args.dut5_0,
dut2_0=args.dut2_0,
load_old_cfg=args.load_old_cfg,
@@ -375,5 +393,6 @@ def main():
CV_Test.check_influx_kpi(args)
if __name__ == "__main__":
main()

38
py-scripts/lf_csv.py Normal file → Executable file
View File

@@ -42,39 +42,9 @@ class lf_csv:
print(csv_df)
csv_df.to_csv(self.filename, index=False, encoding='utf-8', na_rep='NA', float_format='%.2f')
# this layout may need to change
'''
kpi.csv : specific file that is used for the database, dashboard and blog post
A blank entry is a valid entry in some cases.
Date: date of run
test-rig : testbed that the tests are run on for example ct_us_001
test-tag : test specific information to differenciate the test, LANforge radios used, security modes (wpa2 , open)
dut-hw-version : hardware version of the device under test
dut-sw-version : software version of the device under test
dut-model-num : model number / name of the device under test
test-priority : test-priority is arbitrary number, choosing under 95 means it goes down at bottom of blog report, and higher priority goes at top.
test-id : script or test name , AP Auto, wifi capacity, data plane, dfs
short-description : short description of the test
pass/fail : set blank for performance tests
numeric-score : this is the value for the y-axis (x-axis is a timestamp), numeric value of what was measured
test-details : what was measured in the numeric-score, e.g. bits per second, bytes per second, upload speed, minimum cx time (ms)
Units : units used for the numeric-scort
Graph-Group - For the dashboard the graph / panel to put the resutls in . Currently the dashboard is Grafana
'''
class lf_kpi_csv:
def __init__(self,
_kpi_headers = ['Date','test-rig','test-tag','dut-hw-version','dut-sw-version','dut-model-num',
'test-priority','test-id','short-description','pass/fail','numberic-score'
'test details','Units','Graph-Group','Subtest-Pass','Subtest-Fail'],
_kpi_file='kpi.csv' #Currently this is the only file name accepted
):
self.kpi_headers = _kpi_headers
self.kpi_rows = ""
self.kpi_filename = _kpi_file
if __name__ == "__main__":
def main():
test = lf_csv()
test.generate_csv()
if __name__ == "__main__":
main()

View File

@@ -105,7 +105,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
cv_test_manager = importlib.import_module("py-json.cv_test_manager")
@@ -132,17 +131,24 @@ class DataplaneTest(cv_test):
duration="15s",
station="1.1.sta01500",
dut="NA",
enables=[],
disables=[],
raw_lines=[],
enables=None,
disables=None,
raw_lines=None,
raw_lines_file="",
sets=[],
sets=None,
graph_groups=None,
report_dir="",
test_rig=""
):
super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)
if enables is None:
enables = []
if disables is None:
disables = []
if raw_lines is None:
raw_lines = []
if sets is None:
sets = []
self.lf_host = lf_host
self.lf_port = lf_port
self.lf_user = lf_user
@@ -164,7 +170,6 @@ class DataplaneTest(cv_test):
self.raw_lines_file = raw_lines_file
self.sets = sets
self.graph_groups = graph_groups
self.report_dir = report_dir
self.ssh_port = ssh_port
self.local_lf_report_dir = local_lf_report_dir
self.test_rig = test_rig
@@ -180,14 +185,16 @@ class DataplaneTest(cv_test):
blob_test = "dataplane-test-latest-"
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
# To delete old config with same name
self.rm_text_blob(self.config_name, blob_test)
self.show_text_blob(None, None, False)
# Test related settings
cfg_options = []
### HERE###
self.apply_cfg_options(cfg_options, self.enables, self.disables, self.raw_lines, self.raw_lines_file)
self.apply_cfg_options(cfg_options, self.enables,
self.disables, self.raw_lines, self.raw_lines_file)
# cmd line args take precedence and so come last in the cfg array.
if self.upstream != "":
@@ -215,7 +222,8 @@ class DataplaneTest(cv_test):
self.pull_report, self.lf_host, self.lf_user, self.lf_password,
cv_cmds, ssh_port=self.ssh_port, local_lf_report_dir=self.local_lf_report_dir,
graph_groups_file=self.graph_groups)
self.rm_text_blob(self.config_name, blob_test) # To delete old config with same name
# To delete old config with same name
self.rm_text_blob(self.config_name, blob_test)
def main():
@@ -291,9 +299,10 @@ def main():
cv_add_base_parser(parser) # see cv_test_manager.py
parser.add_argument('--json', help="--json <config.json> json input file", default="")
parser.add_argument('--influx_json', help="--influx_json <influx_config.json> influx config json input file",
default="")
parser.add_argument(
'--json', help="--json <config.json> json input file", default="")
parser.add_argument(
'--influx_json', help="--influx_json <influx_config.json> influx config json input file", default="")
parser.add_argument("-u", "--upstream", type=str, default="",
help="Upstream port for wifi capacity test ex. 1.1.eth2")
parser.add_argument("--station", type=str, default="",
@@ -307,8 +316,8 @@ def main():
help="Specify requested upload speed. Percentage of theoretical is also supported. Default: 0")
parser.add_argument("--duration", default="",
help="Specify duration of each traffic run")
parser.add_argument("--graph_groups", help="File to save graph_groups to", default=None)
parser.add_argument("--report_dir", default="")
parser.add_argument(
"--graph_groups", help="File to save graph_groups to", default=None)
parser.add_argument("--local_lf_report_dir",
help="--local_lf_report_dir <where to pull reports to> default '' put where dataplane script run from",
default="")
@@ -316,12 +325,12 @@ def main():
args = parser.parse_args()
# use json config file
if args.json != "":
try:
if args.json:
if os.path.exists(args.json):
with open(args.json, 'r') as json_config:
json_data = json.load(json_config)
except:
print("Error reading {}".format(args.json))
else:
return FileNotFoundError("Error reading {}".format(args.json))
# json configuation takes presidence to command line
if "mgr" in json_data:
args.mgr = json_data["mgr"]
@@ -356,12 +365,12 @@ def main():
args.raw_line = json_data_tmp
# use influx json config file
if args.influx_json != "":
try:
with open(args.influx_json, 'r') as influx_json_config:
influx_json_data = json.load(influx_json_config)
except:
print("Error reading {}".format(args.influx_json))
if args.influx_json:
if os.path.exists(args.influx_json):
with open(args.influx_json, 'r') as json_config:
influx_json_data = json.load(json_config)
else:
return FileNotFoundError("Error reading {}".format(args.influx_json))
# json configuation takes presidence to command line
# influx DB configuration
if "influx_host" in influx_json_data:

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env python3
'''
"""
This Script has two classes :
1. LoadScenario : It will load the existing saved scenario to the Lanforge (Here used for Loading Bridged VAP)
2. CreateSTA_CX : It will create stations and L3 Cross connects and start them
@@ -21,13 +21,12 @@
.\Lexus_Final.py --lf_host 192.168.200.15 --dut_host 192.168.200.18 --dut_radio wiphy1 --lf_radio wiphy1 --num_sta 1 --sta_id 1 --lf_ssid lanforge_ap --dut_ssid lexusap --security open --dut_upstream eth2 --lf_upstream eth1 --protocol lf_udp --min_bps 1000 --max_bps 10000 --time 1
This Script is intended to automate the testing of DUT that has stations as well as AP.
To automate the simultaenous testing and check the DUT Temperature
'''
"""
import sys
import os
import importlib
import argparse
import time
import logging
import paramiko as pm
from paramiko.ssh_exception import NoValidConnectionsError as exception
import xlsxwriter
@@ -39,7 +38,6 @@ if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
@@ -65,6 +63,7 @@ class Login_DUT:
if self.CLIENT == 0:
exit()
print("Connected to " + HOST + " DUT to Measure the Core Temperature")
def run(self):
stdin, stdout, stderr = self.CLIENT.exec_command("sensors")
out_lines = stdout.readlines()
@@ -73,7 +72,6 @@ class Login_DUT:
self.data_core1.append(out_lines[len(out_lines) - 3])
self.data_core2.append(out_lines[len(out_lines) - 2])
def Connect(self):
self.CLIENT.load_system_host_keys()
self.CLIENT.set_missing_host_key_policy(pm.AutoAddPolicy())
@@ -81,7 +79,7 @@ class Login_DUT:
self.CLIENT.connect(self.host, username=self.USERNAME, password=self.PASSWORD, timeout=10)
return None
except exception as error:
self.CLIENT = 0;
self.CLIENT = 0
return None
@@ -95,10 +93,12 @@ class LoadScenario(LFCliBase):
print(host + " : Scenario Loaded...")
time.sleep(2)
# Class to create stations and run L3 Cross connects and run them for given time. It also stores the endpoint names for measuring throughput
class CreateSTA_CX(LFCliBase):
def __init__(self, host, port, radio, num_sta, sta_id, ssid, security, password, upstream, protocol, min_bps, max_bps, security_debug_on=True, _exit_on_error=True, _exit_on_fail=True):
def __init__(self, host, port, radio, num_sta, sta_id, ssid, security, password, upstream, protocol, min_bps,
max_bps, security_debug_on=True, _exit_on_error=True, _exit_on_fail=True):
super().__init__(host, port, _debug=security_debug_on, _exit_on_fail=_exit_on_fail)
self.host = host
@@ -167,7 +167,8 @@ class CreateSTA_CX(LFCliBase):
self.cx_profile.side_a_max_pkt = 'Same'
# Create Connections of Given Parameters
self.cx_profile.create(self.protocol, side_a="1.1."+self.upstream, side_b=list(self.local_realm.find_ports_like("sta0+")))
self.cx_profile.create(self.protocol, side_a="1.1." + self.upstream,
side_b=list(self.local_realm.find_ports_like("sta0+")))
time.sleep(15)
# Getting all the Endpoint Names for measuring Throughput Later
@@ -179,7 +180,6 @@ class CreateSTA_CX(LFCliBase):
# print(self.endp)
return 0
def start(self):
# self.station_profile.admin_up()
@@ -221,12 +221,8 @@ class CreateSTA_CX(LFCliBase):
del_sta_names.append(tname)
except Exception as x:
self.local_realm.error(x)
try:
LFUtils.waitUntilPortsDisappear(base_url=self.local_realm.lfclient_url, port_list=del_sta_names, debug=True)
print("Ports Successfully Cleaned up")
return 0
except:
print("Ports Successfully Cleaned up")
time.sleep(5)
return 0
@@ -273,8 +269,6 @@ def GenerateReport(throughput_sta, throughput_vap, core1_temp, core2_temp, durat
# Plotting Function for Parameters
def plot(throughput_sta, throughput_vap, core1_temp, core2_temp, Time):
s1 = figure()
s1.title.text = "WIFI Throughput vs Temperature Plot"
s1.xaxis.axis_label = "Time in Seconds"
@@ -304,138 +298,65 @@ class VAP_Measure(LFCliBase):
super().__init__(lfclient_host, lfclient_port)
# main method
def main():
parser = argparse.ArgumentParser(
prog='lf_dut_sta_vap_test.py',
formatter_class=argparse.RawTextHelpFormatter,
description="Test Scenario of DUT Temperature measurement along with simultaneous throughput on VAP as well as stations")
parser.add_argument("-m", "--manager", type=str, help="Enter the address of Lanforge Manager (By default localhost)")
parser.add_argument("-sc", "--scenario", type=str, help="Enter the Name of the Scenario you want to load (by Default DFLT)")
parser.add_argument("-m", "--manager", type=str,
help="Enter the address of Lanforge Manager (By default localhost)", default="localhost")
parser.add_argument("-sc", "--scenario", type=str,
help="Enter the Name of the Scenario you want to load (by Default DFLT)")
parser.add_argument("-r", "--radio", type=str, help="Enter the radio on which you want to create a station/s on ")
parser.add_argument("-n", "--num_sta", type=int, help="Enter the Number of Stations You want to create")
parser.add_argument("-i", "--sta_id", type=int, help="Enter Station id [for sta001, enter 1]")
parser.add_argument("-ss", "--ssid", type=str, help="Enter the ssid, with which you want to associate your stations (Enter the SSID of DUT AP)")
parser.add_argument("-up", "--upstream", type=str, help="Enter the upstream ethernet port")
parser.add_argument("-sec", "--security", type=str, help="Enter the security type [open, wep, wpa, wpa2]")
parser.add_argument("-p", "--password", type=str, help="Enter the password if security is not open")
parser.add_argument("-pr", "--protocol", type=str, help="Enter the protocol on which you want to run your connections [lf_udp, lf_tcp]")
parser.add_argument("-mn", "--min_mbps", type=str, help="Enter the Minimum Rate")
parser.add_argument("-mx", "--max_mbps", type=str, help="Enter the Maximum Rate")
parser.add_argument("-t", "--duration", type=int, help="Enter the Time for which you want to run test (In Minutes)")
parser.add_argument("-o", "--report_name", type=str, help="Enter the Name of the Output file ('Report.xlsx')")
args = None
parser.add_argument("-n", "--num_sta", type=int, help="Enter the Number of Stations You want to create", default=0)
parser.add_argument("-i", "--sta_id", type=int, help="Enter Station id [for sta001, enter 1]", default=0)
parser.add_argument("-ss", "--ssid", type=str,
help="Enter the ssid, with which you want to associate your stations (Enter the SSID of DUT AP)")
parser.add_argument("-up", "--upstream", type=str, help="Enter the upstream ethernet port", default='br0000')
parser.add_argument("-sec", "--security", type=str, help="Enter the security type [open, wep, wpa, wpa2]",
default='open')
parser.add_argument("-p", "--password", type=str, help="Enter the password if security is not open",
default='[Blank]')
parser.add_argument("-pr", "--protocol", type=str,
help="Enter the protocol on which you want to run your connections [lf_udp, lf_tcp]",
default='lf_udp')
parser.add_argument("-mn", "--min_mbps", type=int, help="Enter the Minimum Rate", default=1000)
parser.add_argument("-mx", "--max_mbps", type=int, help="Enter the Maximum Rate")
parser.add_argument("-t", "--duration", type=int, help="Enter the Time for which you want to run test (In Minutes)",
default=15)
parser.add_argument("-o", "--report_name", type=str, help="Enter the Name of the Output file ('Report.xlsx')",
default='report.xlsx')
try:
args = parser.parse_args()
# Lanforge Manager IP Address
if (args.manager is None):
manager = "localhost"
if (args.manager is not None):
manager = args.manager
# Scenario Name
if (args.scenario is not None):
scenario = args.scenario
# Radio Name
if (args.radio is not None):
radio = args.radio
min_bps = args.min_mbps * 1000000
# Number of Stations
if (args.num_sta is None):
num_sta = 0
if (args.num_sta is not None):
num_sta = args.num_sta
# Station ID
if (args.sta_id is None):
sta_id = '0'
if (args.sta_id is not None):
sta_id = args.sta_id
# SSID
if (args.ssid is not None):
ssid = args.ssid
if (args.ssid is not None):
ssid = args.ssid
# Security (Open by Default)
if (args.security is None):
security = 'open'
if (args.security is not None):
security = args.security
# Password (if Security is not Open)
if (args.password is not None):
password = args.password
if (args.password == 'open'):
password = "[Blank]"
if (args.password is None):
password = "[Blank]"
# Upstream Port (By default br0000)
if (args.upstream is None):
upstream = 'br0000'
if (args.upstream is not None):
upstream = args.upstream
# Protocol (By Default lf_udp)
if (args.protocol is not None):
protocol = args.protocol
if (args.protocol is None):
protocol = 'lf_udp'
#Min BPS
if (args.min_mbps is not None):
min_bps = int(args.min_mbps)*1000000
if (args.min_mbps is None):
min_bps = int(1000)*1000000
if (args.max_mbps is None ):
max_bps = int(1000)*1000000
if (args.min_mbps is not None):
min_bps = int(args.min_mbps)*1000000
if (args.max_mbps is not None and args.max_mbps != "same"):
if args.max_mbps and args.max_mbps != "same":
max_bps = int(args.max_mbps) * 1000000
if (args.max_mbps is not None and args.max_mbps == "same"):
if args.max_mbps and args.max_mbps == "same":
max_bps = args.min_mbps
if (args.duration is not None):
duration = (args.duration * 60)/5
if (args.report_name is not None):
report_name = args.report_name
if (args.duration is None):
duration = (1 * 60)/5
if (args.report_name is None):
report_name = "report.xlsx"
except Exception as e:
logging.exception(e)
exit(2)
# Start DUT
# Loading the Scenario on Lanforge_1 (Here Considered as DUT) [Created VAP With SSID 'lexusap' on wiphy0 with eth1 as backhaul]
Scenario_1 = LoadScenario("192.168.200.18", 8080, "Lexus_DUT")
dut_traffic_profile = CreateSTA_CX("192.168.200.18", 8080, "wiphy1", 1, 0, 'lanforge_ap', 'open', password, 'br0000', 'lf_udp', min_bps, max_bps)
dut_traffic_profile = CreateSTA_CX("192.168.200.18", 8080, "wiphy1", 1, 0, 'lanforge_ap', 'open', args.password,
'br0000', 'lf_udp', min_bps, max_bps)
dut_traffic_profile.build()
print("DUT All Set... Lets setup Lanforge")
# Loading the Scenario on Lanforge_2 (Here Considered as LANFORGE Test) [Created VAP With SSID 'lanforge_ap' on wiphy0 with eth2 as backhaul]
DB_Lanforge_2 = "LANforge_TEST"
Scenario_2 = LoadScenario(manager, 8080, scenario)
Scenario_2 = LoadScenario(args.manager, 8080, args.scenario)
lf_traffic_profile = CreateSTA_CX(manager, 8080, radio, num_sta, sta_id, ssid, security, password, upstream, protocol, min_bps, max_bps)
lf_traffic_profile = CreateSTA_CX(args.manager, 8080, args.radio, args.num_sta, args.sta_id, args.ssid,
args.security, args.password, args.upstream, args.protocol, min_bps,
max_bps)
lf_traffic_profile.build()
print("Lanforge System is All set... Lets start and Measure")
@@ -448,7 +369,7 @@ def main():
print("Collecting Throughput Values...")
# Object to Measure Throughput at VAP Side
vap_measure_obj = VAP_Measure(manager, 8080)
vap_measure_obj = VAP_Measure(args.manager, 8080)
#
dut_temp_obj = Login_DUT(1, "Thread-1", "192.168.200.18")
@@ -458,7 +379,7 @@ def main():
throughput_vap = []
# This loop will get the Data from All the endpoints and sum up to give total Throughput over time
for i in range(0,int(duration)):
for i in range(0, int(args.duration)):
temp = 0
for j in lf_traffic_profile.endp:
y = lf_traffic_profile.local_realm.json_get("/endp/" + j).get('endpoint').get('rx rate')
@@ -472,10 +393,9 @@ def main():
print(throughput_sta)
dut_traffic_profile.cleanup()
lf_traffic_profile.cleanup()
GenerateReport(throughput_sta, throughput_vap, dut_temp_obj.data_core1, dut_temp_obj.data_core2, duration, report_name)
GenerateReport(throughput_sta, throughput_vap, dut_temp_obj.data_core1, dut_temp_obj.data_core2, args.duration,
args.report_name)
if __name__ == '__main__':
main()

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env python3
""" lf_ftp.py will verify that N clients connected on specified band and can simultaneously download/upload some amount of file from FTP server and measuring the time taken by client to download/upload the file.
cli- python3 lf_ftp.py --mgr localhost --mgr_port 8080 --upstream_port eth1 --ssid FTP --security open --passwd BLANK --ap_name WAC505 --ap_ip 192.168.213.90 --bands Both --directions Download --twog_radio wiphy1 --fiveg_radio wiphy0 --file_size 2MB --num_stations 40 --Both_duration 1 --traffic_duration 2 --ssh_port 22_
cli- ./lf_ftp.py --ssid <SSID> --passwd <PASSWORD> --file_sizes 2MB --fiveg_duration 4 --mgr 192.168.1.101 --traffic_duration 2 --security wpa2 --bands 5G --fiveg_radio wiphy1 --directions Download Upload
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
"""
@@ -11,6 +12,7 @@ from datetime import datetime
import time
import os
import matplotlib.patches as mpatches
import pandas as pd
if sys.version_info[0] != 3:
print("This script requires Python 3")
@@ -26,13 +28,14 @@ realm = importlib.import_module("py-json.realm")
Realm = realm.Realm
lf_report = importlib.import_module("py-scripts.lf_report")
lf_graph = importlib.import_module("py-scripts.lf_graph")
lf_kpi_csv = importlib.import_module("py-scripts.lf_kpi_csv")
class FtpTest(LFCliBase):
def __init__(self, lfclient_host="localhost", lfclient_port=8080, sta_prefix="sta", start_id=0, num_sta=None,
dut_ssid=None, dut_security=None, dut_passwd=None, file_size=None, band=None, twog_radio=None,
fiveg_radio=None, upstream="eth1", _debug_on=False, _exit_on_error=False, _exit_on_fail=False,
direction=None, duration=None, traffic_duration=None, ssh_port=None):
direction=None, duration=None, traffic_duration=None, ssh_port=None, kpi_csv=None):
super().__init__(lfclient_host, lfclient_port, _debug=_debug_on, _exit_on_fail=_exit_on_fail)
print("Test is about to start")
self.host = lfclient_host
@@ -47,6 +50,7 @@ class FtpTest(LFCliBase):
self.password = dut_passwd
self.requests_per_ten = 1
self.band = band
self.kpi_csv = kpi_csv
self.file_size = file_size
self.direction = direction
self.twog_radio = twog_radio
@@ -301,7 +305,7 @@ class FtpTest(LFCliBase):
for i in range(self.num_sta):
list_of_time.append(0)
#running layer 4 traffic upto user given time
while str(datetime.datetime.now() - time1) <= self.traffic_duration:
while str(datetime.now() - time1) <= self.traffic_duration:
if list_of_time.count(0) == 0:
break
@@ -309,11 +313,11 @@ class FtpTest(LFCliBase):
# run script upto given time
if counter == 0:
if str(datetime.datetime.now() - time1) >= self.duration:
if str(datetime.now() - time1) >= self.duration:
counter = counter + 1
break
else:
if str(datetime.datetime.now() - time1) >= self.traffic_duration:
if str(datetime.now() - time1) >= self.traffic_duration:
break
for i in range(self.num_sta):
@@ -321,9 +325,9 @@ class FtpTest(LFCliBase):
# reading uc-avg data in json format
uc_avg = self.json_get("layer4/list?fields=uc-avg")
if data['endpoint'][i][data2[i]]['bytes-rd'] <= self.file_size_bytes:
if int(data['endpoint'][i][data2[i]]['bytes-rd']) <= self.file_size_bytes:
data = self.json_get("layer4/list?fields=bytes-rd")
if data['endpoint'][i][data2[i]]['bytes-rd'] >= self.file_size_bytes:
if int(data['endpoint'][i][data2[i]]['bytes-rd']) >= self.file_size_bytes:
list1.append(i)
if list1.count(i) == 1:
list2.append(i)
@@ -614,7 +618,7 @@ class FtpTest(LFCliBase):
def bar_graph(self, x_axis, image_name, dataset, color, labels, x_axis_name, y_axis_name,handles, ncol, box, fontsize):
'''This Method will plot bar graph'''
graph = lf_bar_graph(_data_set=dataset,
graph = lf_graph.lf_bar_graph(_data_set=dataset,
_xaxis_name=x_axis_name,
_yaxis_name=y_axis_name,
_xaxis_categories=x_axis,
@@ -660,7 +664,7 @@ class FtpTest(LFCliBase):
def generate_report(self, ftp_data, date,test_setup_info, input_setup_info):
'''Method for generate the report'''
self.report = lf_report(_results_dir_name="ftp_test", _output_html="ftp_test.html", _output_pdf="ftp_test.pdf")
self.report = lf_report.lf_report(_results_dir_name="ftp_test", _output_html="ftp_test.html", _output_pdf="ftp_test.pdf")
self.report.set_title("FTP Test")
self.report.set_date(date)
self.report.build_banner()
@@ -703,7 +707,14 @@ def main():
parser = argparse.ArgumentParser(
prog='lf_ftp.py',
formatter_class=argparse.RawTextHelpFormatter,
description="FTP Test Script")
description='''\
---------------------------
FTP Test Script - lf_ftp.py
---------------------------
CLI Example:
./lf_ftp.py --ssid <SSID> --passwd <PASSWORD> --file_sizes 2MB --fiveg_duration 4 --mgr 192.168.1.101 --traffic_duration 2 --security wpa2 --bands 5G --fiveg_radio wiphy1 --directions Download Upload
---------------------------
''')
parser.add_argument('--mgr', help='hostname for where LANforge GUI is running', default='localhost')
parser.add_argument('--mgr_port', help='port LANforge GUI HTTP service is running on', default=8080)
parser.add_argument('--upstream_port', help='non-station port that generates traffic: eg: eth1', default='eth1')
@@ -716,7 +727,7 @@ def main():
parser.add_argument('--fiveg_radio', type=str, help='specify radio for 5G client', default='wiphy0')
parser.add_argument('--twog_duration', nargs="+", help='Pass and Fail duration for 2.4G band in minutes')
parser.add_argument('--fiveg_duration', nargs="+", help='Pass and Fail duration for 5G band in minutes')
parser.add_argument('--Both_duration', nargs="+", help='Pass and Fail duration for Both band in minutes')
parser.add_argument('--both_duration', nargs="+", help='Pass and Fail duration for Both band in minutes')
parser.add_argument('--traffic_duration', type=int, help='duration for layer 4 traffic running')
parser.add_argument('--ssh_port', type=int, help="specify the shh port eg 22", default=22)
@@ -732,10 +743,10 @@ def main():
args = parser.parse_args()
# 1st time stamp for test duration
time_stamp1 = datetime.datetime.now()
time_stamp1 = datetime.now()
# use for creating ftp_test dictionary
iteraration_num = 0
interation_num = 0
# empty dictionary for whole test data
ftp_data = {}
@@ -759,12 +770,12 @@ def main():
index = list(args.file_sizes).index(size)
duration = args.fiveg_duration[index]
else:
if len(args.file_sizes) is not len(args.Both_duration):
if len(args.file_sizes) is not len(args.both_duration):
raise Exception("Give proper Pass or Fail duration for 5G band")
for size in args.file_sizes:
if size == file_size:
index = list(args.file_sizes).index(size)
duration = args.Both_duration[index]
duration = args.both_duration[index]
if duration.isdigit():
duration = int(duration)
else:
@@ -794,7 +805,7 @@ def main():
ssh_port=args.ssh_port
)
iteraration_num = iteraration_num + 1
interation_num = interation_num + 1
obj.file_create()
obj.set_values()
obj.precleanup()
@@ -804,7 +815,7 @@ def main():
exit(1)
# First time stamp
time1 = datetime.datetime.now()
time1 = datetime.now()
obj.start(False, False)
@@ -815,19 +826,19 @@ def main():
pass_fail = obj.pass_fail_check(time_list)
# dictionary of whole data
ftp_data[iteraration_num] = obj.ftp_test_data(time_list, pass_fail, args.bands, args.file_sizes,
ftp_data[interation_num] = obj.ftp_test_data(time_list, pass_fail, args.bands, args.file_sizes,
args.directions, args.num_stations)
obj.stop()
obj.postcleanup()
# 2nd time stamp for test duration
time_stamp2 = datetime.datetime.now()
time_stamp2 = datetime.now()
# total time for test duration
test_duration = str(time_stamp2 - time_stamp1)[:-7]
date = str(datetime.datetime.now()).split(",")[0].replace(" ", "-").split(".")[0]
date = str(datetime.now()).split(",")[0].replace(" ", "-").split(".")[0]
#print(ftp_data)

View File

@@ -24,31 +24,34 @@ import matplotlib.pyplot as plt
import numpy as np
import pdfkit
from matplotlib.colors import ListedColormap
import argparse
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
lf_csv = importlib.import_module("py-scripts.lf_csv")
lf_csv = lf_csv.lf_csv
# internal candela references included during intial phases, to be deleted at future date
# internal candela references included during intial phases, to be deleted
# at future date
# graph reporting classes
class lf_bar_graph():
def __init__(self, _data_set=[[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]],
class lf_bar_graph:
def __init__(self, _data_set=None,
_xaxis_name="x-axis",
_yaxis_name="y-axis",
_xaxis_categories=[1, 2, 3, 4],
_xaxis_label=["a", "b", "c", "d"],
_xaxis_categories=None,
_xaxis_label=None,
_graph_title="",
_title_size=16,
_graph_image_name="image_name",
_label=["bi-downlink", "bi-uplink", 'uplink'],
_label=None,
_color=None,
_bar_width=0.25,
_color_edge='grey',
_font_weight='bold',
_color_name=['lightcoral', 'darkgrey', 'r', 'g', 'b', 'y'],
_color_name=None,
_figsize=(10, 5),
_show_bar_value=False,
_xaxis_step=1,
@@ -65,6 +68,16 @@ class lf_bar_graph():
_dpi=96,
_enable_csv=False):
if _data_set is None:
_data_set = [[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]]
if _xaxis_categories is None:
_xaxis_categories = [1, 2, 3, 4]
if _xaxis_label is None:
_xaxis_label = ["a", "b", "c", "d"]
if _label is None:
_label = ["bi-downlink", "bi-uplink", 'uplink']
if _color_name is None:
_color_name = ['lightcoral', 'darkgrey', 'r', 'g', 'b', 'y']
self.data_set = _data_set
self.xaxis_name = _xaxis_name
self.yaxis_name = _yaxis_name
@@ -99,20 +112,20 @@ class lf_bar_graph():
if self.color is None:
i = 0
self.color = []
for col in self.data_set:
for _ in self.data_set:
self.color.append(self.color_name[i])
i = i + 1
fig = plt.subplots(figsize=self.figsize)
plt.subplots(figsize=self.figsize)
i = 0
def show_value(rects):
for rect in rects:
def show_value(rectangles):
for rect in rectangles:
h = rect.get_height()
plt.text(rect.get_x() + rect.get_width() / 2., h, h,
ha='center', va='bottom', rotation=self.text_rotation, fontsize=self.text_font)
for data in self.data_set:
for _ in self.data_set:
if i > 0:
br = br1
br2 = [x + self.bar_width for x in br]
@@ -132,14 +145,22 @@ class lf_bar_graph():
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
if self.xaxis_categories[0] == 0:
plt.xticks(np.arange(0, len(self.xaxis_categories), step=self.xaxis_step),fontsize = self.xticks_font)
plt.xticks(np.arange(0,
len(self.xaxis_categories),
step=self.xaxis_step),
fontsize=self.xticks_font)
else:
plt.xticks([i + self._xaxis_value_location for i in np.arange(0, len(self.data_set[0]), step=self.xaxis_step)],
self.xaxis_categories, fontsize=self.xticks_font)
plt.legend(handles=self.legend_handles, loc=self.legend_loc, bbox_to_anchor=self.legend_box, ncol=self.legend_ncol, fontsize=self.legend_fontsize)
plt.legend(
handles=self.legend_handles,
loc=self.legend_loc,
bbox_to_anchor=self.legend_box,
ncol=self.legend_ncol,
fontsize=self.legend_fontsize)
plt.suptitle(self.title, fontsize=self.title_size)
plt.title(self.grp_title)
fig = plt.gcf()
plt.gcf()
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close()
print("{}.png".format(self.graph_image_name))
@@ -155,25 +176,32 @@ class lf_bar_graph():
self.lf_csv.filename = f"{self.graph_image_name}.csv"
self.lf_csv.generate_csv()
else:
raise ValueError("Length and x-axis values and y-axis values should be same.")
raise ValueError(
"Length and x-axis values and y-axis values should be same.")
else:
print("No Dataset Found")
print("{}.csv".format(self.graph_image_name))
return "%s.png" % self.graph_image_name
class lf_scatter_graph():
class lf_scatter_graph:
def __init__(self,
_x_data_set=["sta0 ", "sta1", "sta2", "sta3"],
_y_data_set=[[30, 55, 69, 37]],
_x_data_set=None,
_y_data_set=None,
_values=None,
_xaxis_name="x-axis",
_yaxis_name="y-axis",
_label=["num1", "num2"],
_label=None,
_graph_image_name="image_name1",
_color=["r", "y"],
_color=None,
_figsize=(9, 4),
_enable_csv=True):
if _x_data_set is None:
_x_data_set = ["sta0 ", "sta1", "sta2", "sta3"]
if _y_data_set is None:
_y_data_set = [[30, 55, 69, 37]]
if _label is None:
_label = ["num1", "num2"]
self.x_data_set = _x_data_set
self.y_data_set = _y_data_set
self.xaxis_name = _xaxis_name
@@ -188,20 +216,38 @@ class lf_scatter_graph():
def build_scatter_graph(self):
if self.color is None:
self.color = ["orchid", "lime", "aquamarine", "royalblue", "darkgray", "maroon"]
fig = plt.subplots(figsize=self.figsize)
self.color = [
"orchid",
"lime",
"aquamarine",
"royalblue",
"darkgray",
"maroon"]
plt.subplots(figsize=self.figsize)
if self.values is None:
plt.scatter(self.x_data_set, self.y_data_set[0], color=self.color[0], label=self.label[0])
plt.scatter(
self.x_data_set,
self.y_data_set[0],
color=self.color[0],
label=self.label[0])
if len(self.y_data_set) > 1:
for i in range(1, len(self.y_data_set)):
plt.scatter(self.x_data_set, self.y_data_set[i], color=self.color[i], label=self.label[i])
plt.scatter(
self.x_data_set,
self.y_data_set[i],
color=self.color[i],
label=self.label[i])
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
plt.gcf().autofmt_xdate()
plt.legend()
else:
colours = ListedColormap(self.color)
scatter = plt.scatter(self.x_data_set, self.y_data_set, c=self.values, cmap=colours)
scatter = plt.scatter(
self.x_data_set,
self.y_data_set,
c=self.values,
cmap=colours)
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
plt.gcf().autofmt_xdate()
@@ -218,16 +264,20 @@ class lf_scatter_graph():
return "%s.png" % self.graph_image_name
class lf_stacked_graph():
class lf_stacked_graph:
def __init__(self,
_data_set=[[1, 2, 3, 4], [1, 1, 1, 1], [1, 1, 1, 1]],
_data_set=None,
_xaxis_name="Stations",
_yaxis_name="Numbers",
_label=['Success', 'Fail'],
_label=None,
_graph_image_name="image_name2",
_color=["b", "g"],
_color=None,
_figsize=(9, 4),
_enable_csv=True):
if _data_set is None:
_data_set = [[1, 2, 3, 4], [1, 1, 1, 1], [1, 1, 1, 1]]
if _label is None:
_label = ['Success', 'Fail']
self.data_set = _data_set # [x_axis,y1_axis,y2_axis]
self.xaxis_name = _xaxis_name
self.yaxis_name = _yaxis_name
@@ -239,11 +289,21 @@ class lf_stacked_graph():
self.lf_csv = lf_csv()
def build_stacked_graph(self):
fig = plt.subplots(figsize=self.figsize)
plt.subplots(figsize=self.figsize)
if self.color is None:
self.color = ["darkred", "tomato", "springgreen", "skyblue", "indigo", "plum"]
self.color = [
"darkred",
"tomato",
"springgreen",
"skyblue",
"indigo",
"plum"]
plt.bar(self.data_set[0], self.data_set[1], color=self.color[0])
plt.bar(self.data_set[0], self.data_set[2], bottom=self.data_set[1], color=self.color[1])
plt.bar(
self.data_set[0],
self.data_set[2],
bottom=self.data_set[1],
color=self.color[1])
if len(self.data_set) > 3:
for i in range(3, len(self.data_set)):
plt.bar(self.data_set[0], self.data_set[i],
@@ -251,7 +311,7 @@ class lf_stacked_graph():
plt.xlabel(self.xaxis_name)
plt.ylabel(self.yaxis_name)
plt.legend(self.label)
plt.savefig("%s.png" % (self.graph_image_name), dpi=96)
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close()
print("{}.png".format(self.graph_image_name))
if self.enable_csv:
@@ -259,23 +319,31 @@ class lf_stacked_graph():
self.lf_csv.rows = self.data_set
self.lf_csv.filename = f"{self.graph_image_name}.csv"
self.lf_csv.generate_csv()
return "%s.png" % (self.graph_image_name)
return "%s.png" % self.graph_image_name
class lf_horizontal_stacked_graph():
class lf_horizontal_stacked_graph:
def __init__(self,
_seg=2,
_yaxis_set=('A', 'B'),
_xaxis_set1=[12, 0, 0, 16, 15],
_xaxis_set2=[23, 34, 23, 0],
_xaxis_set1=None,
_xaxis_set2=None,
_unit="%",
_xaxis_name="Stations",
_label=['Success', 'Fail'],
_label=None,
_graph_image_name="image_name3",
_color=["success", "Fail"],
_color=None,
_figsize=(9, 4),
_disable_xaxis=False,
_enable_csv=True):
if _xaxis_set1 is None:
_xaxis_set1 = [12, 0, 0, 16, 15]
if _xaxis_set2 is None:
_xaxis_set2 = [23, 34, 23, 0]
if _label is None:
_label = ['Success', 'Fail']
if _color is None:
_color = ["success", "Fail"]
self.unit = _unit
self.seg = _seg
self.xaxis_set1 = _xaxis_set1
@@ -303,8 +371,19 @@ class lf_horizontal_stacked_graph():
ind = np.arange(n) + .15
width = 0.3
rects1 = plt.barh(ind, values1, width, color=self.color[0], label=self.label[0])
rects2 = plt.barh(ind, values2, width, left=sumzip(values1), color=self.color[1], label=self.label[1])
plt.barh(
ind,
values1,
width,
color=self.color[0],
label=self.label[0])
plt.barh(
ind,
values2,
width,
left=sumzip(values1),
color=self.color[1],
label=self.label[1])
extra_space = 0.15
ax.set_yticks(ind + width - extra_space)
@@ -326,7 +405,12 @@ class lf_horizontal_stacked_graph():
ax.spines['top'].set_visible(False)
ax.legend(loc='upper right')
if self.disable_xaxis:
plt.tick_params(axis='x', which='both', bottom=False, top=False, labelbottom=False) # disable x-axis
plt.tick_params(
axis='x',
which='both',
bottom=False,
top=False,
labelbottom=False) # disable x-axis
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close()
print("{}.png".format(self.graph_image_name))
@@ -338,18 +422,18 @@ class lf_horizontal_stacked_graph():
return "%s.png" % self.graph_image_name
class lf_line_graph():
def __init__(self,_data_set=[[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]],
class lf_line_graph:
def __init__(self, _data_set=None,
_xaxis_name="x-axis",
_yaxis_name="y-axis",
_xaxis_categories=[1, 2, 3, 4, 5],
_xaxis_label=["a", "b", "c", "d", "e"],
_xaxis_categories=None,
_xaxis_label=None,
_graph_title="",
_title_size=16,
_graph_image_name="image_name",
_label=["bi-downlink", "bi-uplink", 'uplink'],
_label=None,
_font_weight='bold',
_color=['forestgreen', 'c', 'r', 'g', 'b', 'p'],
_color=None,
_figsize=(10, 5),
_xaxis_step=5,
_xticks_font=None,
@@ -362,6 +446,16 @@ class lf_line_graph():
_marker=None,
_dpi=96,
_enable_csv=False):
if _data_set is None:
_data_set = [[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]]
if _xaxis_categories is None:
_xaxis_categories = [1, 2, 3, 4, 5]
if _xaxis_label is None:
_xaxis_label = ["a", "b", "c", "d", "e"]
if _label is None:
_label = ["bi-downlink", "bi-uplink", 'uplink']
if _color is None:
_color = ['forestgreen', 'c', 'r', 'g', 'b', 'p']
self.data_set = _data_set
self.xaxis_name = _xaxis_name
self.yaxis_name = _yaxis_name
@@ -387,17 +481,27 @@ class lf_line_graph():
self.legend_fontsize = _legend_fontsize
def build_line_graph(self):
fig = plt.subplots(figsize=self.figsize)
plt.subplots(figsize=self.figsize)
i = 0
for data in self.data_set:
plt.plot(self.xaxis_categories, data, color=self.color[i], label=self.label[i], marker = self.marker)
plt.plot(
self.xaxis_categories,
data,
color=self.color[i],
label=self.label[i],
marker=self.marker)
i += 1
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
plt.legend(handles=self.legend_handles, loc=self.legend_loc, bbox_to_anchor=self.legend_box, ncol=self.legend_ncol, fontsize=self.legend_fontsize)
plt.legend(
handles=self.legend_handles,
loc=self.legend_loc,
bbox_to_anchor=self.legend_box,
ncol=self.legend_ncol,
fontsize=self.legend_fontsize)
plt.suptitle(self.grp_title, fontsize=self.title_size)
fig = plt.gcf()
plt.gcf()
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
plt.close()
print("{}.png".format(self.graph_image_name))
@@ -412,8 +516,46 @@ class lf_line_graph():
print("{}.csv".format(self.graph_image_name))
return "%s.png" % self.graph_image_name
# Unit Test
if __name__ == "__main__":
def main():
# arguments
parser = argparse.ArgumentParser(
prog='lf_graph.py',
formatter_class=argparse.RawTextHelpFormatter,
epilog='''\
lf_graph.py : unit test in lf_graph.py for exersizing the lf_graph.py library
''',
description='''\
-----------------
NAME: lf_graph.py
PURPOSE:
Common Library for generating graphs for LANforge output
SETUP:
/lanforge/html-reports directory needs to be present or output generated in local file
EXAMPLE:
see: /py-scritps/lf_report_test.py for example
COPYWRITE
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
INCLUDE_IN_README
---------------------
''')
parser.add_argument(
'--mgr',
'--lfmgr',
dest='lfmgr',
help='sample argument: where LANforge GUI is running',
default='localhost')
# the args parser is not really used , this is so the report is not generated when testing
# the imports with --help
args = parser.parse_args()
print("LANforge manager {lfmgr}".format(lfmgr=args.lfmgr))
output_html_1 = "graph_1.html"
output_pdf_1 = "graph_1.pdf"
@@ -432,7 +574,8 @@ if __name__ == "__main__":
# write logic to generate pdf here
# wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
# sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
options = {"enable-local-file-access": None} # prevent eerror Blocked access to file
# prevent eerror Blocked access to file
options = {"enable-local-file-access": None}
pdfkit.from_file(output_html_1, output_pdf_1, options=options)
# test build_bar_graph setting values
@@ -465,5 +608,11 @@ if __name__ == "__main__":
# write logic to generate pdf here
# wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
# sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
options = {"enable-local-file-access": None} # prevent eerror Blocked access to file
# prevent eerror Blocked access to file
options = {"enable-local-file-access": None}
pdfkit.from_file(output_html_2, output_pdf_2, options=options)
# Unit Test
if __name__ == "__main__":
main()

8
py-scripts/lf_help_check.bash Executable file
View File

@@ -0,0 +1,8 @@
#!/bin/bash
# Sanity check: run `--help` on every Python script in this directory.
# A script FAILS if it cannot parse its arguments within 10 seconds
# (import errors, syntax errors, or a hang at import time).
#
# Fixed: iterate the glob directly instead of parsing `ls` output
# (ShellCheck SC2045), and quote expansions so filenames with spaces work.
for FILE in *.py
do
    echo "$FILE"
    (timeout 10 python3 "./${FILE}" --help > /dev/null && echo PASSED) || echo "FAILED ${FILE}"
done

239
py-scripts/lf_kpi_csv.py Normal file
View File

@@ -0,0 +1,239 @@
#!/usr/bin/env python3
"""
NAME: lf_kpi_csv.py
PURPOSE:
Common Library for generating kpi csv for LANforge output
KPI - Key Performance Indicators
SETUP:
None
EXAMPLE:
COPYWRITE
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
INCLUDE_IN_README
"""
# may need pandas if a data frame is passed in
# import pandas as pd
import csv
import time
import argparse
'''
Note the delimiter for the kpi.csv is a tab
kpi.csv : specific file that is used for the database, dashboard and blog post
A blank entry is a valid entry in some cases.
Date: date of run
test-rig : testbed that the tests are run on for example ct_us_001
test-tag : test specific information to differentiate the test, LANforge radios used, security modes (wpa2 , open)
dut-hw-version : hardware version of the device under test
dut-sw-version : software version of the device under test
dut-model-num : model number / name of the device under test
dut-serial-num : serial number / serial number of the device under test
test-priority : test-priority is arbitrary number, choosing under 95 means it goes down at bottom of blog report, and higher priority goes at top.
test-id : script or test name , AP Auto, wifi capacity, data plane, dfs
short-description : short description of the test
pass/fail : set blank for performance tests
numeric-score : this is the value for the y-axis (x-axis is a timestamp), numeric value of what was measured
test-details : what was measured in the numeric-score, e.g. bits per second, bytes per second, upload speed, minimum cx time (ms)
Units : units used for the numeric-score
Graph-Group - Items graphed together used by dashboard, For the lf_qa.py dashboard
'''
class lf_kpi_csv:
    """Helper for writing LANforge KPI (Key Performance Indicator) kpi.csv files.

    On construction this opens ``<_kpi_path>/<_kpi_filename>`` (tab-delimited),
    writes the header row, and pre-populates a row dictionary
    (``self.kpi_dict``) with the test-rig / DUT metadata, so callers only need
    to fill in the per-measurement fields and call ``kpi_csv_write_dict()``.
    """

    def __init__(self,
                 _kpi_headers=None,
                 _kpi_filename='kpi.csv',  # Currently this is the only file name accepted
                 _kpi_path="",
                 _kpi_test_rig="TEST_RIG",
                 _kpi_test_tag="TEST_TAG",
                 _kpi_dut_hw_version="HW_VERSION",
                 _kpi_dut_sw_version="SW_VERSION",
                 _kpi_dut_model_num="MODEL_NUM",
                 _kpi_dut_serial_num="SERIAL_NUM",
                 _kpi_test_id="TEST_ID"
                 ):
        # Default header set matches the kpi.csv schema consumed by the
        # dashboard / blog tooling (see module notes above).
        if _kpi_headers is None:
            _kpi_headers = ['Date', 'test-rig', 'test-tag', 'dut-hw-version', 'dut-sw-version', 'dut-model-num',
                            'dut-serial-num',
                            'test-priority', 'test-id', 'short-description', 'pass/fail', 'numeric-score',
                            'test details', 'Units', 'Graph-Group', 'Subtest-Pass', 'Subtest-Fail']
        self.kpi_headers = _kpi_headers
        self.kpi_filename = _kpi_filename
        self.kpi_full_path = ''
        self.kpi_file = ""
        self.kpi_path = _kpi_path
        self.kpi_test_rig = _kpi_test_rig
        self.kpi_test_tag = _kpi_test_tag
        self.kpi_dut_hw_version = _kpi_dut_hw_version
        self.kpi_dut_sw_version = _kpi_dut_sw_version
        self.kpi_dut_model_num = _kpi_dut_model_num
        self.kpi_dut_serial_num = _kpi_dut_serial_num
        self.kpi_test_id = _kpi_test_id
        self.kpi_rows = ""
        try:
            print("self.kpi_path {kpi_path}".format(kpi_path=self.kpi_path))
            print("self.kpi_filename {kpi_filename}".format(kpi_filename=self.kpi_filename))
            if self.kpi_path == "":
                kpifile = self.kpi_filename
            else:
                kpifile = self.kpi_path + '/' + self.kpi_filename
            print("kpifile {kpifile}".format(kpifile=kpifile))
            # newline='' is the documented way to open files handed to the
            # csv module (prevents doubled line endings on Windows).
            self.kpi_file = open(kpifile, 'w', newline='')
            self.kpi_writer = csv.DictWriter(self.kpi_file, delimiter="\t", fieldnames=self.kpi_headers)
            self.kpi_writer.writeheader()
        except OSError as error:
            # Best effort: keep running so the test itself can proceed even if
            # kpi.csv cannot be created.  Was a bare `except:`, which also
            # swallowed SystemExit/KeyboardInterrupt and hid the real error.
            print("lf_kpi_csv.py: {} WARNING unable to open: {}".format(self.kpi_file, error))

        # Template row: metadata filled in from constructor args, measurement
        # fields left blank for the caller to populate per result.
        self.kpi_dict = dict()
        self.kpi_dict['Date'] = '{date}'.format(date=int(time.time()))
        self.kpi_dict['test-rig'] = '{test_rig}'.format(test_rig=self.kpi_test_rig)
        self.kpi_dict['test-tag'] = '{test_tag}'.format(test_tag=self.kpi_test_tag)
        self.kpi_dict['dut-hw-version'] = '{dut_hw_version}'.format(dut_hw_version=self.kpi_dut_hw_version)
        self.kpi_dict['dut-sw-version'] = '{dut_sw_version}'.format(dut_sw_version=self.kpi_dut_sw_version)
        self.kpi_dict['dut-model-num'] = '{dut_model_num}'.format(dut_model_num=self.kpi_dut_model_num)
        self.kpi_dict['dut-serial-num'] = '{dut_serial_num}'.format(dut_serial_num=self.kpi_dut_serial_num)
        self.kpi_dict['test-priority'] = ''
        self.kpi_dict['test-id'] = '{test_id}'.format(test_id=self.kpi_test_id)
        self.kpi_dict['short-description'] = ''
        self.kpi_dict['pass/fail'] = ''
        self.kpi_dict['numeric-score'] = ''
        self.kpi_dict['test details'] = ''
        self.kpi_dict['Units'] = ''
        self.kpi_dict['Graph-Group'] = ''
        self.kpi_dict['Subtest-Pass'] = ''
        self.kpi_dict['Subtest-Fail'] = ''

    def kpi_csv_get_dict_update_time(self):
        """Refresh the Date field and return the (shared) row dictionary.

        NOTE(review): __init__ seeds 'Date' in epoch *seconds* while this
        method uses epoch *milliseconds* — confirm which unit the dashboard
        expects; behavior kept as-is here.
        """
        self.kpi_dict['Date'] = '{date}'.format(date=round(time.time() * 1000))
        return self.kpi_dict

    def kpi_csv_write_dict(self, kpi_dict):
        """Write one row to kpi.csv and flush so partial results survive a crash."""
        self.kpi_writer.writerow(kpi_dict)
        self.kpi_file.flush()
def main():
    """Unit test / usage example for the lf_kpi_csv library.

    Creates a kpi.csv under --local_lf_report_dir (or the CWD when blank)
    and writes three sample rows, demonstrating how to create, reuse and
    modify the row dictionary.
    """
    # arguments
    parser = argparse.ArgumentParser(
        prog='lf_kpi_csv.py',
        formatter_class=argparse.RawTextHelpFormatter,
        epilog='''\
lf_kpi_csv.py : unit test in lf_kpi_csv.py for exersiging lf_kpi_csv.py library
        ''',
        description='''\
lf_kpi_csv.py
-----------

Summary :
---------
lf_kpi_csv.py library :
Date: date of run
test-rig : testbed that the tests are run on for example ct_us_001
test-tag : test specific information to differenciate the test, LANforge radios used, security modes (wpa2 , open)
dut-hw-version : hardware version of the device under test
dut-sw-version : software version of the device under test
dut-model-num : model number / name of the device under test
dut-serial-num : serial number / serial number of the device under test
test-priority : test-priority is arbitrary number, choosing under 95 means it goes down at bottom of blog report, and higher priority goes at top.
test-id : script or test name , AP Auto, wifi capacity, data plane, dfs
short-description : short description of the test
pass/fail : set blank for performance tests
numeric-score : this is the value for the y-axis (x-axis is a timestamp), numeric value of what was measured
test-details : what was measured in the numeric-score, e.g. bits per second, bytes per second, upload speed, minimum cx time (ms)
Units : units used for the numeric-scort
Graph-Group - Items graphed together used by dashboard, For the lf_qa.py dashboard

Example :
This module is included to assist in filling out the kpi.csv correctly
The Unit test is used for helping to become familiar with the library
---------
        ''')
    parser.add_argument(
        '--local_lf_report_dir',
        help='--local_lf_report_dir override the report path, primary use when running test in test suite',
        default="")
    parser.add_argument("--test_rig", default="lanforge",
                        help="test rig for kpi.csv, testbed that the tests are run on")
    parser.add_argument("--test_tag", default="kpi_generation",
                        help="test tag for kpi.csv, test specific information to differenciate the test")
    parser.add_argument("--dut_hw_version", default="hw_01",
                        help="dut hw version for kpi.csv, hardware version of the device under test")
    parser.add_argument("--dut_sw_version", default="sw_01",
                        help="dut sw version for kpi.csv, software version of the device under test")
    parser.add_argument("--dut_model_num", default="can_ap",
                        help="dut model for kpi.csv, model number / name of the device under test")
    parser.add_argument("--test_priority", default="95",
                        help="dut model for kpi.csv, test-priority is arbitrary number")
    parser.add_argument("--test_id", default="kpi_unit_test", help="test-id for kpi.csv, script or test name")
    # Informational bare string (no runtime effect) documenting the row
    # fields that are filled in per-measurement rather than via arguments.
    '''
    Other values that are included in the kpi.csv row.
    short-description : short description of the test
    pass/fail : set blank for performance tests
    numeric-score : this is the value for the y-axis (x-axis is a timestamp), numeric value of what was measured
    test details : what was measured in the numeric-score, e.g. bits per second, bytes per second, upload speed, minimum cx time (ms)
    Units : units used for the numeric-scort
    Graph-Group - For the lf_qa.py dashboard
    '''
    args = parser.parse_args()

    # Get the report path to create the kpi.csv path
    # kpi_path = report.get_report_path() in normal use case would get from lf_report.py library
    kpi_csv = lf_kpi_csv(
        _kpi_path=args.local_lf_report_dir,
        _kpi_test_rig=args.test_rig,
        _kpi_test_tag=args.test_tag,
        _kpi_dut_hw_version=args.dut_hw_version,
        _kpi_dut_sw_version=args.dut_sw_version,
        _kpi_dut_model_num=args.dut_model_num,
        _kpi_test_id=args.test_id)
    # First row: fill the measurement fields on the template dict and write.
    results_dict = kpi_csv.kpi_dict
    results_dict['Graph-Group'] = "graph_group"
    results_dict['short-description'] = "short_description"
    results_dict['numeric-score'] = "100"
    results_dict['Units'] = "Mbps"

    print("results_dict {results_dict}".format(results_dict=results_dict))
    print("date {date}".format(date=results_dict['Date']))

    kpi_csv.kpi_csv_write_dict(results_dict)

    # reuse the dictionary
    results_dict['Graph-Group'] = "graph_group_1_5"
    results_dict['short-description'] = "short_description_1_5"
    results_dict['numeric-score'] = "99"
    results_dict['Units'] = "Mbps"
    kpi_csv.kpi_csv_write_dict(results_dict)

    # append to a row to the existing dictionary
    # NOTE(review): this aliases the SAME dict object as results_dict (no
    # copy is made), so the 'test-tag' change below also mutates
    # results_dict — presumably intentional for this demo; confirm.
    results_dict_2 = kpi_csv.kpi_dict

    # modify an entry
    results_dict_2['test-tag'] = 'kpi_generation_2'
    results_dict_2['Graph-Group'] = "graph_group"
    results_dict_2['short-description'] = "short_description"
    results_dict_2['numeric-score'] = "100"
    results_dict_2['Units'] = "Mbps"
    print("results_dict_2 {results_dict_2}".format(results_dict_2=results_dict_2))
    print("date 2 {date}".format(date=results_dict_2['Date']))

    kpi_csv.kpi_csv_write_dict(results_dict_2)


if __name__ == "__main__":
    main()

View File

@@ -146,14 +146,22 @@ class MeshTest(cvtest):
upload_speed="56Kbps",
download_speed="85%",
duration="60s",
enables=[],
disables=[],
raw_lines=[],
enables=None,
disables=None,
raw_lines=None,
raw_lines_file="",
sets=[],
sets=None,
):
super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)
if enables is None:
enables = []
if disables is None:
disables = []
if raw_lines is None:
raw_lines = []
if sets is None:
sets = []
self.lf_host = lf_host
self.lf_port = lf_port
self.lf_user = lf_user
@@ -225,14 +233,14 @@ def main():
Open this file in an editor and read the top notes for more details.
Example:
./lf_mesh_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \
--instance_name mesh-instance --config_name test_con --upstream 1.1.eth1 \
--raw_line 'selected_dut2: RootAP wactest 08:36:c9:19:47:40 (1)' \
--raw_line 'selected_dut5: RootAP wactest 08:36:c9:19:47:50 (2)' \
--duration 15s \
--download_speed 85% --upload_speed 56Kbps \
--raw_line 'velocity: 100' \
--raw_lines_file example-configs/mesh-ferndale-cfg.txt \
./lf_mesh_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \\
--instance_name mesh-instance --config_name test_con --upstream 1.1.eth1 \\
--raw_line 'selected_dut2: RootAP wactest 08:36:c9:19:47:40 (1)' \\
--raw_line 'selected_dut5: RootAP wactest 08:36:c9:19:47:50 (2)' \\
--duration 15s \\
--download_speed 85% --upload_speed 56Kbps \\
--raw_line 'velocity: 100' \\
--raw_lines_file example-configs/mesh-ferndale-cfg.txt \\
--test_rig Ferndale-Mesh-01 --pull_report
NOTE: There is quite a lot of config needed, see example-configs/mesh-ferndale-cfg.txt
@@ -246,9 +254,9 @@ def main():
parser.add_argument("-u", "--upstream", type=str, default="",
help="Upstream port for wifi capacity test ex. 1.1.eth2")
# argparse uses the % formatting so use %%
parser.add_argument("--download_speed", default="",
help="Specify requested download speed. Percentage of theoretical is also supported. Default: 85%")
help="Specify requested download speed. Percentage of theoretical is also supported. Default: 85%%")
parser.add_argument("--upload_speed", default="",
help="Specify requested upload speed. Percentage of theoretical is also supported. Default: 0")
parser.add_argument("--duration", default="",

View File

@@ -45,7 +45,7 @@ Realm = realm.Realm
class MultiPsk(Realm):
def __init__(self,
host=None,
port=None,
port=8080,
ssid=None,
input=None,
security=None,
@@ -57,8 +57,10 @@ class MultiPsk(Realm):
sta_prefix="sta",
debug_=False,
):
self.host = host
self.port = port
super().__init__(lfclient_host=host,
lfclient_port=port),
self.lfclient_host = host
self.lfclient_port = port
self.ssid = ssid
self.input = input
self.security = security
@@ -69,8 +71,7 @@ class MultiPsk(Realm):
self.resource = resource
self.sta_prefix = sta_prefix
self.debug = debug_
self.local_realm = realm.Realm(lfclient_host=self.host, lfclient_port=self.port)
self.station_profile = self.local_realm.new_station_profile()
self.station_profile = self.new_station_profile()
def build(self):
station_list = []
@@ -84,30 +85,30 @@ class MultiPsk(Realm):
else:
station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=self.start_id,
end_id_=input['num_station'] - 1, padding_number_=100,
radio=input['radio'])
radio=self.radio)
# implementation for non vlan pending ****
print("creating stations")
self.station_profile.use_security(self.security, self.ssid, str(input['password']))
self.station_profile.use_security(self.security, self.ssid, self.passwd)
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
self.station_profile.set_command_param("set_port", "report_timer", 1500)
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
self.station_profile.create(radio=input['radio'], sta_names_=station_list, debug=self.local_realm.debug)
self.local_realm.wait_until_ports_appear(sta_list=station_list)
self.station_profile.create(radio=self.radio, sta_names_=station_list, debug=self.debug)
self.wait_until_ports_appear(sta_list=station_list)
self.station_profile.admin_up()
if self.local_realm.wait_for_ip(station_list, timeout_sec=120):
if self.wait_for_ip(station_list, timeout_sec=120):
print("All stations got IPs")
else:
print("Stations failed to get IPs")
print("create udp endp")
self.cx_profile_udp = self.local_realm.new_l3_cx_profile()
self.cx_profile_udp = self.new_l3_cx_profile()
self.cx_profile_udp.side_a_min_bps = 128000
self.cx_profile_udp.side_b_min_bps = 128000
self.cx_profile_udp.side_a_min_pdu = 1200
self.cx_profile_udp.side_b_min_pdu = 1500
self.cx_profile_udp.report_timer = 1000
self.cx_profile_udp.name_prefix = "udp"
port_list = list(self.local_realm.find_ports_like("%s+" % self.sta_prefix))
port_list = list(self.find_ports_like("%s+" % self.sta_prefix))
# print("port list", port_list)
if (port_list is None) or (len(port_list) < 1):
raise ValueError("Unable to find ports named '%s'+" % self.sta_prefix)
@@ -118,13 +119,13 @@ class MultiPsk(Realm):
# Create TCP endpoints
print("create tcp endp")
self.l3_tcp_profile = self.local_realm.new_l3_cx_profile()
self.l3_tcp_profile = self.new_l3_cx_profile()
self.l3_tcp_profile.side_a_min_bps = 128000
self.l3_tcp_profile.side_b_min_bps = 56000
self.l3_tcp_profile.name_prefix = "tcp"
self.l3_tcp_profile.report_timer = 1000
self.l3_tcp_profile.create(endp_type="lf_tcp",
side_a=list(self.local_realm.find_ports_like("%s+" % self.sta_prefix)),
side_a=list(self.find_ports_like("%s+" % self.sta_prefix)),
side_b="%d.%s" % (self.resource, input['upstream']),
suppress_related_commands=True)
@@ -140,7 +141,7 @@ class MultiPsk(Realm):
if "." in i['upstream']:
# print(str(i['upstream']) + " is a vlan upstream port")
print("checking its ip ..")
data = self.local_realm.json_get("ports/list?fields=IP")
data = self.json_get("ports/list?fields=IP")
for val in data["interfaces"]:
for j in val:
if "1." + str(self.resource) + "." + str(i['upstream']) == j:
@@ -157,7 +158,7 @@ class MultiPsk(Realm):
if "." not in i['upstream']:
# print(str(i['upstream']) + " is not an vlan upstream port")
print("checking its ip ..")
data = self.local_realm.json_get("ports/list?fields=IP")
data = self.json_get("ports/list?fields=IP")
for val in data["interfaces"]:
for j in val:
if "1." + str(self.resource) + "." + str(i['upstream']) == j:
@@ -168,11 +169,8 @@ class MultiPsk(Realm):
return non_vlan_ips
def get_sta_ip(self):
# this function gives station ip dict eg{'eth2.100': '172.17.0.100'}
# self.input = [{'password': 'lanforge1', 'upstream': 'eth2.100', 'mac': '', 'num_station': 1, 'radio': 'wiphy4'}, {'password': 'lanforge2', 'upstream': 'eth2.200', 'mac': '', 'num_station': 1, 'radio': 'wiphy4'}, {'password': 'lanforge3', 'upstream': 'eth2', 'mac': '', 'num_station': 1, 'radio': 'wiphy0'}]
# port_list = ['1.1.sta200', '1.1.sta00', '1.1.sta100']
station_ip = {}
port_list = list(self.local_realm.find_ports_like("%s+" % self.sta_prefix))
port_list = list(self.find_ports_like("%s+" % self.sta_prefix))
# print("port list", port_list)
# port list ['1.1.sta200', '1.1.sta00', '1.1.sta100']
for name, id in zip(port_list, self.input):
@@ -182,7 +180,7 @@ class MultiPsk(Realm):
# print(x)
if name == "1." + str(self.resource) + ".sta" + str(x):
data = self.local_realm.json_get("ports/list?fields=IP")
data = self.json_get("ports/list?fields=IP")
for i in data["interfaces"]:
# print(i)
for j in i:
@@ -227,7 +225,7 @@ class MultiPsk(Realm):
# print(x)
if name == "1." + str(self.resource) + ".sta" + str(x):
data = self.local_realm.json_get("ports/list?fields=IP")
data = self.json_get("ports/list?fields=IP")
for i in data["interfaces"]:
# print(i)
for j in i:
@@ -241,7 +239,7 @@ class MultiPsk(Realm):
def get_non_vlan_sta_ip(self):
station_nonvlan_ip = {}
x = ""
port_list = list(self.local_realm.find_ports_like("%s+" % self.sta_prefix))
port_list = list(self.find_ports_like("%s+" % self.sta_prefix))
# print("port list", port_list)
for id in self.input:
if "." not in id['upstream']:
@@ -249,7 +247,7 @@ class MultiPsk(Realm):
# print(x)
for name in port_list:
if name == "1.1.sta00":
data = self.local_realm.json_get("ports/list?fields=IP")
data = self.json_get("ports/list?fields=IP")
for i in data["interfaces"]:
# print(i)
for j in i:
@@ -270,11 +268,10 @@ class MultiPsk(Realm):
y = station_ip[j].split('.')
if x[0] == y[0] and x[1] == y[1]:
print("station got ip from vlan")
x = "Pass"
return "Pass"
else:
print("station did not got ip from vlan")
x = "Fail"
return x
return "Fail"
def compare_nonvlan_ip_nat(self):
non_vlan_sta_ip = self.get_non_vlan_sta_ip()
@@ -312,27 +309,22 @@ class MultiPsk(Realm):
self.cx_profile_udp.cleanup()
self.l3_tcp_profile.cleanup()
self.station_profile.cleanup()
LFUtils.wait_until_ports_disappear(base_url=self.local_realm.lfclient_url, port_list=self.station_profile.station_names,
LFUtils.wait_until_ports_disappear(base_url=self.lfclient_host, port_list=self.station_profile.station_names,
debug=self.debug)
print("Test Completed")
def main():
parser = argparse.ArgumentParser(
parser = Realm.create_basic_argparse(
prog="lf_multipsk.py",
formatter_class=argparse.RawTextHelpFormatter,
description="lanforge webpage download Test Script")
parser.add_argument('--mgr', help='hostname for where LANforge GUI is running', default='localhost')
parser.add_argument('--mgr_port', help='port LANforge GUI HTTP service is running on', default=8080)
parser.add_argument('--ssid', help='WiFi SSID for client to associate to')
parser.add_argument('--security', help='WiFi Security protocol: {open|wep|wpa2|wpa3', default="wpa2")
parser.add_argument('--mode', help="specify mode of ap eg BRIDGE or NAT", default="BRIDGE")
parser.add_argument('--n_vlan', help="type number of vlan using in test eg 1 or 2", default=1)
# parser.add_argument('--input', nargs="+", help="specify list of parameters like passwords,upstream,mac address, number of clients and radio as input, eg password@123,eth2.100,"",1,wiphy0 lanforge@123,eth2.100,"",1,wiphy1")
parser.add_argument('--mode', help="Mode for lf_multipsk", default=None)
args = parser.parse_args()
input_data = [{
"password": "lanforge1",
"password": args.passwd,
"upstream": "eth2.100",
"mac": "",
"num_station": 1,
@@ -364,8 +356,11 @@ def main():
multi_obj = MultiPsk(host=args.mgr,
port=args.mgr_port,
ssid=args.ssid,
passwd=args.passwd,
input=input_data,
security=args.security)
security=args.security,
debug_=args.debug,
radio=args.radio)
multi_obj.build()
multi_obj.start()

76
py-scripts/lf_port_probe.py Executable file
View File

@@ -0,0 +1,76 @@
#!/usr/bin/env python3
import json
import os
import pprint
import sys
if sys.version_info[0] != 3:
print("This script requires Python 3")
exit(1)
if 'py-json' not in sys.path:
sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))
from time import sleep
from LANforge.lfcli_base import LFCliBase
# see https://stackoverflow.com/questions/9295439/python-json-loads-fails-with-valueerror-invalid-control-character-at-line-1-c/16544933#16544933
# re-load and reexport JSON with strict=False?
class ProbePort2(LFCliBase):
def __init__(self,
lfhost=None,
lfport=None,
debug=False,
eid_str=None):
super().__init__(_lfjson_host=lfhost,
_lfjson_port=lfport,
_debug=debug)
hunks = eid_str.split(".")
self.probepath = "/probe/1/%s/%s" % (hunks[-2], hunks[-1])
# self.decoder = json.JSONDecoder()
def run(self):
self.json_post(self.probepath, {})
sleep(0.2)
response = self.json_get(self.probepath)
if not response:
print("problem probing port %s" % self.probepath)
exit(1)
# pprint.pprint(response)
if "probe-results" not in response:
print("problem probing port %s" % self.probepath)
exit(1)
probe_res = response["probe-results"][0]
#pprint.pprint(probe_res)
for (key, value) in probe_res.items():
# probe_results = [key]
print("port "+key)
# pprint.pprint(value['probe results'])
xlated_results = str(value['probe results']).replace("\\n", "\n").replace("\\r", "\r").replace("\\t", "\t")
print(xlated_results)
def main():
parser = LFCliBase.create_bare_argparse(
prog=__name__,
description='''\
Example:
./port_probe.py --port 1.1.eth0
''')
parser.add_argument('--mode', help='Used to force mode of stations')
parser.add_argument('--port_eid', help='EID of station to be used', default="1.1.eth0")
args = parser.parse_args()
probe = ProbePort2(lfhost=args.mgr,
lfport=args.mgr_port,
debug=args.debug,
eid_str=args.port_eid)
probe.run()
if __name__ == "__main__":
main()

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
'''
"""
NAME: lf_report.py
PURPOSE:
@@ -23,7 +23,7 @@ LICENSE:
INCLUDE_IN_README
'''
"""
# CAUTION: adding imports to this file which are not in update_dependencies.py is not advised
import os
import shutil
@@ -31,25 +31,27 @@ import datetime
import pandas as pd
import pdfkit
import argparse
# internal candela references included during intial phases, to be deleted at future date
# https://candelatech.atlassian.net/wiki/spaces/LANFORGE/pages/372703360/Scripting+Data+Collection+March+2021
# base report class
class lf_report():
class lf_report:
def __init__(self,
# _path the report directory under which the report directories will be created.
_path="/home/lanforge/html-reports",
_alt_path="",
_date="",
_title="LANForge Test Run Heading",
_title="LANForge Unit Test Run Heading",
_table_title="LANForge Table Heading",
_graph_title="LANForge Graph Title",
_obj="",
_obj_title="",
_output_html="outfile.html",
_output_pdf="outfile.pdf",
_results_dir_name="LANforge_Test_Results",
_results_dir_name="LANforge_Test_Results_Unit_Test",
_output_format='html', # pass in on the write functionality, current not used
_dataframe="",
_path_date_time="",
@@ -76,6 +78,7 @@ class lf_report():
self.output_html = _output_html
self.path_date_time = _path_date_time
self.write_output_html = ""
self.write_output_index_html = ""
self.output_pdf = _output_pdf
self.write_output_pdf = ""
self.banner_html = ""
@@ -271,6 +274,17 @@ class lf_report():
print("write_html failed")
return self.write_output_html
def write_index_html(self):
self.write_output_index_html = str(self.path_date_time) + '/' + str("index.html")
print("write_output_index_html: {}".format(self.write_output_index_html))
try:
test_file = open(self.write_output_index_html, "w")
test_file.write(self.html)
test_file.close()
except:
print("write_index_html failed")
return self.write_output_index_html
def write_html_with_timestamp(self):
self.write_output_html = "{}/{}-{}".format(self.path_date_time, self.date, self.output_html)
print("write_output_html: {}".format(self.write_output_html))
@@ -561,6 +575,16 @@ function copyTextToClipboard(ele) {
# Unit Test
if __name__ == "__main__":
parser = argparse.ArgumentParser(
prog="lf_report.py",
formatter_class=argparse.RawTextHelpFormatter,
description="Reporting library Unit Test")
parser.add_argument('--lfmgr', help='sample argument: where LANforge GUI is running', default='localhost')
# the args parser is not really used , this is so the report is not generated when testing
# the imports with --help
args = parser.parse_args()
print("LANforge manager {lfmgr}".format(lfmgr=args.lfmgr))
# Testing: generate data frame
dataframe = pd.DataFrame({
'product': ['CT521a-264-1ac-1n', 'CT521a-1ac-1ax', 'CT522-264-1ac2-1n', 'CT523c-2ac2-db-10g-cu',
@@ -605,4 +629,3 @@ if __name__ == "__main__":
report.write_pdf()
print("report path {}".format(report.get_path()))

View File

@@ -26,6 +26,7 @@ import numpy as np
import pandas as pd
import pdfkit
import random
import argparse
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
@@ -38,10 +39,45 @@ lf_scatter_graph = lf_graph.lf_scatter_graph
lf_stacked_graph = lf_graph.lf_stacked_graph
lf_horizontal_stacked_graph = lf_graph.lf_horizontal_stacked_graph
# Unit Test
if __name__ == "__main__":
def main():
# Testing: generate data frame
parser = argparse.ArgumentParser(
prog="lf_report_test.py",
formatter_class=argparse.RawTextHelpFormatter,
description='''\
-----------------
NAME: lf_report_test.py
PURPOSE:
Common file for testing lf_report and lf_graph Library generates html and pdf output
SETUP:
/lanforge/html-reports directory needs to be present or output generated in local file
EXAMPLE:
./lf_report_test.py : currently script does not accept input
COPYWRITE
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.
INCLUDE_IN_README
''')
parser.add_argument(
'--mgr',
'--lfmgr',
dest='lfmgr',
help='sample argument: where LANforge GUI is running',
default='localhost')
# the args parser is not really used , this is so the report is not generated when testing
# the imports with --help
args = parser.parse_args()
print("LANforge manager {lfmgr}".format(lfmgr=args.lfmgr))
dataframe = pd.DataFrame({
'product': ['CT521a-264-1ac-1n', 'CT521a-1ac-1ax', 'CT522-264-1ac2-1n', 'CT523c-2ac2-db-10g-cu',
'CT523c-3ac2-db-10g-cu', 'CT523c-8ax-ac10g-cu', 'CT523c-192-2ac2-1ac-10g'],
@@ -149,7 +185,8 @@ if __name__ == "__main__":
report.move_graph_image()
report.build_graph()
# this will generate graph which is independent,we can customize the value with different colors
# this will generate graph which is independent,we can customize the value
# with different colors
graph2 = lf_scatter_graph(_x_data_set=set1, _y_data_set=[45, 67, 45, 34], _values=[0, 0, 0, 1],
_xaxis_name="x-axis",
_yaxis_name="y-axis",
@@ -165,7 +202,8 @@ if __name__ == "__main__":
report.move_graph_image()
report.build_graph()
dataset = [["1", "2", "3", "4"], [12, 45, 67, 34], [23, 67, 23, 12], [25, 45, 34, 23]]
dataset = [["1", "2", "3", "4"], [12, 45, 67, 34],
[23, 67, 23, 12], [25, 45, 34, 23]]
graph = lf_stacked_graph(_data_set=dataset,
_xaxis_name="Stations",
_yaxis_name="Login PASS/FAIL",
@@ -215,3 +253,5 @@ if __name__ == "__main__":
# report.write_pdf(_page_size = 'Legal', _orientation='Portrait')
# report.generate_report()
if __name__ == "__main__":
main()

Some files were not shown because too many files have changed in this diff Show More