Mirror of https://github.com/Telecominfraproject/wlan-lanforge-scripts.git (synced 2025-11-01 19:28:00 +00:00)

Commit: fixed merge conflicts
Signed-off-by: shivamcandela <shivam.thakur@candelatech.com>
2  .gitignore  (vendored)
@@ -5,3 +5,5 @@
 *.iml
 **/*.iml
 .idea
+*.env
+*.zip
60  Quali/lanforge_resource/.gitignore  (vendored, new file)
@@ -0,0 +1,60 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/
cloudshell_config.yml
4  Quali/lanforge_resource/TOSCA-Metadata/TOSCA.meta  (new file)
@@ -0,0 +1,4 @@
TOSCA-Meta-File-Version: 1.0
CSAR-Version: 0.1.0
Created-By: Anonymous
Entry-Definitions: shell-definition.yaml
BIN  Quali/lanforge_resource/canvil2-64x64-gray-yel-ico.png  (new file)
Binary file not shown (2.3 KiB).
53  Quali/lanforge_resource/deployment.xml  (new file)
@@ -0,0 +1,53 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<properties>

    <!-- The address of the Quali server on which to deploy, mandatory -->
    <serverRootAddress>localhost</serverRootAddress>

    <!-- The port of the Quali server on which to deploy, defaults to "8029" -->
    <port>8029</port>

    <!-- The server admin username, password and domain to use when deploying -->
    <username>YOUR_USERNAME</username>
    <password>YOUR_PASSWORD</password>
    <domain>Global</domain>

    <!-- Simple patterns to filter when sending the driver to the server, separated by semicolons (e.g. "file.xml;logs/", also supports regular expressions).
         On top of the patterns specified here the plugin will automatically filter the "deployment/" and ".idea/" folders and the "deployment.xml" file -->
    <fileFilters>dont_upload_me.xml</fileFilters>

    <!-- The drivers to update, holds one or more drivers -->
    <drivers>
        <!-- runFromLocalProject - Decides whether to run the driver from the current project directory for debugging purposes, defaults to "false" -->
        <!-- waitForDebugger - When `runFromLocalProject` is enabled, decides whether to wait for a debugger to attach before running any Python driver code, defaults to "false" -->
        <!-- sourceRootFolder - The folder to refer to as the project source root (if specified, the folder will be zipped and deployed instead of the whole project), defaults to the root project folder -->
        <driver runFromLocalProject="true" waitForDebugger="true" sourceRootFolder="lanforge-resource">
            <!-- A list of paths to the driver's files or folders relative to the project's root.
                 May be a path to a directory, in which case all the files and folders under the directory are added into the driver's zip file.
                 If the <sources> element is not specified, all the files under the project are added to the driver's zip file -->
            <sources>
                <source>src</source>
            </sources>
            <!-- The driver name of the driver to update -->
            <targetName>LanforgeResourceDriver</targetName>
        </driver>
    </drivers>

    <!-- The scripts to update, holds one or more scripts -->
    <!-- A list of paths to the script's files or folders relative to the project's root.
         If the <sources> element is not specified, all the files under the project are added to the script's zip file.
         If only one file is specified, the file will not be compressed into a zip file. -->
    <!--
    <scripts>
        <script>
            <sources>
                <source>script1.py</source>
            </sources>
            <targetName>scriptToUpdate</targetName>
        </script>
    </scripts>
    -->
</properties>
3  Quali/lanforge_resource/docs/readme.rst  (new file)
@@ -0,0 +1,3 @@
.. _readme:

.. include:: ../README.rst
45  Quali/lanforge_resource/shell-definition.yaml  (new file)
@@ -0,0 +1,45 @@
tosca_definitions_version: tosca_simple_yaml_1_0

metadata:
  template_name: Lanforge Resource
  template_author: Anonymous
  template_version: 0.1.0
  template_icon: shell-icon.png

description: >
  TOSCA based resource shell

imports:
  - cloudshell_standard: cloudshell_resource_standard_2_0_3.yaml

node_types:

  vendor.resource.Lanforge Resource:
    derived_from: cloudshell.nodes.GenericResource
    #properties:
    #  my_property:
    #    type: string          # optional values: string, integer, float, boolean, cloudshell.datatypes.Password
    #    default: fast
    #    description: Some attribute description
    #    constraints:
    #      - valid_values: [fast, slow]
    capabilities:
      auto_discovery_capability:
        type: cloudshell.capabilities.AutoDiscovery
        properties:
          enable_auto_discovery:
            type: boolean
            default: true
          auto_discovery_description:
            type: string
            default: Describe the auto discovery
          inventory_description:
            type: string
            default: Describe the resource shell template
    artifacts:
      icon:
        file: canvil2-64x64-gray-yel-ico.png
        type: tosca.artifacts.File
      driver:
        file: LanforgeResourceDriver.zip
        type: tosca.artifacts.File
BIN  Quali/lanforge_resource/shell-icon.png  (new file)
Binary file not shown (461 B).
1029  Quali/lanforge_resource/src/data_model.py  (new file)
File diff suppressed because it is too large.
379  Quali/lanforge_resource/src/driver.py  (new executable file)
@@ -0,0 +1,379 @@
from cloudshell.shell.core.resource_driver_interface import ResourceDriverInterface
from cloudshell.shell.core.driver_context import InitCommandContext, ResourceCommandContext, AutoLoadResource, \
    AutoLoadAttribute, AutoLoadDetails, CancellationContext
from cloudshell.shell.core.session.cloudshell_session import CloudShellSessionContext
from cloudshell.api.cloudshell_api import CloudShellAPISession
from cloudshell.helpers.scripts.cloudshell_scripts_helpers import get_api_session, get_reservation_context_details
import cloudshell.helpers.scripts.cloudshell_scripts_helpers as script_help
import cloudshell.helpers.scripts.cloudshell_dev_helpers as dev_helpers
# from cloudshell.shell.core.resource_driver_interface import ResourceDriverInterface
# from cloudshell.shell.core.context import InitCommandContext, ResourceCommandContext
import mock
from data_model import *  # run 'shellfoundry generate' to generate data model classes
import subprocess
import sys
import os
import importlib
import paramiko
from scp import SCPClient
import requests
import datetime

# command = "./lanforge-scripts/py-scripts/update_dependencies.py"
# print("running:[{}]".format(command))
# process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
# outs, errs = process.communicate()
# print(outs)
# print(errs)

# if 'lanforge-scripts' not in sys.path:
#     sys.path.append('./lanforge-scripts')

# create_wanlink = importlib.import_module("lanforge-scripts.py-json.create_wanlink")
# create_l3 = importlib.import_module("lanforge-scripts.py-scripts.create_l3")
# CreateL3 = create_l3.CreateL3


class LanforgeResourceDriver(ResourceDriverInterface):

    def __init__(self):
        """
        ctor must be without arguments, it is created with reflection at run time
        """
        pass

    def initialize(self, context):
        """
        Initialize the driver session; this function is called every time a new instance of the driver is created.
        This is a good place to load and cache the driver configuration, initiate sessions etc.
        :param InitCommandContext context: the context the command runs on
        """
        pass

    def cleanup(self):
        """
        Destroy the driver session; this function is called every time a driver instance is destroyed.
        This is a good place to close any open sessions and finish writing to log files.
        """
        pass

    def get_inventory(self, context):
        """
        Discovers the resource structure and attributes.
        :param AutoLoadCommandContext context: the context the command runs on
        :return: Attribute and sub-resource information for the Shell resource; you can return an AutoLoadDetails object
        :rtype: AutoLoadDetails
        """
        # See below some example code demonstrating how to return the resource structure and attributes.
        # In real life, this code will be preceded by SNMP/other calls to the resource details and will not be static.
        # Run 'shellfoundry generate' in order to create classes that represent your data model.
        '''
        resource = LanforgeResource.create_from_context(context)
        resource.vendor = 'specify the shell vendor'
        resource.model = 'specify the shell model'

        port1 = ResourcePort('Port 1')
        port1.ipv4_address = '192.168.10.7'
        resource.add_sub_resource('1', port1)

        return resource.create_autoload_details()
        '''
        return AutoLoadDetails([], [])

    def orchestration_save(self, context, cancellation_context, mode, custom_params):
        """
        Saves the Shell state and returns a description of the saved artifacts and information.
        This command is intended for API use only by sandbox orchestration scripts to implement
        a save and restore workflow.
        :param ResourceCommandContext context: the context object containing resource and reservation info
        :param CancellationContext cancellation_context: Object to signal a request for cancellation. Must be enabled in drivermetadata.xml as well
        :param str mode: Snapshot save mode, can be one of two values 'shallow' (default) or 'deep'
        :param str custom_params: Set of custom parameters for the save operation
        :return: SavedResults serialized as JSON
        :rtype: OrchestrationSaveResult
        """
        # See below an example implementation; it uses jsonpickle for serialization.
        # To use this sample, you'll need to add jsonpickle to your requirements.txt file.
        # The JSON schema is defined at:
        # https://github.com/QualiSystems/sandbox_orchestration_standard/blob/master/save%20%26%20restore/saved_artifact_info.schema.json
        # You can find more information and examples in the spec document at
        # https://github.com/QualiSystems/sandbox_orchestration_standard/blob/master/save%20%26%20restore/save%20%26%20restore%20standard.md
        '''
        # By convention, all dates should be UTC
        created_date = datetime.datetime.utcnow()

        # This can be any unique identifier which can later be used to retrieve the artifact,
        # such as a filepath etc.
        identifier = created_date.strftime('%y_%m_%d %H_%M_%S_%f')

        orchestration_saved_artifact = OrchestrationSavedArtifact('REPLACE_WITH_ARTIFACT_TYPE', identifier)

        saved_artifacts_info = OrchestrationSavedArtifactInfo(
            resource_name="some_resource",
            created_date=created_date,
            restore_rules=OrchestrationRestoreRules(requires_same_resource=True),
            saved_artifact=orchestration_saved_artifact)

        return OrchestrationSaveResult(saved_artifacts_info)
        '''
        pass

    def orchestration_restore(self, context, cancellation_context, saved_artifact_info, custom_params):
        """
        Restores a saved artifact previously saved by this Shell driver using the orchestration_save function.
        :param ResourceCommandContext context: The context object for the command with resource and reservation info
        :param CancellationContext cancellation_context: Object to signal a request for cancellation. Must be enabled in drivermetadata.xml as well
        :param str saved_artifact_info: A JSON string representing the state to restore including saved artifacts and info
        :param str custom_params: Set of custom parameters for the restore operation
        :return: None
        """
        '''
        # The saved_details JSON will be defined according to the JSON Schema and is the same object returned via the
        # orchestration save function.
        # Example input:
        # {
        #     "saved_artifact": {
        #         "artifact_type": "REPLACE_WITH_ARTIFACT_TYPE",
        #         "identifier": "16_08_09 11_21_35_657000"
        #     },
        #     "resource_name": "some_resource",
        #     "restore_rules": {
        #         "requires_same_resource": true
        #     },
        #     "created_date": "2016-08-09T11:21:35.657000"
        # }

        # The example code below just parses and prints the saved artifact identifier
        saved_details_object = json.loads(saved_details)
        return saved_details_object[u'saved_artifact'][u'identifier']
        '''
        pass

    def attach_file(self, report_server, resid, file_path, user, password, domain, filename):
        # st = datetime.datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M%S')
        data = {
            'username': user,
            'password': password,
            'domain': domain
        }
        qq = 'Basic ' + requests.put(
            url='http://' + report_server + ':9000/API/Auth/Login',
            data=data
        ).text[1:-1]
        head = {
            'Authorization': qq,
        }
        dat_json = {
            "reservationId": resid,
            "saveFileAs": filename,
            "overwriteIfExists": "true",
        }

        with open(file_path, 'rb') as upload_file:
            xx = requests.post(
                url='http://' + report_server + ':9000/API/Package/AttachFileToReservation',
                headers=head,
                data=dat_json,
                files={'QualiPackage': upload_file}
            )
        return xx

    def send_command(self, context, cmd):
        msg = ""
        resource = LanforgeResource.create_from_context(context)
        session = CloudShellAPISession(host=context.connectivity.server_address,
                                       token_id=context.connectivity.admin_auth_token,
                                       domain=context.reservation.domain)
        resource_model_name = resource.cloudshell_model_name
        terminal_ip = context.resource.address
        terminal_user = context.resource.attributes[f'{resource_model_name}.User']
        terminal_pass = session.DecryptPassword(context.resource.attributes[f'{resource_model_name}.Password']).Value

        msg += f"Initializing SSH connection to {terminal_ip}, with user {terminal_user} and password {terminal_pass}\n"
        s = paramiko.SSHClient()
        s.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        s.connect(hostname=terminal_ip, username=terminal_user, password=terminal_pass)

        print(f"running:[{cmd}]")
        (stdin, stdout, stderr) = s.exec_command(cmd)

        output = ''
        errors = ''
        for line in stdout.readlines():
            output += line
        for line in stderr.readlines():
            errors += line
        msg += output + errors
        s.close()
        return msg

    def example_command(self, context):
        """
        this is my example command
        :param ResourceCommandContext context:
        :return: str
        """
        resource = LanforgeResource.create_from_context(context)
        session = CloudShellAPISession(host=context.connectivity.server_address,
                                       token_id=context.connectivity.admin_auth_token,
                                       domain=context.reservation.domain)

        resource_model_name = resource.cloudshell_model_name
        password = session.DecryptPassword(context.resource.attributes[f'{resource_model_name}.Password']).Value
        username = context.resource.attributes[f'{resource_model_name}.User']

        msg = f"My resource {resource.name} at address {context.resource.address} has model name {resource_model_name}. "
        msg += f"The username is {username} and password is {password}."
        return msg

    def create_wanlink(self, context, name, latency, rate):
        cmd = "/home/lanforge/lanforge-scripts/py-json/create_wanlink.py --host {host} --port_A {port_A} --port_B {port_B} --name \"{name}\" --latency \"{latency}\" --latency_A \"{latency_A}\" --latency_B \"{latency_B}\" --rate {rate} --rate_A {rate_A} --rate_B {rate_B} --jitter {jitter} --jitter_A {jitter_A} --jitter_B {jitter_B} --jitter_freq_A {jitter_freq_A} --jitter_freq_B {jitter_freq_B} --drop_A {drop_A} --drop_B {drop_B}".format(
            host="localhost",
            port_A="eth1",
            port_B="eth2",
            name=name,
            latency=latency,
            latency_A=latency,
            latency_B=latency,
            rate=rate,
            rate_A=rate,
            rate_B=rate,
            jitter="0",
            jitter_A="0",
            jitter_B="0",
            jitter_freq_A="0",
            jitter_freq_B="0",
            drop_A="0",
            drop_B="0"
        )

        output = self.send_command(context, cmd)
        print(output)
        return output

    def create_l3(self, context, name, min_rate_a, min_rate_b, endp_a, endp_b):
        cmd = f"/home/lanforge/lanforge-scripts/py-scripts/create_l3.py --endp_a \"{endp_a}\" --endp_b \"{endp_b}\" --min_rate_a \"{min_rate_a}\" --min_rate_b \"{min_rate_b}\""

        output = self.send_command(context, cmd)
        print(output)
        return output

    def pull_reports(self, hostname="", port=22,
                     username="lanforge", password="lanforge",
                     report_location="/home/lanforge/html-reports/",
                     report_dir="./"):
        # copy generated HTML reports off the LANforge host
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(hostname=hostname, username=username, password=password, port=port,
                    allow_agent=False, look_for_keys=False)

        with SCPClient(ssh.get_transport()) as scp:
            scp.get(remote_path=report_location, local_path=report_dir, recursive=True)
            scp.close()

    def dataplane_test(self, context, instance_name, upstream, station, duration, download_speed, upload_speed,
                       traffic_types, local_lf_report_dir, output_report_dir, mgr):
        cmd = '''/home/lanforge/lanforge-scripts/py-scripts/lf_dataplane_test.py --mgr {mgr} --port 8080 --lf_user lanforge --lf_password lanforge \
        --instance_name {instance_name} --config_name test_con \
        --upstream {upstream} --station {station} --duration {duration} \
        --download_speed {download_speed} --upload_speed {upload_speed} \
        --raw_line 'pkts: 256;1024' \
        --raw_line 'directions: DUT Transmit' \
        --raw_line 'traffic_types: {traffic_types}' \
        --test_rig juicer --pull_report \
        --local_lf_report_dir {local_lf_report_dir}'''.format(
            instance_name=instance_name,
            mgr=mgr,
            upstream=upstream,
            station=station,
            duration=duration,
            download_speed=download_speed,
            upload_speed=upload_speed,
            traffic_types=traffic_types,
            local_lf_report_dir=local_lf_report_dir
        )

        output = self.send_command(context, cmd)
        print(output)

        resource = LanforgeResource.create_from_context(context)
        session = CloudShellAPISession(host=context.connectivity.server_address,
                                       token_id=context.connectivity.admin_auth_token,
                                       domain=context.reservation.domain)
        terminal_ip = context.resource.address
        resource_model_name = resource.cloudshell_model_name
        terminal_pass = session.DecryptPassword(context.resource.attributes[f'{resource_model_name}.Password']).Value
        terminal_user = context.resource.attributes[f'{resource_model_name}.User']
        reservation_id = context.reservation.reservation_id
        api = CloudShellSessionContext(context).get_api()
        cwd = os.getcwd()
        # session.AttachFileToReservation(context.reservation.reservation_id, f"C:/Users/Administrator/{output_report_dir}", "C:/Users/Administrator/AppData/Local/Temp", True)
        self.pull_reports(hostname=context.resource.address, port=22,
                          username=terminal_user, password=terminal_pass,
                          report_location="/home/lanforge/html-reports/",
                          report_dir=f"C:/Users/Administrator/{output_report_dir}")

        # api = get_api_session()
        # api.WriteMessageToReservationOutput(reservation_id, f"Attaching report to sandbox.")
        api.WriteMessageToReservationOutput(reservation_id, f"The current working directory is {cwd}")
        self.attach_file(
            report_server=context.connectivity.server_address,
            resid=context.reservation.reservation_id,
            user='admin',
            password='admin',
            domain=context.reservation.domain,
            file_path="C:/Users/Administrator/Desktop/My_Reports/html-reports/dataplane-2021-10-13-03-32-40/dataplane-report-2021-10-13-03-31-50.pdf",
            filename="C:/Users/Administrator/Desktop/test_report.txt"
        )
        return output

    def scenario(self, context, load):
        cmd = f"/home/lanforge/lanforge-scripts/py-scripts/scenario.py --load {load}"

        output = self.send_command(context, cmd)
        print(output)
        return output


if __name__ == "__main__":
    # setup for mock-debug environment
    shell_name = "LanforgeResource"
    cancellation_context = mock.create_autospec(CancellationContext)
    context = mock.create_autospec(ResourceCommandContext)
    context.resource = mock.MagicMock()
    context.reservation = mock.MagicMock()
    context.connectivity = mock.MagicMock()
    context.reservation.reservation_id = "<RESERVATION_ID>"
    context.resource.address = "192.168.100.176"
    context.resource.name = "Lanforge_Resource"
    context.resource.attributes = dict()
    context.resource.attributes["{}.User".format(shell_name)] = "lanforge"
    context.resource.attributes["{}.Password".format(shell_name)] = "lanforge"
    context.resource.attributes["{}.SNMP Read Community".format(shell_name)] = "<READ_COMMUNITY_STRING>"

    # add information for api connectivity
    context.reservation.domain = "Global"
    context.connectivity.server_address = "192.168.100.131"
    driver = LanforgeResourceDriver()
    # print driver.run_custom_command(context, custom_command="sh run", cancellation_context=cancellation_context)
    # result = driver.example_command_with_api(context)

    # driver.create_l3(context, "my_fire", "69000", "41000", "eth1", "eth2")
    # driver.create_wanlink(context, name="my_wanlin", latency="49", rate="6000")
    # The trailing arguments are placeholder values so the call matches dataplane_test's signature.
    driver.dataplane_test(context, "instance", "1.1.eth1", "1.1.eth2", "2s",
                          "10Mbps", "0", "UDP", "tmp/my_report/", "My_Reports", "localhost")
    print("done")
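A note on local debugging: the __main__ block above can be adapted to exercise any of the driver commands without packaging the shell. Below is a minimal sketch, assuming the same mock-context layout, placeholder addresses and default LANforge credentials used above, and a reachable CloudShell API server (send_command opens a CloudShellAPISession before it connects over SSH):

# Local-debug sketch only; addresses and credentials are placeholders.
import mock
from cloudshell.shell.core.driver_context import ResourceCommandContext
from driver import LanforgeResourceDriver

context = mock.create_autospec(ResourceCommandContext)
context.resource = mock.MagicMock()
context.reservation = mock.MagicMock()
context.connectivity = mock.MagicMock()
context.resource.address = "192.168.100.176"              # placeholder LANforge manager address
context.resource.attributes = {
    "LanforgeResource.User": "lanforge",                   # default credentials used in this commit
    "LanforgeResource.Password": "lanforge",
}
context.reservation.domain = "Global"
context.connectivity.server_address = "192.168.100.131"   # placeholder CloudShell server

driver = LanforgeResourceDriver()
# Builds the create_wanlink.py command line and runs it over SSH on the resource.
print(driver.create_wanlink(context, name="my_wanlink", latency="49", rate="6000"))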
189  Quali/lanforge_resource/src/drivermetadata.xml  (new file)
@@ -0,0 +1,189 @@
<Driver Description="Describe the purpose of your CloudShell shell" MainClass="driver.LanforgeResourceDriver"
        Name="LanforgeResourceDriver" Version="1.0.0" PythonVersion="3">
    <Layout>
        <Category Name="Hidden Commands">
            <Command Description="" DisplayName="Orchestration Save" Name="orchestration_save" />
            <Command Description="" DisplayName="Orchestration Restore" Name="orchestration_restore" />
            <Command Description="Send Command to Resource" DisplayName="Scenario" Name="send_command">
                <Parameters>
                    <Parameter Name="cmd" Type="String" Mandatory="False" DefaultValue="" Description="The command to send"/>
                </Parameters>
            </Command>
            <Command Description="Pull Reports from LANforge" DisplayName="Pull Reports" Name="pull_reports">
                <Parameters>
                    <Parameter Name="hostname" Type="String" Mandatory="False" DefaultValue="" Description="hostname"/>
                    <Parameter Name="port" Type="String" Mandatory="False" DefaultValue="" Description="port"/>
                    <Parameter Name="username" Type="String" Mandatory="False" DefaultValue="" Description="username"/>
                    <Parameter Name="password" Type="String" Mandatory="False" DefaultValue="" Description="password"/>
                    <Parameter Name="report_location" Type="String" Mandatory="False" DefaultValue="" Description="report location"/>
                    <Parameter Name="report_dir" Type="String" Mandatory="False" DefaultValue="" Description="report dir"/>
                </Parameters>
            </Command>
        </Category>
        <Category Name="Example Commands">
            <Command Description="Example Command from Demo" DisplayName="Example Command" Name="example_command"/>
        </Category>
        <Category Name="Scenario">
            <Command Description="Load or start a scenario" DisplayName="Scenario" Name="scenario">
                <Parameters>
                    <Parameter Name="load" Type="String" Mandatory="False" DefaultValue="BLANK" Description="The name of the database to load"/>
                </Parameters>
            </Command>
        </Category>
        <Category Name="ICE">
            <Command Description="Create a virtual wanlink with custom impairments." DisplayName="Create Wanlink" Name="create_wanlink">
                <Parameters>
                    <Parameter Name="name" Type="String" Mandatory="False" DefaultValue="wl_eg1" Description="Enter a name for the wanlink."/>
                    <Parameter Name="latency" Type="String" Mandatory="False" DefaultValue="20" Description="Latency of both endpoints"/>
                    <Parameter Name="rate" Type="String" Mandatory="False" DefaultValue="1000000" Description="The total throughput capacity of the wanlink."/>
                </Parameters>
            </Command>
        </Category>
        <Category Name="FIRE">
            <Command Description="Generate traffic between two existing ports" DisplayName="Create Layer-3" Name="create_l3">
                <Parameters>
                    <Parameter Name="name" Type="String" Mandatory="False" DefaultValue="scr-test-1" Description="Enter a name for the connection"/>
                    <Parameter Name="min_rate_a" Type="String" Mandatory="False" DefaultValue="56000" Description="Minimum transfer rate of side a"/>
                    <Parameter Name="min_rate_b" Type="String" Mandatory="False" DefaultValue="56000" Description="Minimum transfer rate of side b"/>
                    <Parameter Name="endp_a" Type="String" Mandatory="False" DefaultValue="eth1" Description="Station list"/>
                    <Parameter Name="endp_b" Type="String" Mandatory="False" DefaultValue="eth2" Description="Upstream port"/>
                </Parameters>
            </Command>
            <Command Description="Initialize a dataplane test" DisplayName="Dataplane Test" Name="dataplane_test">
                <Parameters>
                    <Parameter Name="instance_name" Type="String" Mandatory="False" DefaultValue="dataplane-instance" Description="The name for the dataplane test"/>
                    <Parameter Name="upstream" Type="String" Mandatory="False" DefaultValue="1.1.eth1" Description="The upstream port"/>
                    <Parameter Name="station" Type="String" Mandatory="False" DefaultValue="1.1.eth2" Description="The downstream port"/>
                    <Parameter Name="duration" Type="String" Mandatory="False" DefaultValue="2s" Description="The duration of the test (append 's' for seconds)"/>
                    <Parameter Name="download_speed" Type="String" Mandatory="False" DefaultValue="10Mbps" Description="The rate of the upstream port (append Mbps/Bps)"/>
                    <Parameter Name="upload_speed" Type="String" Mandatory="False" DefaultValue="0" Description="The rate of the downstream port (append Mbps/Bps)"/>
                    <Parameter Name="traffic_types" Type="String" Mandatory="False" DefaultValue="UDP" Description="The type of traffic (TCP/UDP)"/>
                    <Parameter Name="local_lf_report_dir" Type="String" Mandatory="False" DefaultValue="tmp/my_report/" Description="The LANforge directory to save generated reports"/>
                    <Parameter Name="output_report_dir" Type="String" Mandatory="False" DefaultValue="" Description="The server directory to save generated reports"/>
                    <Parameter Name="mgr" Type="String" Mandatory="False" DefaultValue="localhost" Description="The IP address to run the test on"/>
                </Parameters>
            </Command>
        </Category>
    </Layout>
</Driver>
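These command and parameter names are what a sandbox orchestration script would target. Below is a hedged sketch of invoking the create_l3 command defined above through the CloudShell Automation API; the server address, credentials, reservation id and resource name are placeholders, and the ExecuteCommand/InputNameValue calls are the standard cloudshell-automation-api entry points assumed to be available per requirements.txt:

# Sketch only; replace the placeholders with real sandbox values.
from cloudshell.api.cloudshell_api import CloudShellAPISession, InputNameValue

session = CloudShellAPISession(host="192.168.100.131", username="admin",
                               password="admin", domain="Global")
session.ExecuteCommand(
    "<RESERVATION_ID>",                # sandbox id (placeholder)
    "Lanforge_Resource",               # resource name as modelled in CloudShell (placeholder)
    "Resource",                        # target type
    "create_l3",                       # command name from drivermetadata.xml
    [InputNameValue("name", "scr-test-1"),
     InputNameValue("min_rate_a", "56000"),
     InputNameValue("min_rate_b", "56000"),
     InputNameValue("endp_a", "eth1"),
     InputNameValue("endp_b", "eth2")],
    True)                              # printOutput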
0  Quali/lanforge_resource/src/importlib  (new file, empty)
7  Quali/lanforge_resource/src/requirements.txt  (new file)
@@ -0,0 +1,7 @@
mock
cloudshell-shell-core>=5.0.3,<6.0.0
cloudshell-automation-api
cloudshell-orch-core
requests
paramiko
scp
0  Quali/lanforge_resource/src/sys  (new file, empty)
7  Quali/lanforge_resource/test_requirements.txt  (new file)
@@ -0,0 +1,7 @@
nose
coverage
unittest2
mock
teamcity-messages
jsonpickle
nose-exclude
1  Quali/lanforge_resource/tests/__init__.py  (new file)
@@ -0,0 +1 @@
# -*- coding: utf-8 -*-
27  Quali/lanforge_resource/tests/test_lanforge-resource.py  (new file)
@@ -0,0 +1,27 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Tests for `LanforgeResourceDriver`
"""

import unittest

from driver import LanforgeResourceDriver


class TestLanforgeResourceDriver(unittest.TestCase):

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_000_something(self):
        pass


if __name__ == '__main__':
    import sys
    sys.exit(unittest.main())
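The generated test case above is still a stub. One possible first test, sketched under the assumption that tests/ can import driver.py directly (as the stub already assumes) and that the CloudShell API session and data model classes are patched out, would exercise example_command without any live servers:

# Test sketch; attribute values are placeholders.
import unittest
import mock

from driver import LanforgeResourceDriver
from cloudshell.shell.core.driver_context import ResourceCommandContext


class TestExampleCommand(unittest.TestCase):
    @mock.patch("driver.LanforgeResource")
    @mock.patch("driver.CloudShellAPISession")
    def test_example_command_reports_resource(self, api_cls, resource_cls):
        # Stub out the CloudShell API and the generated data model.
        api_cls.return_value.DecryptPassword.return_value.Value = "secret"
        resource_cls.create_from_context.return_value.cloudshell_model_name = "LanforgeResource"
        resource_cls.create_from_context.return_value.name = "Lanforge_Resource"

        context = mock.create_autospec(ResourceCommandContext)
        context.resource = mock.MagicMock()
        context.reservation = mock.MagicMock()
        context.connectivity = mock.MagicMock()
        context.resource.address = "192.168.100.176"
        context.resource.attributes = {"LanforgeResource.User": "lanforge",
                                       "LanforgeResource.Password": "secret"}

        msg = LanforgeResourceDriver().example_command(context)
        self.assertIn("Lanforge_Resource", msg)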
@@ -339,6 +339,14 @@ clean_old_kernels() {
         echo "/lib/modules/$f"
     done | xargs rm -rf
     fi
+    if [ -d "/boot2" ]; then
+        rm -rf /boot2/*
+        rsync -a /boot/. /boot2/
+        local dev2=`df /boot2/ |awk '/dev/{print $1}'`
+        if [ x$dev2 != x ]; then
+            /usr/sbin/grub2-install $dev2 ||:
+        fi
+    fi
 }

 clean_core_files() {
@@ -348,7 +356,7 @@ clean_core_files() {
         return 0
     fi

     local counter=0
     if [ ! -f "$lf_core_log" ]; then
         touch "$lf_core_log"
     fi

44  desktop-hostname.bash  (new executable file)
@@ -0,0 +1,44 @@
#!/bin/bash
Q='"'
A="'"
function set_background() {
    gsettings set "org.mate.background" "$1" "$2"
}

SourceFile="/usr/share/backgrounds/mate/desktop/Ubuntu-Mate-Cold-no-logo.png"
DesktopFile="/home/lanforge/desktop.png"
my_hostname=`hostname`
my_os="[os]"
if [ -f /etc/os-release ]; then
    my_os=`egrep '^VERSION=' /etc/os-release`
    if [ ! -z "$my_os" ]; then
        my_os="${my_os/VERSION=/}"
        my_os="${my_os//\"/}"
    fi
fi

my_inver="[lfver]"
if [ -f "/var/www/html/installed-ver.txt" ]; then
    my_inver=`cat /var/www/html/installed-ver.txt`;
fi
my_kver=`uname -r`
my_dev=`ip ro sho | awk '/default via/{print $5}'`
my_ip=`ip a sho $my_dev | awk '/inet /{print $2}'`
my_mac=`ip a sho | grep -A1 "$my_dev" | awk '/ether /{print $2}'`
fill_color=${my_mac//:/}
fill_color=${fill_color:6:12}
X=220
Y=150
convert -pointsize 80 -fill "#$fill_color" -stroke black -strokewidth 1 \
    -draw "text $X,$Y \"$my_hostname\"" \
    -draw "text $X,$(( Y + 75 )) \"LANForge $my_inver\"" \
    -draw "text $X,$(( Y + 155 )) \"Kernel $my_kver $my_os\"" \
    -draw "text $X,$(( Y + 225 )) \"$my_dev $my_ip\"" \
    -draw "text $X,$(( Y + 295 )) \"$my_mac\"" \
    $SourceFile \
    -scale 1600x900 \
    $DesktopFile

set_background picture-filename ${A}${DesktopFile}${A}
set_background picture-options 'stretched'
#
0  lanforge_client/__init__.py  (new file, empty)
(One additional file diff suppressed because it is too large.)
259  lanforge_client/logg.py  (new file)
@@ -0,0 +1,259 @@
import sys

if sys.version_info[0] != 3:
    print("This script requires Python 3")
    exit()

import logging
from logging import Logger
import time
import datetime
import inspect
# import traceback
# from typing import Optional
from pprint import pprint  # pformat
from .strutil import nott  # iss


class Logg:
    """
    This class presently defines various log "levels" but does not yet express
    the ability to log "areas" or "keywords".

    TODO:
    - LOG BUFFER: a list that only holds the last 100 lines logged to it. This is useful
      for emitting when an exception happens in a loop and you are not interested
      in the first 10e6 log entries.

    - KEYWORD LOGGING: pair a --debug_kw=keyword,keyword set on the command line to only
      receive log output from log statements matching those keywords.

    - CLASS/METHOD/FUNCTION logging: --debug_fn=class.method,module.func set on the command
      line that activates logging in the method or function listed. See inspection techniques
      listed near this SO question https://stackoverflow.com/a/5104943/11014343

    - BITWISE LOG LEVELS: --log_level=DEBUG|FILEIO|JSON|HTTP, a maskable combination of enum_bitmask
      names that combine to a value that can trigger logging.

    These reserved words may not be used as tags:
        debug, debugging, debug_log, digest, file, gui, http, json, log, method, tag

    Protocol logging levels:
    * always: X-Errors (stops script on halt_on_errors)
    * timeouts: can be configured as halt-level errors
        - digest (POST set_port / GET /ports)
        - url (POST /cli-json/set_port / GET /port/1/2/3/?fields)
        - json (POST /cli-json/set_port { a:b } ; GET /port/1/2/3?fields {results interfaces[]})
        - http: that plus X-Warnings and ALL headers
        - gui: extra debugging messages generated by LANforgeGUI

    Please also consider how log messages can be formatted:
    https://stackoverflow.com/a/20112491/11014343:
    logging.basicConfig(format="[%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s")
    """
    DEFAULT_LEVEL = logging.WARNING
    DefaultLogger = logging.getLogger(__name__)
    method_name_list: list = []  # list[str]
    tag_list: list = []  # list[str]
    reserved_tags: list = [  # list[str]
        "debug",
        "debugging",
        "debug_log",
        "digest",
        "file",
        "gui",
        "http",
        "json",
        "log",
        "method",
        "tag"
    ]

    def __init__(self,
                 log_level: int = DEFAULT_LEVEL,
                 name: str = None,
                 filename: str = None,
                 debug: bool = False):
        """----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
        Base class that can be used to send logging messages elsewhere. Extend this
        in order to send log messages from this framework elsewhere.
        ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----"""

        self.level = log_level
        self.logger: Logger

        # self.start_time = datetime.now()  # py 3.9 maybe?
        self.start_time = datetime.datetime.now()
        self.start_time_str = time.strftime("%Y%m%d-%I:%M:%S")
        if name:
            self.name = name
            if "@" in name:
                self.name = name.replace('@', self.start_time_str)
        else:
            self.name = "started-" + self.start_time_str

        self.logger = Logger(name, level=log_level)
        if filename:
            logging.basicConfig(filename=filename, filemode="a")
            if debug:
                self.logg(level=logging.WARNING,
                          msg="Logger {name} begun to {filename}".format(name=name,
                                                                         filename=filename))

    @classmethod
    def logg(cls,
             level: int = logging.WARNING,
             tag: str = None,
             msg: str = None) -> None:
        """
        Use this *class method* to send logs to the DefaultLogger instance created when this class was created
        :param level:
        :param msg:
        :return:
        """
        if nott(msg):
            return
        if level == logging.CRITICAL:
            cls.DefaultLogger.critical(msg)
            return
        if level == logging.ERROR:
            cls.DefaultLogger.error(msg)
            return
        if level == logging.WARNING:
            cls.DefaultLogger.warning(msg)
            return
        if level == logging.INFO:
            cls.DefaultLogger.info(msg)
            return
        if level == logging.DEBUG:
            cls.DefaultLogger.debug(msg)
            return

    def by_level(self,
                 level: int = logging.WARNING,
                 msg: str = None):
        """
        Use this *instance* version of the method for logging when you have a specific logger
        customized for a purpose. Otherwise please use Logg.logg().
        :param level: python logging priority
        :param msg: text to send to logging channel
        :return: None
        """
        if nott(msg):
            return

        if level == logging.CRITICAL:
            self.logger.critical(msg)
            return

        if level == logging.ERROR:
            self.logger.error(msg)
            return

        if level == logging.WARNING:
            self.logger.warning(msg)
            return

        if level == logging.INFO:
            self.logger.info(msg)
            return

        if level == logging.DEBUG:
            self.logger.debug(msg)
            return
        print("UNKNOWN: " + msg)

    def error(self, message: str = None):
        if not message:
            return
        self.logg(level=logging.ERROR, msg=message)

    def warning(self, message: str = None):
        if not message:
            return
        self.logg(level=logging.WARNING, msg=message)

    def info(self, message: str = None):
        if not message:
            return
        self.logg(level=logging.INFO, msg=message)

    def debug(self, message: str = None):
        if not message:
            return
        self.logg(level=logging.DEBUG, msg=message)

    @classmethod
    def register_method_name(cls, methodname: str = None) -> None:
        """
        Use this method to register names of functions you want to allow logging from
        :param methodname:
        :return:
        """
        if not methodname:
            return
        cls.method_name_list.append(methodname)
        if methodname not in cls.tag_list:
            cls.tag_list.append(methodname)

    @classmethod
    def register_tag(cls, tag: str = None) -> None:
        """
        Use this method to register keywords you want to allow logging from.
        There is a list of reserved tags which will not be accepted.
        :return:
        """
        if not tag:
            return
        if tag in cls.tag_list:
            return
        if tag in cls.reserved_tags:
            cls.logg(level=logging.ERROR,
                     msg=f"tag [{tag}] is reserved, ignoring")
            # note: add directly to tag_list to append a reserved tag
            return
        cls.tag_list.append(tag)

    @classmethod
    def by_method(cls, msg: str = None) -> None:
        """
        Should only log if the calling function is in the method_name_list.
        reminder: https://stackoverflow.com/a/13514318/11014343
            import inspect
            import types
            from typing import cast
            this_fn_name = cast(types.FrameType, inspect.currentframe()).f_code.co_name
        :return: None
        """
        try:
            caller = inspect.currentframe().f_back.f_code.co_name

            if caller in cls.method_name_list:
                cls.logg(level=cls.DEFAULT_LEVEL, msg=f"[{caller}] {msg}")

        except Exception as e:
            pprint(e)
            pass

    @classmethod
    def by_tag(cls, tag: str = None, msg: str = None) -> None:
        """
        Should only log if the tag is in the tag_list.
        reminder: https://stackoverflow.com/a/13514318/11014343
        :return:
        """
        if (not cls.tag_list) or (tag not in cls.tag_list):
            return

        cls.logg(level=cls.DEFAULT_LEVEL, msg=f"[{tag}] {msg}")

    def enable(self, reserved_tag: str = None) -> None:
        if (not reserved_tag) or (reserved_tag not in self.reserved_tags):
            return
        if reserved_tag in self.tag_list:
            return
        self.tag_list.append(reserved_tag)
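A short usage sketch of the Logg class as committed; the tag and method names registered here are illustrative only:

# Usage sketch for lanforge_client.logg (tag/method names are examples only).
import logging
from lanforge_client.logg import Logg

logging.basicConfig(format="[%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s")

Logg.register_tag("wanlink")                  # allow by_tag() output for this keyword
Logg.register_method_name("create_wanlink")   # allow by_method() output from this function name

Logg.logg(level=logging.WARNING, msg="plain class-level log line")
Logg.by_tag(tag="wanlink", msg="only emitted because 'wanlink' was registered")

file_logger = Logg(name="lanforge-@", filename="/tmp/lanforge.log", debug=True)
file_logger.warning("instance helper that forwards to Logg.logg()")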
20  lanforge_client/strutil.py  (new file)
@@ -0,0 +1,20 @@
def iss(text: str) -> bool:
    """
    :param text: string to test
    :return: True if text is at least one non-whitespace character
    """
    if text is None:
        return False
    if (len(text) == 0) or (text.strip() == ""):
        return False
    return True


def nott(text: str) -> bool:
    """
    :param text:
    :return: opposite of iss()
    """
    return not iss(text=text)
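For reference, the two helpers behave as follows (a small usage sketch):

# Behaviour sketch for lanforge_client.strutil
from lanforge_client.strutil import iss, nott

assert iss("eth1") is True    # non-blank text
assert iss("   ") is False    # whitespace only
assert iss(None) is False     # missing value
assert nott("") is True       # nott() is simply "not iss()"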
2  pipupgrade.sh  (new executable file)
@@ -0,0 +1,2 @@
#!/bin/bash
pip3 install --user -r requirements.txt --upgrade
@@ -29,8 +29,8 @@ RecordInflux = InfluxRequest.RecordInflux
|
|||||||
|
|
||||||
|
|
||||||
class CSVReader:
|
class CSVReader:
|
||||||
def read_csv(self,
|
@staticmethod
|
||||||
file,
|
def read_csv(file,
|
||||||
sep='\t'):
|
sep='\t'):
|
||||||
df = open(file).read().split('\n')
|
df = open(file).read().split('\n')
|
||||||
rows = list()
|
rows = list()
|
||||||
@@ -39,8 +39,8 @@ class CSVReader:
|
|||||||
rows.append(x.split(sep))
|
rows.append(x.split(sep))
|
||||||
return rows
|
return rows
|
||||||
|
|
||||||
def get_column(self,
|
@staticmethod
|
||||||
df,
|
def get_column(df,
|
||||||
value):
|
value):
|
||||||
index = df[0].index(value)
|
index = df[0].index(value)
|
||||||
values = []
|
values = []
|
||||||
@@ -48,7 +48,8 @@ class CSVReader:
|
|||||||
values.append(row[index])
|
values.append(row[index])
|
||||||
return values
|
return values
|
||||||
|
|
||||||
def get_columns(self, df, targets):
|
@staticmethod
|
||||||
|
def get_columns(df, targets):
|
||||||
target_index = []
|
target_index = []
|
||||||
for item in targets:
|
for item in targets:
|
||||||
target_index.append(df[0].index(item))
|
target_index.append(df[0].index(item))
|
||||||
@@ -60,7 +61,8 @@ class CSVReader:
|
|||||||
results.append(row_data)
|
results.append(row_data)
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def to_html(self, df):
|
@staticmethod
|
||||||
|
def to_html(df):
|
||||||
html = ''
|
html = ''
|
||||||
html = html + ('<table style="border:1px solid #ddd">'
|
html = html + ('<table style="border:1px solid #ddd">'
|
||||||
'<colgroup>'
|
'<colgroup>'
|
||||||
@@ -78,7 +80,8 @@ class CSVReader:
|
|||||||
'</table>')
|
'</table>')
|
||||||
return html
|
return html
|
||||||
|
|
||||||
def filter_df(self, df, column, expression, target):
|
@staticmethod
|
||||||
|
def filter_df(df, column, expression, target):
|
||||||
target_index = df[0].index(column)
|
target_index = df[0].index(column)
|
||||||
counter = 0
|
counter = 0
|
||||||
targets = [0]
|
targets = [0]
|
||||||
@@ -98,7 +101,8 @@ class CSVReader:
|
|||||||
counter += 1
|
counter += 1
|
||||||
return list(map(df.__getitem__, targets))
|
return list(map(df.__getitem__, targets))
|
||||||
|
|
||||||
def concat(self, dfs):
|
@staticmethod
|
||||||
|
def concat(dfs):
|
||||||
return list(itertools.chain.from_iterable(dfs))
|
return list(itertools.chain.from_iterable(dfs))
|
||||||
|
|
||||||
|
|
||||||
@@ -204,7 +208,6 @@ class GhostRequest:
|
|||||||
|
|
||||||
def custom_post(self,
|
def custom_post(self,
|
||||||
folder,
|
folder,
|
||||||
authors,
|
|
||||||
title='custom'):
|
title='custom'):
|
||||||
self.upload_images(folder)
|
self.upload_images(folder)
|
||||||
head = '''This is a custom post created via a script'''
|
head = '''This is a custom post created via a script'''
|
||||||
@@ -215,11 +218,9 @@ class GhostRequest:
|
|||||||
text=head)
|
text=head)
|
||||||
|
|
||||||
def kpi_to_ghost(self,
|
def kpi_to_ghost(self,
|
||||||
authors,
|
|
||||||
folders,
|
folders,
|
||||||
parent_folder=None,
|
parent_folder=None,
|
||||||
title=None,
|
title=None,
|
||||||
server_pull=None,
|
|
||||||
ghost_host=None,
|
ghost_host=None,
|
||||||
port=22,
|
port=22,
|
||||||
user_push=None,
|
user_push=None,
|
||||||
@@ -227,13 +228,12 @@ class GhostRequest:
|
|||||||
customer=None,
|
customer=None,
|
||||||
testbed=None,
|
testbed=None,
|
||||||
test_run=None,
|
test_run=None,
|
||||||
target_folders=list(),
|
target_folders=None,
|
||||||
grafana_token=None,
|
grafana_token=None,
|
||||||
grafana_host=None,
|
grafana_host=None,
|
||||||
grafana_port=3000,
|
grafana_port=3000,
|
||||||
grafana_datasource='InfluxDB',
|
grafana_datasource='InfluxDB',
|
||||||
grafana_bucket=None):
|
grafana_bucket=None):
|
||||||
global dut_hw, dut_sw, dut_model, dut_serial
|
|
||||||
|
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
|
|
||||||
@@ -440,7 +440,7 @@ class GhostRequest:
|
|||||||
# create Grafana Dashboard
|
# create Grafana Dashboard
|
||||||
target_files = []
|
target_files = []
|
||||||
for folder in target_folders:
|
for folder in target_folders:
|
||||||
target_file=folder.split('/')[-1] + '/kpi.csv'
|
target_file = folder.split('/')[-1] + '/kpi.csv'
|
||||||
try:
|
try:
|
||||||
open(target_file)
|
open(target_file)
|
||||||
target_files.append(target_file)
|
target_files.append(target_file)
|
||||||
@@ -502,7 +502,8 @@ class GhostRequest:
|
|||||||
Influx Host: %s<br />
|
Influx Host: %s<br />
|
||||||
Influx Port: %s<br />
|
Influx Port: %s<br />
|
||||||
Influx Organization: %s<br />
|
Influx Organization: %s<br />
|
||||||
Influx Bucket: %s<br />''' % (influx_error, self.influx_host, self.influx_port, self.influx_org, self.influx_bucket)
|
Influx Bucket: %s<br />''' % (
|
||||||
|
influx_error, self.influx_host, self.influx_port, self.influx_org, self.influx_bucket)
|
||||||
|
|
||||||
raw_test_tags = list()
|
raw_test_tags = list()
|
||||||
test_tag_table = ''
|
test_tag_table = ''
|
||||||
@@ -524,8 +525,8 @@ class GhostRequest:
|
|||||||
else:
|
else:
|
||||||
column_name = column
|
column_name = column
|
||||||
dut_table_columns += (
|
dut_table_columns += (
|
||||||
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td><td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' % (
|
'<tr><td style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td><td colspan="3" style="border-color: gray; border-style: solid; border-width: 1px; ">%s</td></tr>' %
|
||||||
column_name, duts[column])
|
(column_name, duts[column])
|
||||||
)
|
)
|
||||||
|
|
||||||
dut_table = '<table width="700px" border="1" cellpadding="2" cellspacing="0" ' \
|
dut_table = '<table width="700px" border="1" cellpadding="2" cellspacing="0" ' \
|
||||||
@@ -3,9 +3,10 @@
 # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 # Class holds default settings for json requests -
 # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+import logging
 import sys
 import os
-import importlib
+from pprint import pformat, PrettyPrinter
 import urllib
 from urllib import request
 import json
@@ -14,19 +15,19 @@ if sys.version_info[0] != 3:
 print("This script requires Python 3")
 exit()


 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))

-LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
+debug_printer = PrettyPrinter(indent=2)


 class LFRequest:
 Default_Base_URL = "http://localhost:8080"
-No_Data = {'No Data':0}
+No_Data = {'No Data': 0}
 requested_url = ""
 post_data = No_Data
-default_headers = { 'Accept': 'application/json'}
+default_headers = {'Accept': 'application/json'}
 proxies = None
+logger = logging.getLogger(__name__)

 def __init__(self, url=None,
 uri=None,
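Illustrative note (not from this commit): with the prints in LFRequest replaced by a module-level logging.getLogger(__name__), a calling script only sees the new debug output if it configures the logging framework itself. A minimal sketch, assuming standard-library logging only; the logger name shown is hypothetical and depends on how the module is imported:

    import logging

    # Surface the logger.debug()/logger.warning() calls introduced in this change
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s %(name)s %(levelname)s: %(message)s")

    # Optionally quiet one module without touching the rest (name is an assumption)
    logging.getLogger("LFRequest").setLevel(logging.WARNING)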
@@ -62,7 +63,7 @@ class LFRequest:
 # pprint.pprint(self.proxies)

 if not url.startswith("http://") and not url.startswith("https://"):
-print("No http:// or https:// found, prepending http:// to "+url)
+self.logger.warning("No http:// or https:// found, prepending http:// to " + url)
 url = "http://" + url
 if uri is not None:
 if not url.endswith('/') and not uri.startswith('/'):
@@ -76,16 +77,16 @@ class LFRequest:

 if self.requested_url.find('//'):
 protopos = self.requested_url.find("://")
-self.requested_url = self.requested_url[:protopos + 2] + self.requested_url[protopos + 2:].replace("//", "/")
+self.requested_url = self.requested_url[:protopos + 2] + self.requested_url[protopos + 2:].replace("//",
+"/")

 # finding '#' prolly indicates a macvlan (eth1#0)
 # finding ' ' prolly indicates a field name that should imply %20
-if (self.requested_url.find('#') >= 1):
+if self.requested_url.find('#') >= 1:
 self.requested_url = self.requested_url.replace('#', '%23')
-if (self.requested_url.find(' ') >= 1):
+if self.requested_url.find(' ') >= 1:
 self.requested_url = self.requested_url.replace(' ', '+')
-if self.debug:
-print("new LFRequest[%s]" % self.requested_url )
+self.logger.debug("new LFRequest[%s]" % self.requested_url)

 # request first url on stack
 def formPost(self, show_error=True, debug=False, die_on_error_=False):
@@ -94,7 +95,7 @@ class LFRequest:
 def form_post(self, show_error=True, debug=False, die_on_error_=False):
 if self.die_on_error:
 die_on_error_ = True
-if (debug == False) and (self.debug == True):
+if not debug and self.debug:
 debug = True
 responses = []
 urlenc_data = ""
@@ -104,20 +105,17 @@ class LFRequest:
 opener = request.build_opener(request.ProxyHandler(self.proxies))
 request.install_opener(opener)

-if (debug):
-print("formPost: url: "+self.requested_url)
-if ((self.post_data != None) and (self.post_data is not self.No_Data)):
+self.logger.debug("formPost: url: " + self.requested_url)
+if (self.post_data is not None) and (self.post_data is not self.No_Data):
 urlenc_data = urllib.parse.urlencode(self.post_data).encode("utf-8")
-if (debug):
-print("formPost: data looks like:" + str(urlenc_data))
-print("formPost: url: "+self.requested_url)
+self.logger.debug("formPost: data looks like:" + str(urlenc_data))
+self.logger.debug("formPost: url: " + self.requested_url)
 myrequest = request.Request(url=self.requested_url,
 data=urlenc_data,
 headers=self.default_headers)
 else:
 myrequest = request.Request(url=self.requested_url, headers=self.default_headers)
-print("No data for this formPost?")
+self.logger.error("No data for this formPost?")

 myrequest.headers['Content-type'] = 'application/x-www-form-urlencoded'

@@ -143,15 +141,16 @@ class LFRequest:
 error_list_=self.error_list,
 debug_=debug)

-if (die_on_error_ == True) or (self.die_on_error == True):
+if die_on_error_ or self.die_on_error:
 exit(1)
 return None

 def jsonPost(self, show_error=True, debug=False, die_on_error_=False, response_json_list_=None):
-return self.json_post(show_error=show_error, debug=debug, die_on_error_=die_on_error_, response_json_list_=response_json_list_)
+return self.json_post(show_error=show_error, debug=debug, die_on_error_=die_on_error_,
+response_json_list_=response_json_list_)

 def json_post(self, show_error=True, debug=False, die_on_error_=False, response_json_list_=None, method_='POST'):
-if (debug == False) and (self.debug == True):
+if not debug and self.debug:
 debug = True
 if self.die_on_error:
 die_on_error_ = True
@@ -160,14 +159,14 @@ class LFRequest:
 opener = urllib.request.build_opener(request.ProxyHandler(self.proxies))
 urllib.request.install_opener(opener)

-if ((self.post_data != None) and (self.post_data is not self.No_Data)):
+if (self.post_data is not None) and (self.post_data is not self.No_Data):
 myrequest = request.Request(url=self.requested_url,
 method=method_,
 data=json.dumps(self.post_data).encode("utf-8"),
 headers=self.default_headers)
 else:
 myrequest = request.Request(url=self.requested_url, headers=self.default_headers)
-print("No data for this jsonPost?")
+self.logger.error("No data for this jsonPost?")

 myrequest.headers['Content-type'] = 'application/json'

@@ -176,23 +175,24 @@ class LFRequest:
 try:
 resp = urllib.request.urlopen(myrequest)
 resp_data = resp.read().decode('utf-8')
-if (debug and die_on_error_):
-print("----- LFRequest::json_post:128 debug: --------------------------------------------")
-print("URL: %s :%d "% (self.requested_url, resp.status))
+if debug and die_on_error_:
+self.logger.debug("----- LFRequest::json_post:128 debug: --------------------------------------------")
+self.logger.debug("URL: <%s> status: %d " % (self.requested_url, resp.status))
 if resp.status != 200:
-LFUtils.debug_printer.pprint(resp.getheaders())
-print("----- resp_data:128 -------------------------------------------------")
-print(resp_data)
-print("-------------------------------------------------")
+self.logger.debug(pformat(resp.getheaders()))
+self.logger.debug("----- resp_data:128 -------------------------------------------------")
+self.logger.debug(resp_data)
+self.logger.debug("-------------------------------------------------")
 responses.append(resp)
 if response_json_list_ is not None:
 if type(response_json_list_) is not list:
 raise ValueError("reponse_json_list_ needs to be type list")
 j = json.loads(resp_data)
 if debug:
-print("----- LFRequest::json_post:140 debug: --------------------------------------------")
-LFUtils.debug_printer.pprint(j)
-print("-------------------------------------------------")
+self.logger.debug(
+"----- LFRequest::json_post:140 debug: --------------------------------------------")
+self.logger.debug(pformat(j))
+self.logger.debug("-------------------------------------------------")
 response_json_list_.append(j)
 return responses[0]

@@ -210,39 +210,33 @@ class LFRequest:
 error_=uerror,
 debug_=debug)

-if die_on_error_ == True:
+if die_on_error_:
 exit(1)
 return None

 def json_put(self, show_error=True, debug=False, die_on_error_=False, response_json_list_=None):
 return self.json_post(show_error=show_error,
 debug=debug,
 die_on_error_=die_on_error_,
 response_json_list_=response_json_list_,
 method_='PUT')

 def json_delete(self, show_error=True, debug=False, die_on_error_=False, response_json_list_=None):
-return self.get_as_json(debug_=debug,
-die_on_error_=die_on_error_,
-method_='DELETE')
+return self.get_as_json(method_='DELETE')

-def get(self, debug=False, die_on_error_=False, method_='GET'):
-if self.debug == True:
-debug = True
-if self.die_on_error == True:
-die_on_error_ = True
-if debug:
-print("LFUtils.get: url: "+self.requested_url)
+def get(self, method_='GET'):
+if self.debug:
+self.logger.debug("LFUtils.get: url: " + self.requested_url)

 # https://stackoverflow.com/a/59635684/11014343
 if (self.proxies is not None) and (len(self.proxies) > 0):
 opener = request.build_opener(request.ProxyHandler(self.proxies))
-#opener = urllib.request.build_opener(myrequest.ProxyHandler(self.proxies))
+# opener = urllib.request.build_opener(myrequest.ProxyHandler(self.proxies))
 request.install_opener(opener)

 myrequest = request.Request(url=self.requested_url,
 headers=self.default_headers,
 method=method_)
 myresponses = []
 try:
 myresponses.append(request.urlopen(myrequest))
@@ -254,7 +248,7 @@ class LFRequest:
 responses_=myresponses,
 error_=error,
 error_list_=self.error_list,
-debug_=debug)
+debug_=self.debug)

 except urllib.error.URLError as uerror:
 print_diagnostics(url_=self.requested_url,
@@ -262,26 +256,24 @@ class LFRequest:
 responses_=myresponses,
 error_=uerror,
 error_list_=self.error_list,
-debug_=debug)
+debug_=self.debug)

-if die_on_error_ == True:
+if self.die_on_error:
 exit(1)
 return None

-def getAsJson(self, die_on_error_=False, debug_=False):
-return self.get_as_json(die_on_error_=die_on_error_, debug_=debug_)
+def getAsJson(self):
+return self.get_as_json()

-def get_as_json(self, die_on_error_=False, debug_=False, method_='GET'):
-responses = []
-j = self.get(debug=debug_, die_on_error_=die_on_error_, method_=method_)
-responses.append(j)
+def get_as_json(self, method_='GET'):
+responses = list()
+responses.append(self.get(method_=method_))
 if len(responses) < 1:
-if debug_ and self.has_errors():
+if self.debug and self.has_errors():
 self.print_errors()
 return None
-if responses[0] == None:
-if debug_:
-print("No response from "+self.requested_url)
+if responses[0] is None:
+self.logger.debug("No response from " + self.requested_url)
 return None
 json_data = json.loads(responses[0].read().decode('utf-8'))
 return json_data
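Illustrative note (not from this commit): because get(), getAsJson() and get_as_json() lose their debug_/die_on_error_ parameters here, call sites have to set that behaviour on the LFRequest object itself. A minimal sketch under that assumption; the URI shown is a placeholder, and the import mirrors the pattern the scripts themselves use:

    import importlib
    LFRequest = importlib.import_module("py-json.LANforge.LFRequest")  # assumes the same sys.path setup as above

    lf_r = LFRequest.LFRequest("http://localhost:8080", "/port/1/1/sta0000",
                               debug_=True, die_on_error_=False)

    # old style (no longer accepted after this change):
    # data = lf_r.getAsJson(debug_=True, die_on_error_=False)

    # new style: debug and die-on-error come from the constructor flags
    data = lf_r.getAsJson()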
@@ -298,14 +290,15 @@ class LFRequest:
 self.post_data = data

 def has_errors(self):
-return (True, False)[len(self.error_list)>0]
+return (True, False)[len(self.error_list) > 0]

 def print_errors(self):
 if not self.has_errors:
-print("---------- no errors ----------")
+self.logger.debug("---------- no errors ----------")
 return
 for err in self.error_list:
-print("error: %s" % err)
+self.logger.error("error: %s" % err)


 def plain_get(url_=None, debug_=False, die_on_error_=False, proxies_=None):
 """
@@ -340,25 +333,25 @@ def plain_get(url_=None, debug_=False, die_on_error_=False, proxies_=None):
 error_=uerror,
 debug_=debug_)

-if die_on_error_ == True:
+if die_on_error_:
 exit(1)
 return None


 def print_diagnostics(url_=None, request_=None, responses_=None, error_=None, error_list_=None, debug_=False):
-if debug_:
-print("LFRequest::print_diagnostics: error_.__class__: %s"%error_.__class__)
-LFUtils.debug_printer.pprint(error_)
+logger = logging.getLogger(__name__)
+# logger.error("LFRequest::print_diagnostics: error_.__class__: %s"%error_.__class__)
+# logger.error(pformat(error_))

 if url_ is None:
-print("WARNING LFRequest::print_diagnostics: url_ is None")
+logger.warning("WARNING LFRequest::print_diagnostics: url_ is None")
 if request_ is None:
-print("WARNING LFRequest::print_diagnostics: request_ is None")
+logger.warning("WARNING LFRequest::print_diagnostics: request_ is None")
 if error_ is None:
-print("WARNING LFRequest::print_diagnostics: error_ is None")
+logger.warning("WARNING LFRequest::print_diagnostics: error_ is None")

 method = 'NA'
-if (hasattr(request_, 'method')):
+if hasattr(request_, 'method'):
 method = request_.method
 err_code = 0
 err_reason = 'NA'
@@ -376,52 +369,52 @@ def print_diagnostics(url_=None, request_=None, responses_=None, error_=None, er
 if err_code == 404:
 xerrors.append("[%s HTTP %s] <%s> : %s" % (method, err_code, err_full_url, err_reason))
 else:
-if (len(err_headers) > 0):
+if len(err_headers) > 0:
 for headername in sorted(err_headers.keys()):
 if headername.startswith("X-Error-"):
 xerrors.append("%s: %s" % (headername, err_headers.get(headername)))
 if len(xerrors) > 0:
-print(" = = LANforge Error Messages = =")
+logger.error(" = = LANforge Error Messages = =")
+logger.error(" = = URL: %s" % err_full_url)
 for xerr in xerrors:
-print(xerr)
+logger.error(xerr)
 if (error_list_ is not None) and isinstance(error_list_, list):
 error_list_.append(xerr)
-print(" = = = = = = = = = = = = = = = =")
+logger.error(" = = = = = = = = = = = = = = = =")

-if (error_.__class__ is urllib.error.HTTPError):
-if debug_:
-print("----- LFRequest: HTTPError: --------------------------------------------")
-print("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))
+if error_.__class__ is urllib.error.HTTPError:
+logger.debug("----- LFRequest: HTTPError: --------------------------------------------")
+logger.debug("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))

 if err_code == 404:
 if (error_list_ is not None) and isinstance(error_list_, list):
 error_list_.append("[%s HTTP %s] <%s> : %s" % (method, err_code, err_full_url, err_reason))
 else:
-if debug_:
-print(" Content-type:[%s] Accept[%s]" % (request_.get_header('Content-type'), request_.get_header('Accept')))
+logger.debug(
+" Content-type:[%s] Accept[%s]" % (request_.get_header('Content-type'), request_.get_header('Accept')))

 if hasattr(request_, "data") and (request_.data is not None):
-print(" Data:")
-LFUtils.debug_printer.pprint(request_.data)
+logger.debug(" Data:")
+logger.debug(debug_printer.pformat(request_.data))
 elif debug_:
-print(" <no request data>")
+logger.debug(" <no request data>")

-if debug_ and (len(err_headers) > 0):
+if len(err_headers) > 0:
 # the HTTPError is of type HTTPMessage a subclass of email.message
-print(" Response Headers: ")
+logger.debug(" Response Headers: ")
 for headername in sorted(err_headers.keys()):
-print(" %s: %s" % (headername, err_headers.get(headername)))
+logger.debug(" %s: %s" % (headername, err_headers.get(headername)))

 if len(responses_) > 0:
-print("----- Response: --------------------------------------------------------")
-LFUtils.debug_printer.pprint(responses_[0].reason)
-if debug_:
-print("------------------------------------------------------------------------")
+logger.debug("----- Response: --------------------------------------------------------")
+logger.debug(debug_printer.pformat(responses_[0].reason))
+logger.debug("------------------------------------------------------------------------")
 return

-if (error_.__class__ is urllib.error.URLError):
-print("----- LFRequest: URLError: ---------------------------------------------")
-print("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))
-print("------------------------------------------------------------------------")
+if error_.__class__ is urllib.error.URLError:
+logger.error("----- LFRequest: URLError: ---------------------------------------------")
+logger.error("%s <%s> HTTP %s: %s" % (method, err_full_url, err_code, err_reason))
+logger.error("------------------------------------------------------------------------")

 # ~LFRequest
@@ -12,15 +12,16 @@ from time import sleep
 from random import seed, randint
 import re
 import ipaddress
+import logging

 if sys.version_info[0] != 3:
 print("This script requires Python 3")
 exit()


 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))

 LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
+Logg = importlib.import_module("lanforge_client.logg") # .Logg

 debug_printer = pprint.PrettyPrinter(indent=2)

@@ -30,7 +31,8 @@ ADD_STA_FLAGS_DOWN_WPA2 = 68719477760
 REPORT_TIMER_MS_FAST = 1500
 REPORT_TIMER_MS_SLOW = 3000

-#Used for Speed
+
+# Used for Speed
 def parse_size_bps(size_val):
 if isinstance(size_val, str):
 size_val.upper()
@@ -52,7 +54,8 @@ def parse_size_bps(size_val):
 else:
 return size_val

-#Used for Size of file
+
+# Used for Size of file
 def parse_size(size_val):
 if isinstance(size_val, str):
 size_val.upper()
@@ -80,22 +83,14 @@ class PortEID:
 port_id = 0
 port_name = ""

-def __init__(self, p_resource=1, p_port_id=0, p_port_name=""):
-resource = p_resource
-port_id = p_port_id
-port_name = p_port_name

 def __init__(self, json_response):
-if json_response == None:
+if json_response is None:
 raise Exception("No json input")
 json_s = json_response
-if json_response['interface'] != None:
+if json_response['interface'] is not None:
 json_s = json_response['interface']

 debug_printer(json_s)
-resource = json_s['resource']
-port_id = json_s['id']
-port_name = json_s['name']


 # end class PortEID
@@ -103,6 +98,7 @@ class PortEID:
 def staNewDownStaRequest(sta_name, resource_id=1, radio="wiphy0", ssid="", passphrase="", debug_on=False):
 return sta_new_down_sta_request(sta_name, resource_id, radio, ssid, passphrase, debug_on)


 def sta_new_down_sta_request(sta_name, resource_id=1, radio="wiphy0", ssid="", passphrase="", debug_on=False):
 """
 For use with add_sta. If you don't want to generate mac addresses via patterns (xx:xx:xx:xx:81:*)
@@ -132,6 +128,7 @@ def sta_new_down_sta_request(sta_name, resource_id=1, radio="wiphy0", ssid="", p
 def portSetDhcpDownRequest(resource_id, port_name, debug_on=False):
 return port_set_dhcp_down_request(resource_id, port_name, debug_on)


 def port_set_dhcp_down_request(resource_id, port_name, debug_on=False):
 """
 See http://localhost:8080/help/set_port
@@ -156,6 +153,7 @@ def port_set_dhcp_down_request(resource_id, port_name, debug_on=False):
 def portDhcpUpRequest(resource_id, port_name, debug_on=False):
 return port_dhcp_up_request(resource_id, port_name, debug_on)


 def port_dhcp_up_request(resource_id, port_name, debug_on=False):
 """
 See http://localhost:8080/help/set_port
@@ -181,6 +179,7 @@ def port_dhcp_up_request(resource_id, port_name, debug_on=False):
 def portUpRequest(resource_id, port_name, debug_on=False):
 return port_up_request(resource_id, port_name, debug_on)


 def port_up_request(resource_id, port_name, debug_on=False):
 """
 See http://localhost:8080/help/set_port
@@ -201,9 +200,11 @@ def port_up_request(resource_id, port_name, debug_on=False):
 debug_printer.pprint(data)
 return data


 def portDownRequest(resource_id, port_name, debug_on=False):
 return port_down_request(resource_id, port_name, debug_on)


 def port_down_request(resource_id, port_name, debug_on=False):
 """
 Does not change the use_dhcp flag
@@ -212,7 +213,7 @@ def port_down_request(resource_id, port_name, debug_on=False):
 :param port_name:
 :return:
 """

 data = {
 "shelf": 1,
 "resource": resource_id,
@@ -226,6 +227,7 @@ def port_down_request(resource_id, port_name, debug_on=False):
 debug_printer.pprint(data)
 return data


 def port_reset_request(resource_id, port_name, debug_on=False):
 """
 Does not change the use_dhcp flag
@@ -234,7 +236,7 @@ def port_reset_request(resource_id, port_name, debug_on=False):
 :param port_name:
 :return:
 """

 data = {
 "shelf": 1,
 "resource": resource_id,
@@ -249,6 +251,7 @@ def port_reset_request(resource_id, port_name, debug_on=False):
 def generateMac(parent_mac, random_octet, debug=False):
 return generate_mac(parent_mac=parent_mac, random_octet=random_octet, debug=debug)


 def generate_mac(parent_mac, random_octet, debug=False):
 if debug:
 print("************ random_octet: %s **************" % (random_octet))
@@ -272,7 +275,8 @@ def portNameSeries(prefix_="sta", start_id_=0, end_id_=1, padding_number_=10000,
 :param padding_number_:
 :return:
 """
-return port_name_series(prefix=prefix_, start_id=start_id_, end_id=end_id_, padding_number=padding_number_, radio=radio)
+return port_name_series(prefix=prefix_, start_id=start_id_, end_id=end_id_, padding_number=padding_number_,
+radio=radio)


 def port_name_series(prefix="sta", start_id=0, end_id=1, padding_number=10000, radio=None):
@@ -291,7 +295,7 @@ def port_name_series(prefix="sta", start_id=0, end_id=1, padding_number=10000, r
 eid = None
 if radio is not None:
 eid = name_to_eid(radio)

 name_list = []
 for i in range((padding_number + start_id), (padding_number + end_id + 1)):
 sta_name = "%s%s" % (prefix, str(i)[1:])
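Illustrative note (not from this commit): the naming trick above drops the leading digit of padding_number + index, so with the default padding of 10000 the indices 10000, 10001, ... become "0000", "0001", .... A quick sketch of the expected output, assuming the module is loaded the same way the scripts above load it and that the function returns its name_list:

    import importlib
    LFUtils = importlib.import_module("py-json.LANforge.LFUtils")  # assumes the same sys.path setup as above

    names = LFUtils.port_name_series(prefix="sta", start_id=0, end_id=2, padding_number=10000)
    # str(10000)[1:] == "0000", str(10001)[1:] == "0001", str(10002)[1:] == "0002"
    print(names)  # expected: ['sta0000', 'sta0001', 'sta0002']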
@@ -312,9 +316,11 @@ def gen_ip_series(ip_addr, netmask, num_ips=None):
 chosen_ips.append(ip_list[i])
 return chosen_ips


 def generateRandomHex():
 return generate_random_hex()


 # generate random hex if you need it for mac addresses
 def generate_random_hex():
 # generate a few random numbers and convert them into hex:
@@ -370,6 +376,7 @@ def port_list_to_alias_map(json_list, debug_=False):

 return reverse_map


 def list_to_alias_map(json_list=None, from_element=None, debug_=False):
 reverse_map = {}
 if (json_list is None) or (len(json_list) < 1):
@@ -419,9 +426,9 @@ def find_port_eids(resource_id=1, base_url="http://localhost:8080", port_names=(
 port_url = "/port/1"
 for port_name in port_names:
 uri = "%s/%s/%s" % (port_url, resource_id, port_name)
-lf_r = LFRequest.LFRequest(base_url, uri)
+lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug)
 try:
-response = lf_r.getAsJson(debug)
+response = lf_r.getAsJson()
 if response is None:
 continue
 port_eids.append(PortEID(response))
@@ -443,9 +450,9 @@ def wait_until_ports_admin_down(resource_id=1, base_url="http://localhost:8080",
 up_stations = []
 for port_name in port_list:
 uri = "%s/%s/%s?fields=device,down" % (port_url, resource_id, port_name)
-lf_r = LFRequest.LFRequest(base_url, uri)
-json_response = lf_r.getAsJson(debug_=False)
-if json_response == None:
+lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug_)
+json_response = lf_r.getAsJson()
+if json_response is None:
 if debug_:
 print("port %s disappeared" % port_name)
 continue
@@ -460,6 +467,7 @@ def wait_until_ports_admin_down(resource_id=1, base_url="http://localhost:8080",
 def waitUntilPortsAdminUp(resource_id=1, base_url="http://localhost:8080", port_list=()):
 return wait_until_ports_admin_up(resource_id=resource_id, base_url=base_url, port_list=port_list)


 def wait_until_ports_admin_up(resource_id=1, base_url="http://localhost:8080", port_list=(), debug_=False):
 print("Waiting until ports appear admin-up...")
 down_stations = port_list.copy()
@@ -470,9 +478,9 @@ def wait_until_ports_admin_up(resource_id=1, base_url="http://localhost:8080", p
 down_stations = []
 for port_name in port_list:
 uri = "%s/%s/%s?fields=device,down" % (port_url, resource_id, port_name)
-lf_r = LFRequest.LFRequest(base_url, uri)
-json_response = lf_r.getAsJson(debug_=False)
-if json_response == None:
+lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug_)
+json_response = lf_r.getAsJson()
+if json_response is None:
 if debug_:
 print("port %s appeared" % port_name)
 continue
@@ -483,9 +491,11 @@ def wait_until_ports_admin_up(resource_id=1, base_url="http://localhost:8080", p
 sleep(1)
 return None


 def waitUntilPortsDisappear(base_url="http://localhost:8080", port_list=(), debug=False):
 wait_until_ports_disappear(base_url, port_list, debug)


 def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=(), debug=False):
 if (port_list is None) or (len(port_list) < 1):
 if debug:
@@ -499,8 +509,8 @@ def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=(), d
 else:
 found_stations = [port_list]

-temp_names_by_resource = {1:[]}
-temp_query_by_resource = {1:""}
+temp_names_by_resource = {1: []}
+temp_query_by_resource = {1: ""}
 for port_eid in port_list:
 eid = name_to_eid(port_eid)
 # shelf = eid[0]
@@ -511,7 +521,8 @@ def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=(), d
 temp_names_by_resource[resource_id] = []
 port_name = eid[2]
 temp_names_by_resource[resource_id].append(port_name)
-temp_query_by_resource[resource_id] = "%s/%s/%s?fields=alias" % (url, resource_id, ",".join(temp_names_by_resource[resource_id]))
+temp_query_by_resource[resource_id] = "%s/%s/%s?fields=alias" % (
+url, resource_id, ",".join(temp_names_by_resource[resource_id]))
 if debug:
 pprint.pprint(("temp_query_by_resource", temp_query_by_resource))
 while len(found_stations) > 0:
@@ -523,8 +534,8 @@ def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=(), d
 ("check_url", check_url),
 ])
 lf_r = LFRequest.LFRequest(base_url, check_url, debug_=debug)
-json_response = lf_r.get_as_json(debug_=debug, die_on_error_=False)
-if (json_response == None):
+json_response = lf_r.get_as_json()
+if json_response is None:
 print("LFUtils::wait_until_ports_disappear:: Request returned None: [{}]".format(base_url + check_url))
 else:
 if debug:
@@ -540,7 +551,7 @@ def wait_until_ports_disappear(base_url="http://localhost:8080", port_list=(), d
 if len(found_stations) > 0:
 if debug:
 pprint.pprint(("wait_until_ports_disappear found_stations:", found_stations))
 sleep(1)  # safety
 return


@@ -554,37 +565,39 @@ def waitUntilPortsAppear(base_url="http://localhost:8080", port_list=(), debug=F
 """
 return wait_until_ports_appear(base_url, port_list, debug=debug)

-def name_to_eid(input, non_port=False):
+
+def name_to_eid(eid_input, non_port=False):
 rv = [1, 1, "", ""]
 info = []
-if (input is None) or (input == ""):
-raise ValueError("name_to_eid wants eid like 1.1.sta0 but given[%s]" % input)
-if type(input) is not str:
-raise ValueError("name_to_eid wants string formatted like '1.2.name', not a tuple or list or [%s]" % type(input))
+if (eid_input is None) or (eid_input == ""):
+raise ValueError("name_to_eid wants eid like 1.1.sta0 but given[%s]" % eid_input)
+if type(eid_input) is not str:
+raise ValueError(
+"name_to_eid wants string formatted like '1.2.name', not a tuple or list or [%s]" % type(eid_input))

-info = input.split('.')
+info = eid_input.split('.')
 if len(info) == 1:
 rv[2] = info[0] # just port name
 return rv

 if (len(info) == 2) and info[0].isnumeric() and not info[1].isnumeric(): # resource.port-name
 rv[1] = int(info[0])
 rv[2] = info[1]
 return rv

 elif (len(info) == 2) and not info[0].isnumeric(): # port-name.qvlan
-rv[2] = info[0]+"."+info[1]
+rv[2] = info[0] + "." + info[1]
 return rv

 if (len(info) == 3) and info[0].isnumeric() and info[1].isnumeric(): # shelf.resource.port-name
 rv[0] = int(info[0])
 rv[1] = int(info[1])
 rv[2] = info[2]
 return rv

 elif (len(info) == 3) and info[0].isnumeric() and not info[1].isnumeric(): # resource.port-name.qvlan
 rv[1] = int(info[0])
-rv[2] = info[1]+"."+info[2]
+rv[2] = info[1] + "." + info[2]
 return rv

 if non_port:
@@ -595,14 +608,15 @@ def name_to_eid(input, non_port=False):
 if (len(info) >= 4):
 rv[3] = int(info[3])
 return rv

 if len(info) == 4: # shelf.resource.port-name.qvlan
 rv[0] = int(info[0])
 rv[1] = int(info[1])
-rv[2] = info[2]+"."+info[3]
+rv[2] = info[2] + "." + info[3]

 return rv


 def wait_until_ports_appear(base_url="http://localhost:8080", port_list=(), debug=False):
 """
 Use this method to pause until the LANforge system has caught up and implemented the
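Illustrative note (not from this commit): the rename from input to eid_input does not change the parsing rules in the two hunks above; the expected results can be read straight from the branches. A short sketch, assuming the module is loaded the same way the scripts above load it:

    import importlib
    LFUtils = importlib.import_module("py-json.LANforge.LFUtils")  # assumes the same sys.path setup as above

    LFUtils.name_to_eid("sta0000")       # -> [1, 1, "sta0000", ""]    just a port name
    LFUtils.name_to_eid("2.sta0000")     # -> [1, 2, "sta0000", ""]    resource.port-name
    LFUtils.name_to_eid("1.2.sta0000")   # -> [1, 2, "sta0000", ""]    shelf.resource.port-name
    LFUtils.name_to_eid("1.1.eth1.100")  # -> [1, 1, "eth1.100", ""]   shelf.resource.port-name.qvlan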
@@ -633,21 +647,22 @@ def wait_until_ports_appear(base_url="http://localhost:8080", port_list=(), debu
 port_name = eid[2]
 # print("waiting for sta sta "+port_eid)
 uri = "%s/%s/%s" % (port_url, resource_id, port_name)
-lf_r = LFRequest.LFRequest(base_url, uri)
-json_response = lf_r.getAsJson(debug_=False)
-if (json_response != None):
+lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug)
+json_response = lf_r.getAsJson()
+if json_response is not None:
 found_stations.append(port_name)
 else:
-lf_r = LFRequest.LFRequest(base_url, ncshow_url)
+lf_r = LFRequest.LFRequest(base_url, ncshow_url, debug_=debug)
 lf_r.addPostData({"shelf": shelf, "resource": resource_id, "port": port_name, "probe_flags": 5})
 lf_r.jsonPost()
-if (len(found_stations) < len(port_list)):
+if len(found_stations) < len(port_list):
 sleep(2)

 if debug:
 print("These stations appeared: " + ", ".join(found_stations))
 return


 def wait_until_endps(base_url="http://localhost:8080", endp_list=(), debug=False):
 """

@@ -672,14 +687,14 @@ def wait_until_endps(base_url="http://localhost:8080", endp_list=(), debug=False
 shelf = eid[0]
 resource_id = eid[1]
 port_name = eid[2]

 uri = "%s/%s/%s" % (port_url, resource_id, port_name)
-lf_r = LFRequest.LFRequest(base_url, uri)
-json_response = lf_r.getAsJson(debug_=False)
-if (json_response != None):
+lf_r = LFRequest.LFRequest(base_url, uri, debug_=debug)
+json_response = lf_r.getAsJson()
+if json_response is not None:
 found_stations.append(port_name)
 else:
-lf_r = LFRequest.LFRequest(base_url, ncshow_url)
+lf_r = LFRequest.LFRequest(base_url, ncshow_url, debug_=debug)
 lf_r.addPostData({"shelf": shelf, "resource": resource_id, "port": port_name, "flags": 1})
 lf_r.formPost()
 if (len(found_stations) < len(endp_list)):
@@ -698,7 +713,7 @@ def remove_port(resource, port_name, baseurl="http://localhost:8080/", debug=Fal
 if debug:
 print("Removing port %d.%s" % (resource, port_name))
 url = "/cli-json/rm_vlan"
-lf_r = LFRequest.LFRequest(baseurl, url)
+lf_r = LFRequest.LFRequest(baseurl, url, debug_=debug)
 lf_r.addPostData({
 "shelf": 1,
 "resource": resource,
@@ -720,7 +735,7 @@ def remove_cx(baseurl, cx_names, debug=False):
 "test_mgr": "all",
 "cx_name": name
 }
-lf_r = LFRequest.LFRequest(baseurl, url)
+lf_r = LFRequest.LFRequest(baseurl, url, debug_=debug)
 lf_r.addPostData(data)
 lf_r.jsonPost(debug)

@@ -733,7 +748,7 @@ def remove_endps(baseurl, endp_names, debug=False):
 if debug:
 print("Removing endp %s" % ", ".join(endp_names))
 url = "/cli-json/rm_endp"
-lf_r = LFRequest.LFRequest(baseurl, url)
+lf_r = LFRequest.LFRequest(baseurl, url, debug_=debug)
 for name in endp_names:
 data = {
 "endp_name": name
@@ -791,11 +806,11 @@ def expand_endp_histogram(distribution_payload=None):
 raise ValueError("Unexpected histogram format.")
 multiplier = int(distribution_payload["histo_category_width"])
 formatted_dict = {
-#"00000 <= x <= 00001" : "0"
+# "00000 <= x <= 00001" : "0"
 }
 for bucket_index in range(len(distribution_payload["histogram"]) - 1):
-pow1 = (2**bucket_index) * multiplier
-pow2 = (2**(bucket_index+1)) * multiplier
+pow1 = (2 ** bucket_index) * multiplier
+pow2 = (2 ** (bucket_index + 1)) * multiplier
 if bucket_index == 0:
 category_name = "00000 <= x <= {:-05.0f}".format(pow2)
 else:
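Illustrative note (not from this commit): the bucket edges computed above double with each index and are scaled by histo_category_width. A small worked sketch of that arithmetic, with the width value chosen only for illustration:

    # Bucket edges produced by the pow1/pow2 arithmetic in expand_endp_histogram,
    # assuming histo_category_width == 3 (illustrative value).
    multiplier = 3
    for bucket_index in range(4):
        pow1 = (2 ** bucket_index) * multiplier
        pow2 = (2 ** (bucket_index + 1)) * multiplier
        print(bucket_index, pow1, pow2)
    # 0 3 6    -> first bucket is reported as "00000 <= x <= 00006"
    # 1 6 12
    # 2 12 24
    # 3 24 48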
@@ -10,23 +10,32 @@ import random
 import string
 import datetime
 import argparse
+import re
+import logging

 if sys.version_info[0] != 3:
 print("This script requires Python 3")
 exit()


 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))

-LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
+debug_printer = pprint.PrettyPrinter(indent=2)
 LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
+LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
+Logg = importlib.import_module("lanforge_client.logg")

+if os.environ.get("LF_USE_AUTOGEN") == 1:
+lanforge_api = importlib.import_module("lanforge_client.lanforge_api")
+LFSession = lanforge_api.LFSession


 class LFCliBase:
+SHOULD_RUN = 0 # indicates normal operation
+SHOULD_QUIT = 1 # indicates to quit loops, close files, send SIGQUIT to threads and return
+SHOULD_HALT = 2 # indicates to quit loops, send SIGABRT to threads and exit

-SHOULD_RUN = 0 # indicates normal operation
-SHOULD_QUIT = 1 # indicates to quit loops, close files, send SIGQUIT to threads and return
-SHOULD_HALT = 2 # indicates to quit loops, send SIGABRT to threads and exit
+# - LOGGING -
+_logger = logging.getLogger(__name__)

 # do not use `super(LFCLiBase,self).__init__(self, host, port, _debug)
 # that is py2 era syntax and will force self into the host variable, making you
@@ -37,7 +46,9 @@ class LFCliBase:
 _exit_on_fail=False,
 _local_realm=None,
 _proxy_str=None,
-_capture_signal_list=[]):
+_capture_signal_list=None):
+if _capture_signal_list is None:
+_capture_signal_list = []
 self.fail_pref = "FAILED: "
 self.pass_pref = "PASSED: "
 self.lfclient_host = _lfjson_host
@@ -48,7 +59,7 @@ class LFCliBase:
 self.proxy = {}
 self.adjust_proxy(_proxy_str)

-if (_local_realm is not None):
+if _local_realm:
 self.local_realm = _local_realm

 # if (_debug):
@@ -69,7 +80,7 @@ class LFCliBase:

 if len(_capture_signal_list) > 0:
 for zignal in _capture_signal_list:
-self.captured_signal(zignal, self.my_captured_signal)
+self.captured_signal(zignal)
 #

 def _finish(self):
@@ -136,7 +147,8 @@ class LFCliBase:
 print("sending signal %s to thread %s" % (signum, name))
 # do a thing

-def my_captured_signal(self, signum):
+@staticmethod
+def my_captured_signal(signum):
 """
 Override me to process signals, otherwise superclass signal handler is called.
 You may use _finish() or _halt() to indicate finishing soon or halting immediately.
@@ -164,6 +176,42 @@ class LFCliBase:
 def clear_test_results(self):
 self.test_results.clear()

+# - LOGGING - we want to remove old logging code
+def log_register_method_name(self, method_name=None):
+if not method_name:
+return
+if os.environ.get("LF_USE_AUTOGEN") == 1:
+Logg.register_method_name(method_name=method_name)
+else:
+if method_name not in self._method_name_list:
+self._method_name_list.append(method_name)
+if method_name not in self._tag_list:
+self._tag_list.append(method_name)
+
+def log_register_tag(self, tag=None):
+if not tag:
+return
+if os.environ.get("LF_USE_AUTOGEN") == 1:
+Logg.register_tag(tag=tag)
+else:
+if tag not in self._tag_list:
+self._tag_list.append(tag)
+self._logger.register_method_name(tag=tag)
+
+def log_enable(self, reserved_tag=None):
+if os.environ.get("LF_USE_AUTOGEN") == 1:
+Logg.enable(reserved_tag=reserved_tag)
+else:
+self.log_register_tag(reserved_tag)
+
+@staticmethod
+def log_set_filename(filename=None):
+if not filename:
+return
+logging.basicConfig(filename=filename)
+
+# - END LOGGING -

 def json_post(self, _req_url, _data, debug_=False, suppress_related_commands_=None, response_json_list_=None):
 """
 send json to the LANforge client
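Illustrative note (not from this commit): a sketch of how a driver script might call the new logging helpers added above. The module path, the _lfjson_port keyword and the presence of _method_name_list/_tag_list initialisation elsewhere in the class are assumptions, not confirmed by this diff:

    import importlib
    lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")  # module path assumed

    test = lfcli_base.LFCliBase(_lfjson_host="localhost", _lfjson_port=8080)  # constructor kwargs assumed
    test.log_set_filename("/tmp/lanforge_test.log")   # plain logging.basicConfig(filename=...)
    test.log_enable("json_post")                      # registers a tag; Logg path only if LF_USE_AUTOGEN is set
    test.log_register_method_name("create_stations")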
@@ -191,7 +239,7 @@ class LFCliBase:
|
|||||||
del _data['suppress_postexec_cli']
|
del _data['suppress_postexec_cli']
|
||||||
if 'suppress_postexec_method' in _data:
|
if 'suppress_postexec_method' in _data:
|
||||||
del _data['suppress_postexec_method']
|
del _data['suppress_postexec_method']
|
||||||
elif suppress_related_commands_ == False:
|
elif not suppress_related_commands_:
|
||||||
_data['suppress_preexec_cli'] = False
|
_data['suppress_preexec_cli'] = False
|
||||||
_data['suppress_preexec_method'] = False
|
_data['suppress_preexec_method'] = False
|
||||||
_data['suppress_postexec_cli'] = False
|
_data['suppress_postexec_cli'] = False
|
||||||
@@ -204,11 +252,11 @@ class LFCliBase:
|
|||||||
|
|
||||||
lf_r.addPostData(_data)
|
lf_r.addPostData(_data)
|
||||||
if debug_:
|
if debug_:
|
||||||
LFUtils.debug_printer.pprint(_data)
|
debug_printer.pprint(_data)
|
||||||
json_response = lf_r.json_post(show_error=debug_,
|
json_response = lf_r.json_post(show_error=debug_,
|
||||||
debug=debug_,
|
debug=debug_,
|
||||||
response_json_list_=response_json_list_,
|
response_json_list_=response_json_list_,
|
||||||
die_on_error_=self.exit_on_error)
|
die_on_error_=self.exit_on_error)
|
||||||
if debug_ and (response_json_list_ is not None):
|
if debug_ and (response_json_list_ is not None):
|
||||||
pprint.pprint(response_json_list_)
|
pprint.pprint(response_json_list_)
|
||||||
except Exception as x:
|
except Exception as x:
|
||||||
@@ -242,7 +290,7 @@ class LFCliBase:
|
|||||||
die_on_error_=self.exit_on_error)
|
die_on_error_=self.exit_on_error)
|
||||||
lf_r.addPostData(_data)
|
lf_r.addPostData(_data)
|
||||||
if debug_:
|
if debug_:
|
||||||
LFUtils.debug_printer.pprint(_data)
|
debug_printer.pprint(_data)
|
||||||
json_response = lf_r.json_put(show_error=self.debug,
|
json_response = lf_r.json_put(show_error=self.debug,
|
||||||
debug=debug_,
|
debug=debug_,
|
||||||
response_json_list_=response_json_list_,
|
response_json_list_=response_json_list_,
|
||||||
@@ -259,23 +307,22 @@ class LFCliBase:
             exit(1)
         return json_response

-    def json_get(self, _req_url, debug_=False):
+    def json_get(self, _req_url, debug_=None):
-        debug_ |= self.debug
         # if debug_:
         # print("json_get: "+_req_url)
         # print("json_get: proxies:")
         # pprint.pprint(self.proxy)
+        if debug_ is None:
+            debug_ = self.debug
         json_response = None
-        # print("----- GET ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ")
         try:
             lf_r = LFRequest.LFRequest(url=self.lfclient_url,
                                        uri=_req_url,
                                        proxies_=self.proxy,
                                        debug_=debug_,
                                        die_on_error_=self.exit_on_error)
-            json_response = lf_r.get_as_json(debug_=debug_, die_on_error_=False)
-            #debug_printer.pprint(json_response)
-            if (json_response is None):
+            json_response = lf_r.get_as_json()
+            if json_response is None:
                 if debug_:
                     if hasattr(lf_r, 'print_errors'):
                         lf_r.print_errors()
@@ -296,7 +343,7 @@ class LFCliBase:
     def json_delete(self, _req_url, debug_=False):
         debug_ |= self.debug
         if debug_:
-            print("DELETE: "+_req_url)
+            print("DELETE: " + _req_url)
         json_response = None
         try:
             # print("----- DELETE ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ")
@@ -307,7 +354,7 @@ class LFCliBase:
                                        die_on_error_=self.exit_on_error)
             json_response = lf_r.json_delete(debug=debug_, die_on_error_=False)
             print(json_response)
-            #debug_printer.pprint(json_response)
+            # debug_printer.pprint(json_response)
             if (json_response is None) and debug_:
                 print("LFCliBase.json_delete: no entity/response, probabily status 404")
                 return None
@@ -352,7 +399,8 @@ class LFCliBase:

         return reverse_map

-    def error(self, exception):
+    @staticmethod
+    def error(exception):
         # print("lfcli_base error: %s" % exception)
         pprint.pprint(exception)
         traceback.print_exception(Exception, exception, exception.__traceback__, chain=True)
@@ -372,11 +420,11 @@ class LFCliBase:
|
|||||||
print("Could not connect to LANforge GUI")
|
print("Could not connect to LANforge GUI")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
#return ALL messages in list form
|
# return ALL messages in list form
|
||||||
def get_result_list(self):
|
def get_result_list(self):
|
||||||
return self.test_results
|
return self.test_results
|
||||||
|
|
||||||
#return ALL fail messages in list form
|
# return ALL fail messages in list form
|
||||||
def get_failed_result_list(self):
|
def get_failed_result_list(self):
|
||||||
fail_list = []
|
fail_list = []
|
||||||
for result in self.test_results:
|
for result in self.test_results:
|
||||||
@@ -384,7 +432,7 @@ class LFCliBase:
|
|||||||
fail_list.append(result)
|
fail_list.append(result)
|
||||||
return fail_list
|
return fail_list
|
||||||
|
|
||||||
#return ALL pass messages in list form
|
# return ALL pass messages in list form
|
||||||
def get_passed_result_list(self):
|
def get_passed_result_list(self):
|
||||||
pass_list = []
|
pass_list = []
|
||||||
for result in self.test_results:
|
for result in self.test_results:
|
||||||
@@ -403,7 +451,7 @@ class LFCliBase:
|
|||||||
def get_all_message(self):
|
def get_all_message(self):
|
||||||
return "\n".join(self.test_results)
|
return "\n".join(self.test_results)
|
||||||
|
|
||||||
#determines if overall test passes via comparing passes vs. fails
|
# determines if overall test passes via comparing passes vs. fails
|
||||||
def passes(self):
|
def passes(self):
|
||||||
pass_counter = 0
|
pass_counter = 0
|
||||||
fail_counter = 0
|
fail_counter = 0
|
||||||
@@ -416,11 +464,11 @@ class LFCliBase:
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
#EXIT script with a fail
|
# EXIT script with a fail
|
||||||
def exit_fail(self, message="%d out of %d tests failed. Exiting script with script failure."):
|
def exit_fail(self, message="%d out of %d tests failed. Exiting script with script failure."):
|
||||||
total_len=len(self.get_result_list())
|
total_len = len(self.get_result_list())
|
||||||
fail_len=len(self.get_failed_result_list())
|
fail_len = len(self.get_failed_result_list())
|
||||||
print(message %(fail_len,total_len))
|
print(message % (fail_len, total_len))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# use this inside the class to log a failure result and print it if wished
|
# use this inside the class to log a failure result and print it if wished
|
||||||
@@ -431,17 +479,17 @@ class LFCliBase:
|
|||||||
if self.exit_on_fail:
|
if self.exit_on_fail:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
#EXIT script with a success
|
# EXIT script with a success
|
||||||
def exit_success(self,message="%d out of %d tests passed successfully. Exiting script with script success."):
|
def exit_success(self, message="%d out of %d tests passed successfully. Exiting script with script success."):
|
||||||
num_total=len(self.get_result_list())
|
num_total = len(self.get_result_list())
|
||||||
num_passing=len(self.get_passed_result_list())
|
num_passing = len(self.get_passed_result_list())
|
||||||
print(message %(num_passing,num_total))
|
print(message % (num_passing, num_total))
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
def success(self,message="%d out of %d tests passed successfully."):
|
def success(self, message="%d out of %d tests passed successfully."):
|
||||||
num_total=len(self.get_result_list())
|
num_total = len(self.get_result_list())
|
||||||
num_passing=len(self.get_passed_result_list())
|
num_passing = len(self.get_passed_result_list())
|
||||||
print(message %(num_passing,num_total))
|
print(message % (num_passing, num_total))
|
||||||
|
|
||||||
# use this inside the class to log a pass result and print if wished.
|
# use this inside the class to log a pass result and print if wished.
|
||||||
def _pass(self, message, print_=False):
|
def _pass(self, message, print_=False):
|
||||||
@@ -465,27 +513,35 @@ class LFCliBase:
         # print("lfclibase::self.proxy: ")
         # pprint.pprint(self.proxy)

-    def logg2(self, level="debug", mesg=None):
+    @staticmethod
+    def logg2(level="debug", mesg=None):
         if (mesg is None) or (mesg == ""):
             return
         print("[{level}]: {msg}".format(level=level, msg=mesg))

-    def logg(self,
-             level=None,
+    @staticmethod
+    def logg(level=None,
              mesg=None,
              filename=None,
              scriptname=None):
+        """
+        This method is used by vr_profile2, lf_create_bcast, and shadowed by base_profile.py
+        :param level:
+        :param mesg:
+        :param filename:
+        :param scriptname:
+        :return:
+        """
         if (mesg is None) or (mesg == "") or (level is None):
             return
-        userhome=os.path.expanduser('~')
-        session = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-h-%M-m-%S-s")).replace(':','-')
-        if filename == None:
-            try:
+        userhome = os.path.expanduser('~')
+        session = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-h-%M-m-%S-s")).replace(':', '-')
+        if filename is None:
+            if not os.path.isdir("%s/report-data/%s" % (userhome, session)):
+                if not os.path.isdir('%s/report-data' % userhome):
+                    os.mkdir('%s/report-data' % userhome)
                 os.mkdir("%s/report-data/%s" % (userhome, session))
-            except:
-                pass
-            filename = ("%s/report-data/%s/%s.log" % (userhome,session,scriptname))
+            filename = ("%s/report-data/%s/%s.log" % (userhome, session, scriptname))
         import logging
         logging.basicConfig(filename=filename, level=logging.DEBUG)
         if level == "debug":
@@ -496,7 +552,7 @@ class LFCliBase:
             logging.warning(mesg)
         elif level == "error":
             logging.error(mesg)

     @staticmethod
     def parse_time(time_string):
         if isinstance(time_string, str):
@@ -547,10 +603,20 @@ class LFCliBase:
         parser = argparse.ArgumentParser()
         optional = parser.add_argument_group('optional arguments')
         required = parser.add_argument_group('required arguments')
-        optional.add_argument('--mgr', help='hostname for where LANforge GUI is running', default='localhost')
-        optional.add_argument('--mgr_port', help='port LANforge GUI HTTP service is running on', default=8080)
-        optional.add_argument('--debug', '-d', help='Enable debugging', default=False, action="store_true")
-        optional.add_argument('--proxy', nargs='?', default=None, # action=ProxyAction,
+        optional.add_argument('--mgr',
+                              default='localhost',
+                              help='hostname for where LANforge GUI is running')
+        optional.add_argument('--mgr_port',
+                              default=8080,
+                              help='port LANforge GUI HTTP service is running on')
+        optional.add_argument('--debug',
+                              '-d',
+                              default=False,
+                              action="store_true",
+                              help='Enable debugging')
+        optional.add_argument('--proxy',
+                              nargs='?',
+                              default=None, # action=ProxyAction,
                               help='Connection proxy like http://proxy.localnet:80 or https://user:pass@proxy.localnet:3128')

         return parser
@@ -574,36 +640,74 @@ class LFCliBase:
|
|||||||
optional = parser.add_argument_group('optional arguments')
|
optional = parser.add_argument_group('optional arguments')
|
||||||
required = parser.add_argument_group('required arguments')
|
required = parser.add_argument_group('required arguments')
|
||||||
|
|
||||||
#Optional Args
|
# Optional Args
|
||||||
optional.add_argument('--mgr', help='hostname for where LANforge GUI is running', default='localhost')
|
optional.add_argument('--mgr',
|
||||||
optional.add_argument('--mgr_port', help='port LANforge GUI HTTP service is running on', default=8080)
|
default='localhost',
|
||||||
optional.add_argument('-u', '--upstream_port',
|
help='hostname for where LANforge GUI is running')
|
||||||
help='non-station port that generates traffic: <resource>.<port>, e.g: 1.eth1',
|
optional.add_argument('--mgr_port',
|
||||||
default='1.eth1')
|
default=8080,
|
||||||
optional.add_argument('--num_stations', help='Number of stations to create', default=0)
|
help='port LANforge GUI HTTP service is running on')
|
||||||
optional.add_argument('--test_id', help='Test ID (intended to use for ws events)', default="webconsole")
|
optional.add_argument('-u',
|
||||||
optional.add_argument('--debug', help='Enable debugging', default=False, action="store_true")
|
'--upstream_port',
|
||||||
optional.add_argument('--proxy', nargs='?', default=None,
|
default='1.eth1',
|
||||||
help='Connection proxy like http://proxy.localnet:80 or https://user:pass@proxy.localnet:3128')
|
help='non-station port that generates traffic: <resource>.<port>, e.g: 1.eth1')
|
||||||
|
optional.add_argument('--num_stations',
|
||||||
|
type=int,
|
||||||
|
default=0,
|
||||||
|
help='Number of stations to create')
|
||||||
|
optional.add_argument('--test_id',
|
||||||
|
default="webconsole",
|
||||||
|
help='Test ID (intended to use for ws events)')
|
||||||
|
optional.add_argument('-d',
|
||||||
|
'--debug',
|
||||||
|
action="store_true",
|
||||||
|
help='Enable debugging')
|
||||||
|
optional.add_argument('--proxy',
|
||||||
|
nargs='?',
|
||||||
|
default=None,
|
||||||
|
help="Connection proxy like http://proxy.localnet:80 \n"
|
||||||
|
+ " or https://user:pass@proxy.localnet:3128")
|
||||||
|
optional.add_argument('--debugging',
|
||||||
|
nargs="+",
|
||||||
|
action="append",
|
||||||
|
help="Indicate what areas you would like express debug output:\n"
|
||||||
|
+ " - digest - print terse indications of lanforge_api calls\n"
|
||||||
|
+ " - json - print url and json data\n"
|
||||||
|
+ " - http - print HTTP headers\n"
|
||||||
|
+ " - gui - ask the GUI for extra debugging in responses\n"
|
||||||
|
+ " - method:method_name - enable by_method() debugging (if present)\n"
|
||||||
|
+ " - tag:tagname - enable matching by_tag() debug output\n"
|
||||||
|
)
|
||||||
|
optional.add_argument('--debug_log',
|
||||||
|
default=None,
|
||||||
|
help="Specify a file to send debug output to")
|
||||||
if more_optional is not None:
|
if more_optional is not None:
|
||||||
for x in more_optional:
|
for argument in more_optional:
|
||||||
if 'default' in x.keys():
|
if 'default' in argument.keys():
|
||||||
optional.add_argument(x['name'], help=x['help'], default=x['default'])
|
optional.add_argument(argument['name'], help=argument['help'], default=argument['default'])
|
||||||
else:
|
else:
|
||||||
optional.add_argument(x['name'], help=x['help'])
|
optional.add_argument(argument['name'], help=argument['help'])
|
||||||
|
|
||||||
#Required Args
|
# Required Args
|
||||||
required.add_argument('--radio', help='radio EID, e.g: 1.wiphy2')
|
required.add_argument('--radio',
|
||||||
required.add_argument('--security', help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >', default="open")
|
help='radio EID, e.g: 1.wiphy2')
|
||||||
required.add_argument('--ssid', help='WiFi SSID for script objects to associate to')
|
required.add_argument('--security',
|
||||||
required.add_argument('--passwd', '--password' ,'--key', help='WiFi passphrase/password/key', default="[BLANK]")
|
default="open",
|
||||||
|
help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >')
|
||||||
|
required.add_argument('--ssid',
|
||||||
|
help='WiFi SSID for script objects to associate to')
|
||||||
|
required.add_argument('--passwd',
|
||||||
|
'--password',
|
||||||
|
'--key',
|
||||||
|
default="[BLANK]",
|
||||||
|
help='WiFi passphrase/password/key')
|
||||||
|
|
||||||
if more_required is not None:
|
if more_required is not None:
|
||||||
for x in more_required:
|
for argument in more_required:
|
||||||
if 'default' in x.keys():
|
if 'default' in argument.keys():
|
||||||
required.add_argument(x['name'], help=x['help'], default=x['default'])
|
required.add_argument(argument['name'], help=argument['help'], default=argument['default'])
|
||||||
else:
|
else:
|
||||||
required.add_argument(x['name'], help=x['help'])
|
required.add_argument(argument['name'], help=argument['help'])
|
||||||
|
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
@@ -622,24 +726,29 @@ class LFCliBase:
         }
         self.json_post("/cli-json/add_event", data, debug_=debug_)

-    def read_file(self, filename):
+    @staticmethod
+    def read_file(filename):
         filename = open(filename, 'r')
         return [line.split(',') for line in filename.readlines()]

-    #Function creates random characters made of letters
-    def random_chars(self, size, chars=None):
+    # Function creates random characters made of letters
+    @staticmethod
+    def random_chars(size, chars=None):
         if chars is None:
             chars = string.ascii_letters
         return ''.join(random.choice(chars) for x in range(size))

-    def get_milliseconds(self, timestamp):
-        return (timestamp - datetime.datetime(1970,1,1)).total_seconds()*1000
+    @staticmethod
+    def get_milliseconds(timestamp):
+        return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds() * 1000

-    def get_seconds(self, timestamp):
-        return (timestamp - datetime.datetime(1970,1,1)).total_seconds()
+    @staticmethod
+    def get_seconds(timestamp):
+        return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds()

-    def replace_special_char(self, str):
-        return str.replace('+', ' ').replace('_', ' ').strip(' ')
+    @staticmethod
+    def replace_special_char(special_str):
+        return special_str.replace('+', ' ').replace('_', ' ').strip(' ')

     Help_Mode = """Station WiFi modes: use the number value below:
                 auto : 0,
@@ -656,4 +765,4 @@ class LFCliBase:
                 bgnAC : 11,
                 abgnAX : 12,
                 bgnAX : 13
     """

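For reference, a minimal usage sketch (not part of this commit) of the refactored argparse helpers above; the script name and the sys.path depth are assumptions that depend on where such a script would live in the tree:

# sketch: consume LFCliBase.create_basic_argparse() the same way create_wanlink.py below does
import os
import sys
import argparse
import importlib

sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))  # depth is hypothetical
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase

parser = LFCliBase.create_basic_argparse(prog='my_test.py',  # hypothetical prog name
                                         formatter_class=argparse.RawTextHelpFormatter)
args = parser.parse_args()
print("LANforge GUI at %s:%s, debug=%s" % (args.mgr, args.mgr_port, args.debug))
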
@@ -81,7 +81,7 @@ class pandas_extensions:
                     print(for_loop_df1.at[0, col])
                     print(for_loop_df2.at[0, col])
                     if type(for_loop_df1.at[0, col]) == str and type(for_loop_df2.at[0, col]) == str:
-                        if (' ' in for_loop_df1.at[0, col]) == True:
+                        if (' ' in for_loop_df1.at[0, col]):
                             # do subtraction
                             new_value = float(for_loop_df1.at[0, col].split(" ")[0]) - float(
                                 for_loop_df2.at[0, col].split(" ")[0])

@@ -6,17 +6,16 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit()


-sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../../")))

-lf_json_autogen = importlib.import_module("py-json.LANforge.lf_json_autogen")
-LFJsonPost = lf_json_autogen.LFJsonPost
+sys.path.append(os.path.join(os.path.abspath(__file__ + "../../")))


 if os.environ.get("LF_USE_AUTOGEN") == 1:
-    set_port_current_flags = LFJsonPost.SetPortCurrentFlags.__members__
-    set_port_cmd_flags = LFJsonPost.SetPortCmdFlags.__members__
-    set_port_interest_flags = LFJsonPost.SetPortInterest.__members__
+    lanforge_api = importlib.import_module("lanforge_client.lanforge_api")
+    LFJsonCommand = lanforge_api.LFJsonCommand
+    set_port_current_flags = LFJsonCommand.SetPortCurrentFlags.__members__
+    set_port_cmd_flags = LFJsonCommand.SetPortCmdFlags.__members__
+    set_port_interest_flags = LFJsonCommand.SetPortInterest.__members__

 else:
     set_port_current_flags = {

@@ -2,6 +2,12 @@
 # Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python
 # Written by Candela Technologies Inc.
 # Updated by: Erin Grimes

+"""
+sample command:
+./test_wanlink.py --name my_wanlink4 --latency_A 20 --latency_B 69 --rate 1000 --jitter_A 53 --jitter_B 73 --jitter_freq 6 --drop_A 12 --drop_B 11
+"""

 import sys
 import urllib
 import importlib
@@ -13,19 +19,23 @@ import os
 from time import sleep
 from urllib import error
 import pprint
+import argparse


 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
 LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
+lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
+LFCliBase = lfcli_base.LFCliBase

 j_printer = pprint.PrettyPrinter(indent=2)
 # todo: this needs to change
 resource_id = 1


-def main(base_url, args={}):
+def main(args):
+    base_url = 'http://'+args['host']+':8080'
     print(base_url)
     json_post = ""
     json_response = ""
@@ -36,7 +46,7 @@ def main(base_url, args={}):
|
|||||||
print(lf_r.get_as_json())
|
print(lf_r.get_as_json())
|
||||||
|
|
||||||
# remove old wanlinks
|
# remove old wanlinks
|
||||||
if (num_wanlinks > 0):
|
if num_wanlinks > 0:
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_cx")
|
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_cx")
|
||||||
lf_r.addPostData({
|
lf_r.addPostData({
|
||||||
'test_mgr': 'all',
|
'test_mgr': 'all',
|
||||||
@@ -49,10 +59,10 @@ def main(base_url, args={}):
|
|||||||
json_response = lf_r.getAsJson()
|
json_response = lf_r.getAsJson()
|
||||||
LFUtils.debug_printer.pprint(json_response)
|
LFUtils.debug_printer.pprint(json_response)
|
||||||
for key, value in json_response.items():
|
for key, value in json_response.items():
|
||||||
if (isinstance(value, dict) and "_links" in value):
|
if isinstance(value, dict) and "_links" in value:
|
||||||
num_wanlinks = 1
|
num_wanlinks = 1
|
||||||
except urllib.error.HTTPError as error:
|
except urllib.error.HTTPError as error:
|
||||||
print("Error code "+error.code)
|
print("Error code %s" % error.code)
|
||||||
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp")
|
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp")
|
||||||
lf_r.addPostData({
|
lf_r.addPostData({
|
||||||
@@ -129,18 +139,18 @@ def main(base_url, args={}):
|
|||||||
|
|
||||||
# start wanlink once we see it
|
# start wanlink once we see it
|
||||||
seen = 0
|
seen = 0
|
||||||
while (seen < 1):
|
while seen < 1:
|
||||||
sleep(1)
|
sleep(1)
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
|
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
|
||||||
try:
|
try:
|
||||||
json_response = lf_r.getAsJson()
|
json_response = lf_r.getAsJson()
|
||||||
if (json_response is None):
|
if json_response is None:
|
||||||
continue
|
continue
|
||||||
LFUtils.debug_printer.pprint(json_response)
|
LFUtils.debug_printer.pprint(json_response)
|
||||||
for key, value in json_response.items():
|
for key, value in json_response.items():
|
||||||
if (isinstance(value, dict)):
|
if isinstance(value, dict):
|
||||||
if ("_links" in value):
|
if "_links" in value:
|
||||||
if (value["name"] == args['name']):
|
if value["name"] == args['name']:
|
||||||
seen = 1
|
seen = 1
|
||||||
else:
|
else:
|
||||||
pass
|
pass
|
||||||
@@ -152,71 +162,71 @@ def main(base_url, args={}):
|
|||||||
# print("value not a dict")
|
# print("value not a dict")
|
||||||
|
|
||||||
except urllib.error.HTTPError as error:
|
except urllib.error.HTTPError as error:
|
||||||
print("Error code "+error.code)
|
print("Error code %s " % error.code)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
print("starting wanlink:")
|
# print("starting wanlink:")
|
||||||
# print("the latency is {laten}".format(laten=latency))
|
# # print("the latency is {laten}".format(laten=latency))
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
|
# lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
|
||||||
lf_r.addPostData({
|
# lf_r.addPostData({
|
||||||
'test_mgr': 'all',
|
# 'test_mgr': 'all',
|
||||||
'cx_name': args['name'],
|
# 'cx_name': args['name'],
|
||||||
'cx_state': 'RUNNING'
|
# 'cx_state': 'RUNNING'
|
||||||
})
|
# })
|
||||||
lf_r.jsonPost()
|
# lf_r.jsonPost()
|
||||||
|
|
||||||
running = 0
|
running = 0
|
||||||
while (running < 1):
|
while running < 1:
|
||||||
sleep(1)
|
sleep(1)
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
|
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
|
||||||
try:
|
try:
|
||||||
json_response = lf_r.getAsJson()
|
json_response = lf_r.getAsJson()
|
||||||
if (json_response is None):
|
if json_response is None:
|
||||||
continue
|
continue
|
||||||
for key, value in json_response.items():
|
for key, value in json_response.items():
|
||||||
if (isinstance(value, dict)):
|
if isinstance(value, dict):
|
||||||
if ("_links" in value):
|
if "_links" in value:
|
||||||
if (value["name"] == args['name']):
|
if value["name"] == args['name']:
|
||||||
if (value["state"].startswith("Run")):
|
if value["state"].startswith("Run"):
|
||||||
LFUtils.debug_printer.pprint(json_response)
|
LFUtils.debug_printer.pprint(json_response)
|
||||||
running = 1
|
running = 1
|
||||||
|
|
||||||
except urllib.error.HTTPError as error:
|
except urllib.error.HTTPError as error:
|
||||||
print("Error code "+error.code)
|
print("Error code %s" % error.code)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
print("Wanlink is running")
|
print("Wanlink is running")
|
||||||
|
|
||||||
# stop wanlink
|
# # stop wanlink
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
|
# lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
|
||||||
lf_r.addPostData({
|
# lf_r.addPostData({
|
||||||
'test_mgr': 'all',
|
# 'test_mgr': 'all',
|
||||||
'cx_name': args['name'],
|
# 'cx_name': args['name'],
|
||||||
'cx_state': 'STOPPED'
|
# 'cx_state': 'STOPPED'
|
||||||
})
|
# })
|
||||||
lf_r.jsonPost()
|
# lf_r.jsonPost()
|
||||||
running = 1
|
# running = 1
|
||||||
while (running > 0):
|
# while (running > 0):
|
||||||
sleep(1)
|
# sleep(1)
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,eid,state,_links")
|
# lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,eid,state,_links")
|
||||||
LFUtils.debug_printer.pprint(json_response)
|
# LFUtils.debug_printer.pprint(json_response)
|
||||||
try:
|
# try:
|
||||||
json_response = lf_r.getAsJson()
|
# json_response = lf_r.getAsJson()
|
||||||
if (json_response is None):
|
# if (json_response is None):
|
||||||
continue
|
# continue
|
||||||
for key, value in json_response.items():
|
# for key, value in json_response.items():
|
||||||
if (isinstance(value, dict)):
|
# if (isinstance(value, dict)):
|
||||||
if ("_links" in value):
|
# if ("_links" in value):
|
||||||
if (value["name"] == args['name']):
|
# if (value["name"] == args['name']):
|
||||||
if (value["state"].startswith("Stop")):
|
# if (value["state"].startswith("Stop")):
|
||||||
LFUtils.debug_printer.pprint(json_response)
|
# LFUtils.debug_printer.pprint(json_response)
|
||||||
running = 0
|
# running = 0
|
||||||
|
|
||||||
except urllib.error.HTTPError as error:
|
# except urllib.error.HTTPError as error:
|
||||||
print("Error code "+error.code)
|
# print("Error code "+error.code)
|
||||||
continue
|
# continue
|
||||||
|
|
||||||
print("Wanlink is stopped.")
|
# print("Wanlink is stopped.")
|
||||||
|
|
||||||
# print("Wanlink info:")
|
# print("Wanlink info:")
|
||||||
# lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
|
# lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
|
||||||
@@ -235,4 +245,67 @@ def main(base_url, args={}):


 if __name__ == '__main__':
-    main()
+    parser = LFCliBase.create_basic_argparse(
+        prog='create_wanlink.py',
+        formatter_class=argparse.RawTextHelpFormatter)
+    for group in parser._action_groups:
+        if group.title == "required arguments":
+            required_args = group
+            break
+
+    optional_args = None
+    for group in parser._action_groups:
+        if group.title == "optional arguments":
+            optional_args = group
+            break
+    if optional_args is not None:
+        optional_args.add_argument('--host', help='The resource IP address', default="localhost")
+        optional_args.add_argument('--port_A', help='Endpoint A', default="eth1")
+        optional_args.add_argument('--port_B', help='Endpoint B', default="eth2")
+        optional_args.add_argument('--name', help='The name of the wanlink', default="wl_eg1")
+        optional_args.add_argument('--rate', help='The maximum rate of transfer at both endpoints (bits/s)', default=1000000)
+        optional_args.add_argument('--rate_A', help='The max rate of transfer at endpoint A (bits/s)', default=None)
+        optional_args.add_argument('--rate_B', help='The maximum rate of transfer (bits/s)', default=None)
+        optional_args.add_argument('--latency', help='The delay of both ports', default=20)
+        optional_args.add_argument('--latency_A', help='The delay of port A', default=None)
+        optional_args.add_argument('--latency_B', help='The delay of port B', default=None)
+        optional_args.add_argument('--jitter', help='The max jitter of both ports (ms)', default=None)
+        optional_args.add_argument('--jitter_A', help='The max jitter of port A (ms)', default=None)
+        optional_args.add_argument('--jitter_B', help='The max jitter of port B (ms)', default=None)
+        optional_args.add_argument('--jitter_freq', help='The jitter frequency of both ports (%%)', default=None)
+        optional_args.add_argument('--jitter_freq_A', help='The jitter frequency of port A (%%)', default=None)
+        optional_args.add_argument('--jitter_freq_B', help='The jitter frequency of port B (%%)', default=None)
+        optional_args.add_argument('--drop', help='The drop frequency of both ports (%%)', default=None)
+        optional_args.add_argument('--drop_A', help='The drop frequency of port A (%%)', default=None)
+        optional_args.add_argument('--drop_B', help='The drop frequency of port B (%%)', default=None)
+        # todo: packet loss A and B
+        # todo: jitter A and B
+    for group in parser._action_groups:
+        if group.title == "optional arguments":
+            optional_args = group
+            break
+    parseargs = parser.parse_args()
+    args = {
+        "host": parseargs.mgr,
+        "port": parseargs.mgr_port,
+        "name": parseargs.name,
+        "port_A": parseargs.port_A,
+        "port_B": parseargs.port_B,
+        "latency": parseargs.latency,
+        "latency_A": (parseargs.latency_A if parseargs.latency_A is not None else parseargs.latency),
+        "latency_B": (parseargs.latency_B if parseargs.latency_B is not None else parseargs.latency),
+        "rate": parseargs.rate,
+        "rate_A": (parseargs.rate_A if parseargs.rate_A is not None else parseargs.rate),
+        "rate_B": (parseargs.rate_B if parseargs.rate_B is not None else parseargs.rate),
+        "jitter": parseargs.jitter,
+        "jitter_A": (parseargs.jitter_A if parseargs.jitter_A is not None else parseargs.jitter),
+        "jitter_B": (parseargs.jitter_B if parseargs.jitter_B is not None else parseargs.jitter),
+        "jitter_freq": parseargs.jitter,
+        "jitter_freq_A": (parseargs.jitter_freq_A if parseargs.jitter_freq_A is not None else parseargs.jitter_freq),
+        "jitter_freq_B": (parseargs.jitter_freq_B if parseargs.jitter_freq_B is not None else parseargs.jitter_freq),
+        "drop": parseargs.drop,
+        "drop_A": (parseargs.drop_A if parseargs.drop_A is not None else parseargs.drop),
+        "drop_B": (parseargs.drop_B if parseargs.drop_B is not None else parseargs.drop),
+    }
+
+    main(args)

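A sketch (not part of this commit) of calling main() directly with the same keys the block above assembles from argparse; all values are placeholders:

sample_args = {
    'host': 'localhost', 'port': 8080, 'name': 'wl_eg1',
    'port_A': 'eth1', 'port_B': 'eth2',
    'latency': 20, 'latency_A': 20, 'latency_B': 20,
    'rate': 1000000, 'rate_A': 1000000, 'rate_B': 1000000,
    'jitter': None, 'jitter_A': None, 'jitter_B': None,
    'jitter_freq': None, 'jitter_freq_A': None, 'jitter_freq_B': None,
    'drop': None, 'drop_A': None, 'drop_B': None,
}
# main(sample_args)  # needs a reachable LANforge GUI at http://localhost:8080
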
@@ -40,9 +40,12 @@ class cv_dut(LFCliBase):
         self.lan_port = "[BLANK]"
         self.api_id = "0"
         self.flags_mask = "NA"
-        if desired_dut_flags is not None:
+        if desired_dut_flags:
             self.dut_flags = desired_dut_flags
             self.dut_flags_mask = desired_dut_flags_mask
+            if self.dut_flags:
+                self.flags = self.add_named_flags(self.dut_flags, add_dut_flags)
+                self.flags_mask = self.add_named_flags(self.dut_flags_mask, add_dut_flags)

     def add_named_flags(self, desired_list, command_ref):
         if desired_list is None:
@@ -80,11 +83,6 @@ class cv_dut(LFCliBase):
                 top_left_x="NA",
                 top_left_y="NA",
                 ):
-        try:
-            self.flags = self.add_named_flags(self.dut_flags, add_dut_flags)
-            self.flags_mask = self.add_named_flags(self.dut_flags_mask, add_dut_flags)
-        except:
-            pass
         response_json = []
         req_url = "/cli-json/add_dut"
         data = {
@@ -129,13 +127,13 @@ class cv_dut(LFCliBase):
                      ssid_flags=0,
                      ssid_flags_mask=0xFFFFFFFF):
         req_url = "/cli-json/add_dut_ssid"
-        print("name:" + dut_name,
-              "ssid_idx:" + ssid_idx,
-              "ssid:" + ssid,
-              "passwd:" + passwd,
-              "bssid:" + bssid,
-              "ssid_flags:" + str(ssid_flags),
-              "ssid_flags_mask:" + str(ssid_flags_mask))
+        print("name: %s" % dut_name,
+              "ssid_idx: %s" % ssid_idx,
+              "ssid: %s" % ssid,
+              "passwd: %s" % passwd,
+              "bssid: %s" % bssid,
+              "ssid_flags: %s" % ssid_flags,
+              "ssid_flags_mask: %s" % ssid_flags_mask)

         self.json_post(req_url, {
             "name": dut_name,

@@ -9,7 +9,6 @@ import importlib
|
|||||||
import time
|
import time
|
||||||
import json
|
import json
|
||||||
from pprint import pprint
|
from pprint import pprint
|
||||||
import argparse
|
|
||||||
|
|
||||||
if sys.version_info[0] != 3:
|
if sys.version_info[0] != 3:
|
||||||
print("This script requires Python 3")
|
print("This script requires Python 3")
|
||||||
@@ -56,9 +55,9 @@ def cv_add_base_parser(parser):
|
|||||||
parser.add_argument("-c", "--config_name", type=str, default="cv_dflt_cfg",
|
parser.add_argument("-c", "--config_name", type=str, default="cv_dflt_cfg",
|
||||||
help="Config file name")
|
help="Config file name")
|
||||||
|
|
||||||
parser.add_argument("-r", "--pull_report", default=False, action='store_true',
|
parser.add_argument("-r", "--pull_report", action='store_true',
|
||||||
help="pull reports from lanforge (by default: False)")
|
help="pull reports from lanforge (by default: False)")
|
||||||
parser.add_argument("--load_old_cfg", default=False, action='store_true',
|
parser.add_argument("--load_old_cfg", action='store_true',
|
||||||
help="Should we first load defaults from previous run of the capacity test? Default is False")
|
help="Should we first load defaults from previous run of the capacity test? Default is False")
|
||||||
|
|
||||||
parser.add_argument("--enable", action='append', nargs=1, default=[],
|
parser.add_argument("--enable", action='append', nargs=1, default=[],
|
||||||
@@ -86,7 +85,7 @@ class cv_test(Realm):
|
|||||||
def __init__(self,
|
def __init__(self,
|
||||||
lfclient_host="localhost",
|
lfclient_host="localhost",
|
||||||
lfclient_port=8080,
|
lfclient_port=8080,
|
||||||
lf_report_dir=""
|
lf_report_dir=None
|
||||||
):
|
):
|
||||||
super().__init__(lfclient_host=lfclient_host,
|
super().__init__(lfclient_host=lfclient_host,
|
||||||
lfclient_port=lfclient_port)
|
lfclient_port=lfclient_port)
|
||||||
@@ -105,7 +104,7 @@ class cv_test(Realm):
|
|||||||
|
|
||||||
print("adding- " + text + " " + "to test config")
|
print("adding- " + text + " " + "to test config")
|
||||||
|
|
||||||
rsp = self.json_post(req_url, data)
|
self.json_post(req_url, data)
|
||||||
# time.sleep(1)
|
# time.sleep(1)
|
||||||
|
|
||||||
# Tell LANforge GUI Chamber View to launch a test
|
# Tell LANforge GUI Chamber View to launch a test
|
||||||
@@ -138,24 +137,6 @@ class cv_test(Realm):
|
|||||||
cmd = "cv click '%s' Cancel" % instance
|
cmd = "cv click '%s' Cancel" % instance
|
||||||
self.run_cv_cmd(cmd)
|
self.run_cv_cmd(cmd)
|
||||||
|
|
||||||
# Send chamber view commands to the LANforge GUI
|
|
||||||
def run_cv_cmd(self, command):
|
|
||||||
response_json = []
|
|
||||||
req_url = "/gui-json/cmd"
|
|
||||||
data = {
|
|
||||||
"cmd": command
|
|
||||||
}
|
|
||||||
debug_par = ""
|
|
||||||
rsp = self.json_post("/gui-json/cmd%s" % debug_par, data, debug_=False, response_json_list_=response_json)
|
|
||||||
try:
|
|
||||||
if response_json[0]["LAST"]["warnings"].startswith("Unknown"):
|
|
||||||
print("Unknown command?\n");
|
|
||||||
pprint(response_json)
|
|
||||||
except:
|
|
||||||
# Ignore un-handled structs at this point, let calling code deal with it.
|
|
||||||
pass
|
|
||||||
return response_json
|
|
||||||
|
|
||||||
# For auto save report
|
# For auto save report
|
||||||
def auto_save_report(self, instance):
|
def auto_save_report(self, instance):
|
||||||
cmd = "cv click %s 'Auto Save Report'" % instance
|
cmd = "cv click %s 'Auto Save Report'" % instance
|
||||||
@@ -165,16 +146,6 @@ class cv_test(Realm):
|
|||||||
def get_report_location(self, instance):
|
def get_report_location(self, instance):
|
||||||
cmd = "cv get %s 'Report Location:'" % instance
|
cmd = "cv get %s 'Report Location:'" % instance
|
||||||
location = self.run_cv_cmd(cmd)
|
location = self.run_cv_cmd(cmd)
|
||||||
var = 1
|
|
||||||
while var != 0:
|
|
||||||
try:
|
|
||||||
data = json.dumps(location[0]["LAST"]["response"])
|
|
||||||
var = 0
|
|
||||||
except Exception as e:
|
|
||||||
var += 1
|
|
||||||
time.sleep(2)
|
|
||||||
if var > 5:
|
|
||||||
break
|
|
||||||
return location
|
return location
|
||||||
|
|
||||||
# To get if test is running or not
|
# To get if test is running or not
|
||||||
@@ -216,7 +187,7 @@ class cv_test(Realm):
|
|||||||
if self.get_exists(instance):
|
if self.get_exists(instance):
|
||||||
print("Waiting %i/60 for test instance: %s to be deleted." % (tries, instance))
|
print("Waiting %i/60 for test instance: %s to be deleted." % (tries, instance))
|
||||||
tries += 1
|
tries += 1
|
||||||
if (tries > 60):
|
if tries > 60:
|
||||||
break
|
break
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
else:
|
else:
|
||||||
@@ -226,9 +197,9 @@ class cv_test(Realm):
|
|||||||
tries = 0
|
tries = 0
|
||||||
while True:
|
while True:
|
||||||
if not self.get_cv_is_built():
|
if not self.get_cv_is_built():
|
||||||
print("Waiting %i/60 for Chamber-View to be built." % (tries))
|
print("Waiting %i/60 for Chamber-View to be built." % tries)
|
||||||
tries += 1
|
tries += 1
|
||||||
if (tries > 60):
|
if tries > 60:
|
||||||
break
|
break
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
else:
|
else:
|
||||||
@@ -258,17 +229,18 @@ class cv_test(Realm):
|
|||||||
"type": "Plugin-Settings",
|
"type": "Plugin-Settings",
|
||||||
"name": str(blob_test_name + config_name), # config name
|
"name": str(blob_test_name + config_name), # config name
|
||||||
}
|
}
|
||||||
rsp = self.json_post(req_url, data)
|
self.json_post(req_url, data)
|
||||||
|
|
||||||
def rm_cv_text_blob(self, type="Network-Connectivity", name=None):
|
def rm_cv_text_blob(self, cv_type="Network-Connectivity", name=None):
|
||||||
req_url = "/cli-json/rm_text_blob"
|
req_url = "/cli-json/rm_text_blob"
|
||||||
data = {
|
data = {
|
||||||
"type": type,
|
"type": cv_type,
|
||||||
"name": name, # config name
|
"name": name, # config name
|
||||||
}
|
}
|
||||||
rsp = self.json_post(req_url, data)
|
self.json_post(req_url, data)
|
||||||
|
|
||||||
def apply_cfg_options(self, cfg_options, enables, disables, raw_lines, raw_lines_file):
|
@staticmethod
|
||||||
|
def apply_cfg_options(cfg_options, enables, disables, raw_lines, raw_lines_file):
|
||||||
|
|
||||||
# Read in calibration data and whatever else.
|
# Read in calibration data and whatever else.
|
||||||
if raw_lines_file != "":
|
if raw_lines_file != "":
|
||||||
@@ -315,7 +287,7 @@ class cv_test(Realm):
|
|||||||
# cv_cmds: Array of raw chamber-view commands, such as "cv click 'button-name'"
|
# cv_cmds: Array of raw chamber-view commands, such as "cv click 'button-name'"
|
||||||
# These (and the sets) are applied after the test is created and before it is started.
|
# These (and the sets) are applied after the test is created and before it is started.
|
||||||
def create_and_run_test(self, load_old_cfg, test_name, instance_name, config_name, sets,
|
def create_and_run_test(self, load_old_cfg, test_name, instance_name, config_name, sets,
|
||||||
pull_report, lf_host, lf_user, lf_password, cv_cmds, local_lf_report_dir="", ssh_port=22,
|
pull_report, lf_host, lf_user, lf_password, cv_cmds, local_lf_report_dir=None, ssh_port=22,
|
||||||
graph_groups_file=None):
|
graph_groups_file=None):
|
||||||
load_old = "false"
|
load_old = "false"
|
||||||
if load_old_cfg:
|
if load_old_cfg:
|
||||||
@@ -327,7 +299,7 @@ class cv_test(Realm):
|
|||||||
if response[0]["LAST"]["response"] == "OK":
|
if response[0]["LAST"]["response"] == "OK":
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
print("Could not create test, try: %i/60:\n" % (start_try))
|
print("Could not create test, try: %i/60:\n" % start_try)
|
||||||
pprint(response)
|
pprint(response)
|
||||||
start_try += 1
|
start_try += 1
|
||||||
if start_try > 60:
|
if start_try > 60:
|
||||||
@@ -349,7 +321,7 @@ class cv_test(Realm):
|
|||||||
|
|
||||||
response = self.start_test(instance_name)
|
response = self.start_test(instance_name)
|
||||||
if response[0]["LAST"]["response"].__contains__("Could not find instance:"):
|
if response[0]["LAST"]["response"].__contains__("Could not find instance:"):
|
||||||
print("ERROR: start_test failed: ", response[0]["LAST"]["response"], "\n");
|
print("ERROR: start_test failed: ", response[0]["LAST"]["response"], "\n")
|
||||||
# pprint(response)
|
# pprint(response)
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
@@ -357,16 +329,12 @@ class cv_test(Realm):
|
|||||||
while True:
|
while True:
|
||||||
cmd = "cv get_and_close_dialog"
|
cmd = "cv get_and_close_dialog"
|
||||||
dialog = self.run_cv_cmd(cmd)
|
dialog = self.run_cv_cmd(cmd)
|
||||||
try:
|
if dialog[0]["LAST"]["response"] != "NO-DIALOG":
|
||||||
if dialog[0]["LAST"]["response"] != "NO-DIALOG":
|
print("Popup Dialog:\n")
|
||||||
print("Popup Dialog:\n")
|
print(dialog[0]["LAST"]["response"])
|
||||||
print(dialog[0]["LAST"]["response"])
|
|
||||||
except Exception as e:
|
|
||||||
print(e)
|
|
||||||
|
|
||||||
check = self.get_report_location(instance_name)
|
check = self.get_report_location(instance_name)
|
||||||
location = json.dumps(check[0]["LAST"]["response"])
|
location = json.dumps(check[0]["LAST"]["response"])
|
||||||
|
|
||||||
if location != '\"Report Location:::\"':
|
if location != '\"Report Location:::\"':
|
||||||
print(location)
|
print(location)
|
||||||
location = location.replace('\"Report Location:::', '')
|
location = location.replace('\"Report Location:::', '')
|
||||||
@@ -385,25 +353,25 @@ class cv_test(Realm):
|
|||||||
self.lf_report_dir = location
|
self.lf_report_dir = location
|
||||||
if pull_report:
|
if pull_report:
|
||||||
try:
|
try:
|
||||||
print(lf_host)
|
print("Pulling report to directory: %s from %s@%s/%s" %
|
||||||
|
(local_lf_report_dir, lf_user, lf_host, location))
|
||||||
report.pull_reports(hostname=lf_host, username=lf_user, password=lf_password,
|
report.pull_reports(hostname=lf_host, username=lf_user, password=lf_password,
|
||||||
port=ssh_port, report_dir=local_lf_report_dir,
|
port=ssh_port, report_dir=local_lf_report_dir,
|
||||||
report_location=location)
|
report_location=location)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("SCP failed, user %s, password %s, dest %s", (lf_user, lf_password, lf_host))
|
print("SCP failed, user %s, password %s, dest %s" % (lf_user, lf_password, lf_host))
|
||||||
raise e # Exception("Could not find Reports")
|
raise e # Exception("Could not find Reports")
|
||||||
break
|
break
|
||||||
|
else:
|
||||||
|
print('Not reporting to kpi file')
|
||||||
|
|
||||||
# Of if test stopped for some reason and could not generate report.
|
# Of if test stopped for some reason and could not generate report.
|
||||||
try:
|
if not self.get_is_running(instance_name):
|
||||||
if not self.get_is_running(instance_name):
|
print("Detected test is not running.")
|
||||||
print("Detected test is not running.")
|
not_running += 1
|
||||||
not_running += 1
|
if not_running > 5:
|
||||||
if not_running > 5:
|
break
|
||||||
break
|
|
||||||
except Exception as e:
|
|
||||||
print(e)
|
|
||||||
|
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
self.report_name = self.get_report_location(instance_name)
|
self.report_name = self.get_report_location(instance_name)
|
||||||
# Ensure test is closed and cleaned up
|
# Ensure test is closed and cleaned up
|
||||||
@@ -411,20 +379,17 @@ class cv_test(Realm):
|
|||||||
|
|
||||||
# Clean up any remaining popups.
|
# Clean up any remaining popups.
|
||||||
while True:
|
while True:
|
||||||
dialog = self.run_cv_cmd(cmd);
|
dialog = self.run_cv_cmd(cmd)
|
||||||
if dialog[0]["LAST"]["response"] != "NO-DIALOG":
|
if dialog[0]["LAST"]["response"] != "NO-DIALOG":
|
||||||
print("Popup Dialog:\n")
|
print("Popup Dialog:\n")
|
||||||
print(dialog[0]["LAST"]["response"])
|
print(dialog[0]["LAST"]["response"])
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
|
|
||||||
def a(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Takes cmd-line args struct or something that looks like it.
|
# Takes cmd-line args struct or something that looks like it.
|
||||||
# See csv_to_influx.py::influx_add_parser_args for options, or --help.
|
# See csv_to_influx.py::influx_add_parser_args for options, or --help.
|
||||||
def check_influx_kpi(self, args):
|
def check_influx_kpi(self, args):
|
||||||
if self.lf_report_dir == "":
|
if self.lf_report_dir is None:
|
||||||
# Nothing to report on.
|
# Nothing to report on.
|
||||||
print("Not submitting to influx, no report-dir.\n")
|
print("Not submitting to influx, no report-dir.\n")
|
||||||
return
|
return
|
||||||
@@ -446,12 +411,12 @@ class cv_test(Realm):
|
|||||||
|
|
||||||
# lf_wifi_capacity_test.py may be run / initiated by a remote system against a lanforge
|
# lf_wifi_capacity_test.py may be run / initiated by a remote system against a lanforge
|
||||||
# the local_lf_report_dir is where data is stored, if there is no local_lf_report_dir then the test is run directly on lanforge
|
# the local_lf_report_dir is where data is stored, if there is no local_lf_report_dir then the test is run directly on lanforge
|
||||||
if self.local_lf_report_dir == "":
|
if self.lf_report_dir:
|
||||||
csv_path = "%s/kpi.csv" % (self.lf_report_dir)
|
csv_path = "%s/kpi.csv" % self.lf_report_dir
|
||||||
else:
|
else:
|
||||||
kpi_location = self.local_lf_report_dir + "/" + os.path.basename(self.lf_report_dir)
|
kpi_location = self.lf_report_dir + "/" + os.path.basename(self.lf_report_dir)
|
||||||
# the local_lf_report_dir is the parent directory, need to get the directory name
|
# the lf_report_dir is the parent directory, need to get the directory name
|
||||||
csv_path = "%s/kpi.csv" % (kpi_location)
|
csv_path = "%s/kpi.csv" % kpi_location
|
||||||
|
|
||||||
print("Attempt to submit kpi: ", csv_path)
|
print("Attempt to submit kpi: ", csv_path)
|
||||||
print("Posting to influx...\n")
|
print("Posting to influx...\n")
|
||||||
@@ -483,7 +448,7 @@ class cv_test(Realm):
|
|||||||
"text": text_blob
|
"text": text_blob
|
||||||
}
|
}
|
||||||
|
|
||||||
rsp = self.json_post(req_url, data)
|
self.json_post(req_url, data)
|
||||||
|
|
||||||
def pass_raw_lines_to_cv(self,
|
def pass_raw_lines_to_cv(self,
|
||||||
scenario_name="Automation",
|
scenario_name="Automation",
|
||||||
@@ -494,7 +459,7 @@ class cv_test(Realm):
|
|||||||
"name": scenario_name,
|
"name": scenario_name,
|
||||||
"text": Rawline
|
"text": Rawline
|
||||||
}
|
}
|
||||||
rsp = self.json_post(req_url, data)
|
self.json_post(req_url, data)
|
||||||
|
|
||||||
# This is for chamber view buttons
|
# This is for chamber view buttons
|
||||||
|
|
||||||
@@ -520,18 +485,17 @@ class cv_test(Realm):
|
|||||||
def run_cv_cmd(self, command): # Send chamber view commands
|
def run_cv_cmd(self, command): # Send chamber view commands
|
||||||
response_json = []
|
response_json = []
|
||||||
req_url = "/gui-json/cmd"
|
req_url = "/gui-json/cmd"
|
||||||
data = {
|
data = {"cmd": command}
|
||||||
"cmd": command
|
self.json_post(req_url, data, debug_=False, response_json_list_=response_json)
|
||||||
}
|
|
||||||
rsp = self.json_post(req_url, data, debug_=False, response_json_list_=response_json)
|
|
||||||
return response_json
|
return response_json
|
||||||
|
|
||||||
def get_response_string(self, response):
|
@staticmethod
|
||||||
|
def get_response_string(response):
|
||||||
return response[0]["LAST"]["response"]
|
return response[0]["LAST"]["response"]
|
||||||
|
|
||||||
def get_popup_info_and_close(self):
|
def get_popup_info_and_close(self):
|
||||||
cmd = "cv get_and_close_dialog"
|
cmd = "cv get_and_close_dialog"
|
||||||
dialog = self.run_cv_cmd(cmd);
|
dialog = self.run_cv_cmd(cmd)
|
||||||
if dialog[0]["LAST"]["response"] != "NO-DIALOG":
|
if dialog[0]["LAST"]["response"] != "NO-DIALOG":
|
||||||
print("Popup Dialog:\n")
|
print("Popup Dialog:\n")
|
||||||
print(dialog[0]["LAST"]["response"])
|
print(dialog[0]["LAST"]["response"])
|
||||||
|
|||||||
@@ -1,9 +1,11 @@
 import paramiko
 from scp import SCPClient


 class lanforge_reports:

-    def pull_reports(self, hostname="localhost", port=22, username="lanforge", password="lanforge",
+    @staticmethod
+    def pull_reports(hostname="localhost", port=22, username="lanforge", password="lanforge",
                      report_location="/home/lanforge/html-reports/",
                      report_dir="../../../reports/"):
         ssh = paramiko.SSHClient()
@@ -14,4 +16,3 @@ class lanforge_reports:
         with SCPClient(ssh.get_transport()) as scp:
             scp.get(remote_path=report_location, local_path=report_dir, recursive=True)
             scp.close()
-

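A usage sketch (not part of this commit); the import path is an assumption based on this repository's layout, and the host and credentials are placeholders:

from cv_test_reports import lanforge_reports as lf_report  # module path assumed

lf_report.pull_reports(hostname="192.168.1.101", port=22,
                       username="lanforge", password="lanforge",
                       report_location="/home/lanforge/html-reports/",
                       report_dir="./reports/")
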
@@ -39,7 +39,7 @@ class DataPlaneTest(ChamberViewBase):


 def main():
-    obj = DataPlaneTest(lfclient_host="localhost", lfclient_port=8080, debug_=True)
+    DataPlaneTest(lfclient_host="localhost", lfclient_port=8080, debug_=True)


 if __name__ == '__main__':

@@ -2,7 +2,8 @@
 import datetime
 import random
 import string
-from pprint import pprint
+from pprint import pformat


 class BaseProfile:
     def __init__(self, local_realm, debug=False):
@@ -11,9 +12,8 @@ class BaseProfile:
         self.debug = debug or local_realm.debug
         self.profiles = []

-
     def json_get(self, _req_url, debug_=False):
-        return self.parent_realm.json_get(_req_url, debug_=False)
+        return self.parent_realm.json_get(_req_url, debug_=debug_)

     def json_post(self, req_url=None, data=None, debug_=False, suppress_related_commands_=None):
         return self.parent_realm.json_post(_req_url=req_url,
@@ -34,52 +34,56 @@ class BaseProfile:
|
|||||||
return self.parent_realm.rm_cx(cx_name)
|
return self.parent_realm.rm_cx(cx_name)
|
||||||
|
|
||||||
def rm_endp(self, ename, debug_=False, suppress_related_commands_=True):
|
def rm_endp(self, ename, debug_=False, suppress_related_commands_=True):
|
||||||
self.parent_realm.rm_endp(ename, debug_=False, suppress_related_commands_=True)
|
self.parent_realm.rm_endp(ename, debug_=debug_, suppress_related_commands_=suppress_related_commands_)
|
||||||
|
|
||||||
def name_to_eid(self, eid):
|
def name_to_eid(self, eid):
|
||||||
return self.parent_realm.name_to_eid(eid)
|
return self.parent_realm.name_to_eid(eid)
|
||||||
|
|
||||||
def set_endp_tos(self, ename, _tos, debug_=False, suppress_related_commands_=True):
|
def set_endp_tos(self, ename, _tos, debug_=False, suppress_related_commands_=True):
|
||||||
return self.parent_realm.set_endp_tos(ename, _tos, debug_=False, suppress_related_commands_=True)
|
return self.parent_realm.set_endp_tos(ename, _tos, debug_=debug_, suppress_related_commands_=suppress_related_commands_)
|
||||||
|
|
||||||
def wait_until_endps_appear(self, these_endp, debug=False):
|
def wait_until_endps_appear(self, these_endp, debug=False):
|
||||||
return self.parent_realm.wait_until_endps_appear(these_endp, debug=False)
|
return self.parent_realm.wait_until_endps_appear(these_endp, debug=debug)
|
||||||
|
|
||||||
def wait_until_cxs_appear(self, these_cx, debug=False):
|
def wait_until_cxs_appear(self, these_cx, debug=False):
|
||||||
return self.parent_realm.wait_until_cxs_appear(these_cx, debug=False)
|
return self.parent_realm.wait_until_cxs_appear(these_cx, debug=debug)
|
||||||
|
|
||||||
def logg(self, message=None, audit_list=None):
|
def logg(self, message=None, audit_list=None):
|
||||||
if audit_list is None:
|
if audit_list is None:
|
||||||
self.parent_realm.logg(message)
|
self.parent_realm.logg(message)
|
||||||
for item in audit_list:
|
for item in audit_list:
|
||||||
if (item is None):
|
if item is None:
|
||||||
continue
|
continue
|
||||||
message += ("\n" + pprint.pformat(item, indent=4))
|
message += ("\n" + pformat(item, indent=4))
|
||||||
self.parent_realm.logg(message)
|
self.parent_realm.logg(message)
|
||||||
|
|
||||||
def replace_special_char(self, str):
|
@staticmethod
|
||||||
return str.replace('+', ' ').replace('_', ' ').strip(' ')
|
def replace_special_char(original):
|
||||||
|
return original.replace('+', ' ').replace('_', ' ').strip(' ')
|
||||||
|
|
||||||
# @deprecate me
|
# @deprecate me
|
||||||
def get_milliseconds(self, timestamp):
|
@staticmethod
|
||||||
return (timestamp - datetime.datetime(1970,1,1)).total_seconds()*1000
|
def get_milliseconds(timestamp):
|
||||||
|
return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds() * 1000
|
||||||
|
|
||||||
# @deprecate me
|
# @deprecate me
|
||||||
def get_seconds(self, timestamp):
|
@staticmethod
|
||||||
return (timestamp - datetime.datetime(1970,1,1)).total_seconds()
|
def get_seconds(timestamp):
|
||||||
|
return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds()
|
||||||
|
|
||||||
def read_file(self, filename):
|
@staticmethod
|
||||||
|
def read_file(filename):
|
||||||
filename = open(filename, 'r')
|
filename = open(filename, 'r')
|
||||||
return [line.split(',') for line in filename.readlines()]
|
return [line.split(',') for line in filename.readlines()]
|
||||||
|
|
||||||
#Function to create random characters made of letters
|
# Function to create random characters made of letters
|
||||||
def random_chars(self, size, chars=None):
|
@staticmethod
|
||||||
|
def random_chars(size, chars=None):
|
||||||
if chars is None:
|
if chars is None:
|
||||||
chars = string.ascii_letters
|
chars = string.ascii_letters
|
||||||
return ''.join(random.choice(chars) for x in range(size))
|
return ''.join(random.choice(chars) for _ in range(size))
|
||||||
|
|
||||||
|
# --------------- create file path / find file path code - to be put into functions
|
||||||
#--------------- create file path / find file path code - to be put into functions
|
|
||||||
# #Find file path to save data/csv to:
|
# #Find file path to save data/csv to:
|
||||||
# if args.report_file is None:
|
# if args.report_file is None:
|
||||||
# new_file_path = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-h-%M-m-%S-s")).replace(':',
|
# new_file_path = str(datetime.datetime.now().strftime("%Y-%m-%d-%H-h-%M-m-%S-s")).replace(':',
|
||||||
@@ -100,7 +104,7 @@ class BaseProfile:
|
|||||||
# print('Not supporting this report format or cannot find report format provided. Defaulting to csv data file output type, naming it data.csv.')
|
# print('Not supporting this report format or cannot find report format provided. Defaulting to csv data file output type, naming it data.csv.')
|
||||||
# report_f = str(path) + '/data.csv'
|
# report_f = str(path) + '/data.csv'
|
||||||
# output = 'csv'
|
# output = 'csv'
|
||||||
|
|
||||||
# else:
|
# else:
|
||||||
# report_f = args.report_file
|
# report_f = args.report_file
|
||||||
# if args.output_format is None:
|
# if args.output_format is None:
|
||||||
@@ -118,5 +122,3 @@ class BaseProfile:
|
|||||||
# exit(1)
|
# exit(1)
|
||||||
# else:
|
# else:
|
||||||
# compared_rept=args.compared_report
|
# compared_rept=args.compared_report
|
||||||
|
|
||||||
|
|
||||||
@@ -611,9 +611,9 @@ class L3CXProfile2(BaseProfile):
 raise ValueError("L3CXProfile::monitor wants duration_sec > 1 second")
 if (duration_sec <= monitor_interval_ms):
 raise ValueError("L3CXProfile::monitor wants duration_sec > monitor_interval")
-if report_file == None:
+if report_file is None:
 raise ValueError("Monitor requires an output file to be defined")
-if created_cx == None:
+if created_cx is None:
 raise ValueError("Monitor needs a list of Layer 3 connections")
 if (monitor_interval_ms is None) or (monitor_interval_ms < 1):
 raise ValueError("L3CXProfile::monitor wants monitor_interval >= 1 second")
@@ -74,7 +74,7 @@ class VRProfile(BaseProfile):

 def vr_eid_to_url(self, eid_str=None, debug=False):
 debug |= self.debug
-if (eid_str is None) or ("" == eid_str) or (eid_str.index(".") < 1):
+if (eid_str is None) or (eid_str == "") or (eid_str.index(".") < 1):
 raise ValueError("vr_eid_to_url cannot read eid[%s]" % eid_str)
 hunks = eid_str.split(".")
 if len(hunks) > 3:
@@ -111,7 +111,7 @@ class VRProfile(BaseProfile):
 resource=1,
 debug=False):
 debug |= self.debug
-if (resource is None) or (resource == 0) or ("" == resource):
+if (resource is None) or (resource == 0) or (resource == ""):
 raise ValueError("resource needs to be a number greater than 1")

 router_map = self.router_list(resource=resource, debug=debug)
@@ -334,9 +334,9 @@ class VRProfile(BaseProfile):
 :return: True if area is inside listed virtual router(s)
 """
 debug |= self.debug
-if (resource is None) or (resource == 0) or ("" == resource):
+if (resource is None) or (resource == 0) or (resource == ""):
 raise ValueError("resource needs to be a number greater than 1")
-if (vrcx_rect is None) or type(vrcx_rect ) or ("" == resource):
+if (vrcx_rect is None) or type(vrcx_rect) or (resource == ""):
 raise ValueError("resource needs to be a number greater than 1")
 router_list = self.router_list(resource=resource, debug=debug)
 #router_list = self.json_get("/vr/1/%s/%s?fields=eid,x,y,height,width")
@@ -50,10 +50,10 @@ class DUTProfile(LFCliBase):
 self.append = []

 def set_param(self, name, value):
-if (name in self.__dict__):
+if name in self.__dict__:
 self.__dict__[name] = value

-def create(self, name=None, param_=None, flags=None, flags_mask=None, notes=None):
+def create(self, name=None, flags=None, flags_mask=None):
 data = {}
 if (name is not None) and (name != ""):
 data["name"] = name
@@ -63,7 +63,7 @@ class DUTProfile(LFCliBase):
 raise ValueError("cannot create/update DUT record lacking a name")

 for param in add_dut.dut_params:
-if (param.name in self.__dict__):
+if param.name in self.__dict__:
 if (self.__dict__[param.name] is not None) \
 and (self.__dict__[param.name] != "NA"):
 data[param.name] = self.__dict__[param.name]
@@ -97,7 +97,6 @@ class DUTProfile(LFCliBase):
 "dut": self.name,
 "text": "[BLANK]"
 }, self.debug)
-notebytes = None
 for line in self.notes:
 notebytes = base64.b64encode(line.encode('ascii'))
 if self.debug:
@@ -110,7 +109,6 @@ class DUTProfile(LFCliBase):
 "text-64": notebytes.decode('ascii')
 }, self.debug)
 if (self.append is not None) and (len(self.append) > 0):
-notebytes = None
 for line in self.append:
 notebytes = base64.b64encode(line.encode('ascii'))
 if self.debug:
@@ -121,7 +121,9 @@ class FIOEndpProfile(LFCliBase):
 self.json_post(req_url, data)
 # pprint(data)

-def create(self, ports=[], connections_per_port=1, sleep_time=.5, debug_=False, suppress_related_commands_=None):
+def create(self, ports=None, connections_per_port=1, sleep_time=.5, debug_=False, suppress_related_commands_=None):
+if ports is None:
+ports = []
 cx_post_data = []
 for port_name in ports:
 for num_connection in range(connections_per_port):
@@ -173,8 +175,6 @@ class FIOEndpProfile(LFCliBase):
 self.local_realm.json_post("/cli-json/nc_show_endpoints", {"endpoint": "all"})
 for port_name in ports:
 for num_connection in range(connections_per_port):
-shelf = self.local_realm.name_to_eid(port_name)[0]
-resource = self.local_realm.name_to_eid(port_name)[1]
 name = self.local_realm.name_to_eid(port_name)[2]

 endp_data = {
@@ -9,7 +9,6 @@ import time
 import datetime
 import json

-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -49,7 +48,7 @@ class GenCXProfile(LFCliBase):
 if self.cmd == "":
 raise ValueError("Please ensure cmd has been set correctly")
 elif self.type == "speedtest":
-self.cmd = "vrf_exec.bash %s speedtest-cli --json --share" % (sta_name)
+self.cmd = "vrf_exec.bash %s speedtest-cli --json --share" % sta_name
 elif self.type == "iperf3" and self.dest is not None:
 self.cmd = "iperf3 --forceflush --format k --precision 4 -c %s -t 60 --tos 0 -b 1K --bind_dev %s -i 1 " \
 "--pidfile /tmp/lf_helper_iperf3_%s.pid" % (self.dest, sta_name, gen_name)
@@ -132,7 +131,7 @@ class GenCXProfile(LFCliBase):
 if self.cmd == "":
 raise ValueError("Please ensure cmd has been set correctly")
 elif self.type == "speedtest":
-self.cmd = "vrf_exec.bash %s speedtest-cli --json --share" % (sta_name)
+self.cmd = "vrf_exec.bash %s speedtest-cli --json --share" % sta_name
 elif self.type == "iperf3" and self.dest is not None:
 self.cmd = "iperf3 --forceflush --format k --precision 4 -c %s -t 60 --tos 0 -b 1K --bind_dev %s -i 1 " \
 "--pidfile /tmp/lf_helper_iperf3_test.pid" % (self.dest, sta_name)
@@ -176,7 +175,7 @@ class GenCXProfile(LFCliBase):
 resource = port_info[1]
 shelf = port_info[0]
 name = port_info[2]
-except:
+except ValueError:
 raise ValueError("Unexpected name for port_name %s" % port_name)

 # this naming convention follows what you see when you use
@@ -278,7 +277,9 @@ class GenCXProfile(LFCliBase):
 })
 time.sleep(sleep_time)

-def create(self, ports=[], sleep_time=.5, debug_=False, suppress_related_commands_=None):
+def create(self, ports=None, sleep_time=.5, debug_=False, suppress_related_commands_=None):
+if ports is None:
+ports = []
 if self.debug:
 debug_ = True
 post_data = []
@@ -396,12 +397,11 @@ class GenCXProfile(LFCliBase):
 gen_results = self.json_get("generic/list?fields=name,last+results", debug_=self.debug)
 if gen_results['endpoints'] is not None:
 pprint(gen_results['endpoints'])
-#for name in gen_results['endpoints']:
+# for name in gen_results['endpoints']:
 # pprint(name.items)
-#for k,v in name.items():
+# for k,v in name.items():
 exit(1)

-
 def choose_speedtest_command(self):
 gen_results = self.json_get("generic/list?fields=name,last+results", debug_=self.debug)
 if gen_results['endpoints'] is not None:
@@ -412,13 +412,13 @@ class GenCXProfile(LFCliBase):
 if last_results['download'] is None and last_results['upload'] is None and last_results['ping'] is None:
 return False, v['name']
 elif last_results['download'] >= self.speedtest_min_dl and \
 last_results['upload'] >= self.speedtest_min_up and \
 last_results['ping'] <= self.speedtest_max_ping:
 return True, v['name']

 def choose_generic_command(self):
 gen_results = self.json_get("generic/list?fields=name,last+results", debug_=self.debug)
-if (gen_results['endpoints'] is not None):
+if gen_results['endpoints'] is not None:
 for name in gen_results['endpoints']:
 for k, v in name.items():
 if v['name'] in self.created_endp and not v['name'].endswith('1'):
@@ -444,10 +444,10 @@ class GenCXProfile(LFCliBase):
 debug=False):
 try:
 duration_sec = self.parse_time(duration_sec).seconds
-except:
+except ValueError:
 if (duration_sec is None) or (duration_sec <= 1):
 raise ValueError("GenCXProfile::monitor wants duration_sec > 1 second")
-if (duration_sec <= monitor_interval_ms):
+if duration_sec <= monitor_interval_ms:
 raise ValueError("GenCXProfile::monitor wants duration_sec > monitor_interval")
 if report_file is None:
 raise ValueError("Monitor requires an output file to be defined")
@@ -602,7 +602,9 @@ class GenCXProfile(LFCliBase):
 exit(1)
 # append compared df to created one
 if output_format.lower() != 'csv':
-pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
+pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format,
+save_path=report_file)
 else:
 if output_format.lower() != 'csv':
-pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format, save_path=report_file)
+pandas_extensions.df_to_file(dataframe=pd.read_csv(report_file), output_f=output_format,
+save_path=report_file)
@@ -4,7 +4,6 @@ import os
 import importlib
 import time

-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 port_utils = importlib.import_module("py-json.port_utils")
@@ -26,7 +25,8 @@ class HTTPProfile(LFCliBase):
 self.direction = "dl"
 self.dest = "/dev/null"
 self.port_util = PortUtils(self.local_realm)
-self.max_speed = 0 #infinity
+self.max_speed = 0 # infinity
+self.quiesce_after = 0 # infinity

 def check_errors(self, debug=False):
 fields_list = ["!conn", "acc.+denied", "bad-proto", "bad-url", "other-err", "total-err", "rslv-p", "rslv-h",
@@ -91,7 +91,9 @@ class HTTPProfile(LFCliBase):
 self.json_post(req_url, data)
 # pprint(data)

-def map_sta_ips(self, sta_list=[]):
+def map_sta_ips(self, sta_list=None):
+if sta_list is None:
+sta_list = []
 for sta_eid in sta_list:
 eid = self.local_realm.name_to_eid(sta_eid)
 sta_list = self.json_get("/port/%s/%s/%s?fields=alias,ip" %
@@ -99,8 +101,11 @@ class HTTPProfile(LFCliBase):
 if sta_list['interface'] is not None:
 self.ip_map[sta_list['interface']['alias']] = sta_list['interface']['ip']

-def create(self, ports=[], sleep_time=.5, debug_=False, suppress_related_commands_=None, http=False, ftp=False,
-https=False, user=None, passwd=None, source=None, ftp_ip=None, upload_name=None, http_ip=None, https_ip=None):
+def create(self, ports=None, sleep_time=.5, debug_=False, suppress_related_commands_=None, http=False, ftp=False,
+https=False, user=None, passwd=None, source=None, ftp_ip=None, upload_name=None, http_ip=None,
+https_ip=None):
+if ports is None:
+ports = []
 cx_post_data = []
 self.map_sta_ips(ports)
 print("Create CXs...")
@@ -121,7 +126,7 @@ class HTTPProfile(LFCliBase):
 resource = self.local_realm.name_to_eid(port_name)[1]
 name = self.local_realm.name_to_eid(port_name)[2]

-if upload_name != None:
+if upload_name is not None:
 name = upload_name

 if http:
@@ -143,7 +148,7 @@ class HTTPProfile(LFCliBase):
 self.port_util.set_ftp(port_name=name, resource=resource, on=True)
 if user is not None and passwd is not None and source is not None:
 if ftp_ip is not None:
-ip_addr=ftp_ip
+ip_addr = ftp_ip
 url = "%s ftp://%s:%s@%s%s %s" % (self.direction, user, passwd, ip_addr, source, self.dest)
 print("###### url:{}".format(url))
 else:
@@ -154,7 +159,7 @@ class HTTPProfile(LFCliBase):
 if (url is None) or (url == ""):
 raise ValueError("HTTPProfile::create: url unset")

-if upload_name ==None:
+if upload_name is None:
 endp_data = {
 "alias": name + "_l4",
 "shelf": shelf,
@@ -164,7 +169,9 @@ class HTTPProfile(LFCliBase):
 "timeout": 10,
 "url_rate": self.requests_per_ten,
 "url": url,
-"proxy_auth_type": 0x200
+"proxy_auth_type": 0x200,
+"quiesce_after": self.quiesce_after,
+"max_speed": self.max_speed
 }
 else:
 endp_data = {
@@ -179,7 +186,8 @@ class HTTPProfile(LFCliBase):
 "ssl_cert_fname": "ca-bundle.crt",
 "proxy_port": 0,
 "max_speed": self.max_speed,
-"proxy_auth_type": 0x200
+"proxy_auth_type": 0x200,
+"quiesce_after": self.quiesce_after
 }
 url = "cli-json/add_l4_endp"
 self.local_realm.json_post(url, endp_data, debug_=debug_,
@@ -2,18 +2,18 @@
 import sys
 import os
 import importlib
-from pprint import pprint
-import csv
+import pprint
 import pandas as pd
 import time
 import datetime


 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
 LFCliBase = lfcli_base.LFCliBase
 pandas_extensions = importlib.import_module("py-json.LANforge.pandas_extensions")
+port_probe = importlib.import_module("py-json.port_probe")
+ProbePort = port_probe.ProbePort


 class L3CXProfile(LFCliBase):
@@ -21,8 +21,8 @@ class L3CXProfile(LFCliBase):
 lfclient_host,
 lfclient_port,
 local_realm,
-side_a_min_bps=None,
-side_b_min_bps=None,
+side_a_min_bps=256000,
+side_b_min_bps=256000,
 side_a_max_bps=0,
 side_b_max_bps=0,
 side_a_min_pdu=-1,
@@ -76,10 +76,10 @@ class L3CXProfile(LFCliBase):
 return self.created_cx.keys()

 def get_cx_report(self):
-self.data = {}
+data = dict()
 for cx_name in self.get_cx_names():
-self.data[cx_name] = self.json_get("/cx/" + cx_name).get(cx_name)
-return self.data
+data[cx_name] = self.json_get("/cx/" + cx_name).get(cx_name)
+return data

 def __get_rx_values(self):
 cx_list = self.json_get("endp?fields=name,rx+bytes")
@@ -95,7 +95,8 @@ class L3CXProfile(LFCliBase):
 cx_rx_map[item] = value_rx
 return cx_rx_map

-def __compare_vals(self, old_list, new_list):
+@staticmethod
+def __compare_vals(old_list, new_list):
 passes = 0
 expected_passes = 0
 if len(old_list) == len(new_list):
@@ -121,7 +122,6 @@ class L3CXProfile(LFCliBase):
 layer3_cols=None,
 port_mgr_cols=None,
 created_cx=None,
-monitor=True,
 report_file=None,
 systeminfopath=None,
 output_format=None,
@@ -129,24 +129,23 @@ class L3CXProfile(LFCliBase):
 arguments=None,
 compared_report=None,
 debug=False):
-try:
+if duration_sec:
 duration_sec = self.parse_time(duration_sec).seconds
-except:
-if (duration_sec is None) or (duration_sec <= 1):
-raise ValueError("L3CXProfile::monitor wants duration_sec > 1 second")
-if (duration_sec <= monitor_interval_ms):
-raise ValueError("L3CXProfile::monitor wants duration_sec > monitor_interval")
-if report_file == None:
+else:
+raise ValueError("L3CXProfile::monitor wants duration_sec > 1 second")
+if duration_sec <= monitor_interval_ms:
+raise ValueError("L3CXProfile::monitor wants duration_sec > monitor_interval")
+if report_file is None:
 raise ValueError("Monitor requires an output file to be defined")
-if systeminfopath == None:
+if systeminfopath is None:
 raise ValueError("Monitor requires a system info path to be defined")
-if created_cx == None:
+if created_cx is None:
 raise ValueError("Monitor needs a list of Layer 3 connections")
 if (monitor_interval_ms is None) or (monitor_interval_ms < 1):
 raise ValueError("L3CXProfile::monitor wants monitor_interval >= 1 second")
 if layer3_cols is None:
 raise ValueError("L3CXProfile::monitor wants a list of column names to monitor")
-if output_format is not None:
+if output_format:
 if output_format.lower() != report_file.split('.')[-1]:
 raise ValueError('Filename %s has an extension that does not match output format %s .' % (
 report_file, output_format))
@@ -163,13 +162,16 @@ class L3CXProfile(LFCliBase):
 layer3_fields = ",".join(layer3_cols)
 default_cols = ['Timestamp', 'Timestamp milliseconds epoch', 'Timestamp seconds epoch', 'Duration elapsed']
 default_cols.extend(layer3_cols)
-if port_mgr_cols is not None:
+# append alias to port_mgr_cols if not present needed later
+if port_mgr_cols:
+if 'alias' not in port_mgr_cols:
+port_mgr_cols.append('alias')
+
+if port_mgr_cols:
 default_cols.extend(port_mgr_cols)
 header_row = default_cols

-# csvwriter.writerow([systeminfo['VersionInfo']['BuildVersion'], script_name, str(arguments)])
-if port_mgr_cols is not None:
+if port_mgr_cols:
 port_mgr_cols = [self.replace_special_char(x) for x in port_mgr_cols]
 port_mgr_cols_labelled = []
 for col_name in port_mgr_cols:
@@ -193,59 +195,25 @@ class L3CXProfile(LFCliBase):
 expected_passes = 0
 old_cx_rx_values = self.__get_rx_values()

-# instantiate csv file here, add specified column headers
-csvfile = open(str(report_file), 'w')
-csvwriter = csv.writer(csvfile, delimiter=",")
-csvwriter.writerow(header_row)

 # wait 10 seconds to get proper port data
 time.sleep(10)

 # for x in range(0,int(round(iterations,0))):
 initial_starttime = datetime.datetime.now()
+timestamp_data = list()
 while datetime.datetime.now() < end_time:
 t = datetime.datetime.now()
 timestamp = t.strftime("%m/%d/%Y %I:%M:%S")
 t_to_millisec_epoch = int(self.get_milliseconds(t))
 t_to_sec_epoch = int(self.get_seconds(t))
 time_elapsed = int(self.get_seconds(t)) - int(self.get_seconds(initial_starttime))
-basecolumns = [timestamp, t_to_millisec_epoch, t_to_sec_epoch, time_elapsed]
-layer_3_response = self.json_get("/endp/%s?fields=%s" % (created_cx, layer3_fields))
-if port_mgr_cols is not None:
-port_mgr_response = self.json_get("/port/1/1/%s?fields=%s" % (sta_list, port_mgr_fields))
-# get info from port manager with list of values from cx_a_side_list
-if "endpoint" not in layer_3_response or layer_3_response is None:
-print(layer_3_response)
-raise ValueError("Cannot find columns requested to be searched. Exiting script, please retry.")
-if debug:
-print("Json layer_3_response from LANforge... " + str(layer_3_response))
-if port_mgr_cols is not None:
-if "interfaces" not in port_mgr_response or port_mgr_response is None:
-print(port_mgr_response)
-raise ValueError("Cannot find columns requested to be searched. Exiting script, please retry.")
-if debug:
-print("Json port_mgr_response from LANforge... " + str(port_mgr_response))
+stations = [station.split('.')[-1] for station in sta_list]
+stations = ','.join(stations)

-for endpoint in layer_3_response["endpoint"]: # each endpoint is a dictionary
-endp_values = list(endpoint.values())[0]
-temp_list = basecolumns
-for columnname in header_row[len(basecolumns):]:
-temp_list.append(endp_values[columnname])
-if port_mgr_cols is not None:
-for sta_name in sta_list_edit:
-if sta_name in current_sta:
-for interface in port_mgr_response["interfaces"]:
-if sta_name in list(interface.keys())[0]:
-merge = temp_endp_values.copy()
-# rename keys (separate port mgr 'rx bytes' from layer3 'rx bytes')
-port_mgr_values_dict = list(interface.values())[0]
-renamed_port_cols = {}
-for key in port_mgr_values_dict.keys():
-renamed_port_cols['port mgr - ' + key] = port_mgr_values_dict[key]
-merge.update(renamed_port_cols)
-for name in port_mgr_cols:
-temp_list.append(merge[name])
-csvwriter.writerow(temp_list)
+if port_mgr_cols:
+port_mgr_response = self.json_get("/port/1/1/%s?fields=%s" % (stations, port_mgr_fields))
+
+layer_3_response = self.json_get("/endp/%s?fields=%s" % (created_cx, layer3_fields))

 new_cx_rx_values = self.__get_rx_values()
 if debug:
@@ -258,38 +226,130 @@ class L3CXProfile(LFCliBase):
 passes += 1
 else:
 self.fail("FAIL: Not all stations increased traffic")
-self.exit_fail()
-try:
-cx_data = self.json_get("/cx/all")
-cx_data.pop("handler")
-cx_data.pop("uri")

-for i in self.created_cx.keys():
-endp_a_data = self.json_get("/endp/"+ cx_data[i]['endpoints'][0])
-endp_b_data = self.json_get("/endp/" + cx_data[i]['endpoints'][1])
-print("cx name:", i, "\n",
-" bps tx a :", endp_a_data['endpoint']['tx rate'], " --> ",
-" bps rx b : ", endp_b_data['endpoint']['rx rate'],
-" rx drop % b : ", cx_data[i]['rx drop % b'], "\n"
-" tx bytes a : ", endp_a_data['endpoint']['tx bytes'], " --> "
-" rx bytes b", endp_b_data['endpoint']['rx bytes'], "\n"
-" tx bytes b : ", endp_b_data['endpoint']['tx bytes'], " --> "
-" rx bytes a", endp_a_data['endpoint']['rx bytes'], "\n"
-" bps tx b :", endp_b_data['endpoint']['tx rate'], " --> "
-" bps rx a : ", endp_a_data['endpoint']['rx rate'],
-" rx drop % a :", cx_data[i]['rx drop % a'], "\n"
-" pkt rx a :", cx_data[i]['pkt rx a'], " pkt rx b : ", cx_data[i]['pkt rx b'],
-)
-print("\n\n\n")
-except Exception as e:
-print(e)
+result = dict() # create dataframe from layer 3 results
+if type(layer_3_response) is dict:
+for dictionary in layer_3_response['endpoint']:
+# if debug:
+print('layer_3_data: %s' % dictionary)
+result.update(dictionary)
+else:
+pass
+layer3 = pd.DataFrame(result.values())
+layer3.columns = ['l3-' + x for x in layer3.columns]
+
+if port_mgr_cols: # create dataframe from port mgr results
+result = dict()
+if type(port_mgr_response) is dict:
+print("port_mgr_response {pmr}".format(pmr=port_mgr_response))
+if 'interfaces' in port_mgr_response:
+for dictionary in port_mgr_response['interfaces']:
+if debug:
+print('port mgr data: %s' % dictionary)
+result.update(dictionary)
+
+elif 'interface' in port_mgr_response:
+dict_update = {port_mgr_response['interface']['alias']: port_mgr_response['interface']}
+if debug:
+print(dict_update)
+result.update(dict_update)
+if debug:
+print(result)
+else:
+print('interfaces and interface not in port_mgr_response')
+exit(1)
+portdata_df = pd.DataFrame(result.values())
+print("portdata_df {pd}".format(pd=portdata_df))
+portdata_df.columns = ['port-' + x for x in portdata_df.columns]
+portdata_df['alias'] = portdata_df['port-alias']
+
+layer3_alias = list() # Add alias to layer 3 dataframe
+for cross_connect in layer3['l3-name']:
+for port in portdata_df['port-alias']:
+if port in cross_connect:
+layer3_alias.append(port)
+if len(layer3_alias) == layer3.shape[0]:
+layer3['alias'] = layer3_alias
+else:
+raise ValueError("The Stations or Connection on LANforge did not match expected, \
+Check if LANForge initial state correct or delete/cleanup corrects")
+
+timestamp_df = pd.merge(layer3, portdata_df, on='alias')
+else:
+timestamp_df = layer3
+probe_port_df_list = list()
+for station in sta_list:
+probe_port = ProbePort(lfhost=self.lfclient_host,
+lfport=self.lfclient_port,
+eid_str=station,
+debug=self.debug)
+probe_results = dict()
+probe_port.refreshProbe()
+probe_results['Signal Avg Combined'] = probe_port.getSignalAvgCombined()
+probe_results['Signal Avg per Chain'] = probe_port.getSignalAvgPerChain()
+probe_results['Signal Combined'] = probe_port.getSignalCombined()
+probe_results['Signal per Chain'] = probe_port.getSignalPerChain()
+if 'Beacon Av Signal' in probe_results.keys():
+probe_results['Beacon Avg Signal'] = probe_port.getBeaconSignalAvg()
+else:
+probe_results['Beacon Avg Signal'] = "0"
+# probe_results['HE status'] = probe_port.he
+probe_results['TX Bitrate'] = probe_port.tx_bitrate
+probe_results['TX Mbps'] = probe_port.tx_mbit
+probe_results['TX MCS ACTUAL'] = probe_port.tx_mcs
+if probe_port.tx_mcs:
+probe_results['TX MCS'] = int(probe_port.tx_mcs) % 8
+else:
+probe_results['TX MCS'] = probe_port.tx_mcs
+probe_results['TX NSS'] = probe_port.tx_nss
+probe_results['TX MHz'] = probe_port.tx_mhz
+if probe_port.tx_gi:
+probe_results['TX GI ns'] = (probe_port.tx_gi * 10**9)
+else:
+probe_results['TX GI ns'] = probe_port.tx_gi
+probe_results['TX Mbps Calc'] = probe_port.tx_mbit_calc
+probe_results['TX GI'] = probe_port.tx_gi
+probe_results['TX Mbps short GI'] = probe_port.tx_data_rate_gi_short_Mbps
+probe_results['TX Mbps long GI'] = probe_port.tx_data_rate_gi_long_Mbps
+probe_results['RX Bitrate'] = probe_port.rx_bitrate
+probe_results['RX Mbps'] = probe_port.rx_mbit
+probe_results['RX MCS ACTUAL'] = probe_port.rx_mcs
+if probe_port.rx_mcs:
+probe_results['RX MCS'] = int(probe_port.rx_mcs) % 8
+else:
+probe_results['RX MCS'] = probe_port.rx_mcs
+probe_results['RX NSS'] = probe_port.rx_nss
+probe_results['RX MHz'] = probe_port.rx_mhz
+if probe_port.rx_gi:
+probe_results['RX GI ns'] = (probe_port.rx_gi * 10**9)
+else:
+probe_results['RX GI ns'] = probe_port.rx_gi
+probe_results['RX Mbps Calc'] = probe_port.rx_mbit_calc
+probe_results['RX GI'] = probe_port.rx_gi
+probe_results['RX Mbps short GI'] = probe_port.rx_data_rate_gi_short_Mbps
+probe_results['RX Mbps long GI'] = probe_port.rx_data_rate_gi_long_Mbps
+
+probe_df_initial = pd.DataFrame(probe_results.values()).transpose()
+probe_df_initial.columns = probe_results.keys()
+probe_df_initial.columns = ['probe ' + x for x in probe_df_initial.columns]
+probe_df_initial['alias'] = station.split('.')[-1]
+probe_port_df_list.append(probe_df_initial)
+probe_port_df = pd.concat(probe_port_df_list)
+timestamp_df = pd.merge(timestamp_df, probe_port_df, on='alias')
+timestamp_df['Timestamp'] = timestamp
+timestamp_df['Timestamp milliseconds epoch'] = t_to_millisec_epoch
+timestamp_df['Timestamp seconds epoch'] = t_to_sec_epoch
+timestamp_df['Duration elapsed'] = time_elapsed
+timestamp_data.append(timestamp_df)
 time.sleep(monitor_interval_ms)
-csvfile.close()
+df = pd.concat(timestamp_data)
+df = df.drop('alias', axis=1)
+df.to_csv(str(report_file), index=False)

 # comparison to last report / report inputted
-if compared_report is not None:
-compared_df = pandas_extensions.compare_two_df(dataframe_one=pandas_extensions.file_to_df(report_file),
+if compared_report:
+pandas_extensions.compare_two_df(dataframe_one=pandas_extensions.file_to_df(report_file),
 dataframe_two=pandas_extensions.file_to_df(compared_report))
 exit(1)
 # append compared df to created one
 if output_format.lower() != 'csv':
@@ -310,7 +370,7 @@ class L3CXProfile(LFCliBase):
 print("Starting CXs...")
 for cx_name in self.created_cx.keys():
 if self.debug:
-print("cx-name: %s" % (cx_name))
+print("cx-name: %s" % cx_name)
 self.json_post("/cli-json/set_cx_state", {
 "test_mgr": "default_tm",
 "cx_name": cx_name,
@@ -336,13 +396,13 @@ class L3CXProfile(LFCliBase):
 if len(self.created_cx) != 0:
 for cx_name in self.created_cx.keys():
 if self.debug:
-print("Cleaning cx: %s" % (cx_name))
+print("Cleaning cx: %s" % cx_name)
 self.local_realm.rm_cx(cx_name)

 for side in range(len(self.created_cx[cx_name])):
 ename = self.created_cx[cx_name][side]
 if self.debug:
-print("Cleaning endpoint: %s" % (ename))
+print("Cleaning endpoint: %s" % ename)
 self.local_realm.rm_endp(self.created_cx[cx_name][side])

 self.clean_cx_lists()
@@ -382,8 +442,6 @@ class L3CXProfile(LFCliBase):
 side_a_info = self.local_realm.name_to_eid(port_name, debug=debug_)
 side_a_shelf = side_a_info[0]
 side_a_resource = side_a_info[1]
-if port_name.find('.') < 0:
-port_name = "%d.%s" % (side_a_info[1], port_name)

 cx_name = "%s%s-%i" % (self.name_prefix, side_a_info[2], len(self.created_cx))

@@ -456,7 +514,7 @@ class L3CXProfile(LFCliBase):
 self.local_realm.json_post(url, data, debug_=debug_,
 suppress_related_commands_=suppress_related_commands)

-if tos != None:
+if tos:
 self.local_realm.set_endp_tos(endp_a_name, tos)
 self.local_realm.set_endp_tos(endp_b_name, tos)

@@ -485,7 +543,6 @@ class L3CXProfile(LFCliBase):
 side_b_info = self.local_realm.name_to_eid(port_name, debug=debug_)
 side_b_shelf = side_b_info[0]
 side_b_resource = side_b_info[1]
-side_b_name = side_b_info[2]

 cx_name = "%s%s-%i" % (self.name_prefix, port_name, len(self.created_cx))
 endp_a_name = cx_name + "-A"
@@ -8,7 +8,6 @@ import time
 import datetime
 import ast

-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -74,7 +73,8 @@ class L4CXProfile(LFCliBase):
 print(".", end='')
 print("")

-def compare_vals(self, old_list, new_list):
+@staticmethod
+def compare_vals(old_list, new_list):
 passes = 0
 expected_passes = 0
 if len(old_list) == len(new_list):
@@ -139,10 +139,16 @@ class L4CXProfile(LFCliBase):
 self.json_post(req_url, data)
 # pprint(data)

-def create(self, ports=[], sleep_time=.5, debug_=False, suppress_related_commands_=None):
+def create(self, ports=None, sleep_time=.5, debug_=False, suppress_related_commands_=None):
+if ports is None:
+ports = []
 cx_post_data = []
 for port_name in ports:
-print("port_name: {} len: {} self.local_realm.name_to_eid(port_name): {}".format(port_name, len(self.local_realm.name_to_eid(port_name)), self.local_realm.name_to_eid(port_name)))
+print("port_name: {} len: {} self.local_realm.name_to_eid(port_name): {}".format(port_name,
+len(self.local_realm.name_to_eid(
+port_name)),
+self.local_realm.name_to_eid(
+port_name)))
 shelf = self.local_realm.name_to_eid(port_name)[0]
 resource = self.local_realm.name_to_eid(port_name)[1]
 name = self.local_realm.name_to_eid(port_name)[2]
@@ -189,16 +195,16 @@ class L4CXProfile(LFCliBase):
 arguments=None,
 iterations=0,
 debug=False):
-try:
+if duration_sec:
 duration_sec = LFCliBase.parse_time(duration_sec).seconds
-except:
+else:
 if (duration_sec is None) or (duration_sec <= 1):
 raise ValueError("L4CXProfile::monitor wants duration_sec > 1 second")
-if (duration_sec <= monitor_interval):
+if duration_sec <= monitor_interval:
 raise ValueError("L4CXProfile::monitor wants duration_sec > monitor_interval")
-if report_file == None:
+if report_file is None:
 raise ValueError("Monitor requires an output file to be defined")
-if created_cx == None:
+if created_cx is None:
 raise ValueError("Monitor needs a list of Layer 4 connections")
 if (monitor_interval is None) or (monitor_interval < 1):
 raise ValueError("L4CXProfile::monitor wants monitor_interval >= 1 second")
@@ -211,20 +217,20 @@ class L4CXProfile(LFCliBase):
 # Step 1 - Assign column names

 if col_names is not None and len(col_names) > 0:
-header_row=col_names
+header_row = col_names
 else:
-header_row=list((list(self.json_get("/layer4/all")['endpoint'][0].values())[0].keys()))
+header_row = list((list(self.json_get("/layer4/all")['endpoint'][0].values())[0].keys()))
 if debug:
 print(header_row)

 # Step 2 - Monitor columns
 start_time = datetime.datetime.now()
 end_time = start_time + datetime.timedelta(seconds=duration_sec)
 sleep_interval = round(duration_sec // 5)
 if debug:
 print("Sleep_interval is %s ", sleep_interval)
-print("Start time is %s " , start_time)
-print("End time is %s " ,end_time)
+print("Start time is %s ", start_time)
+print("End time is %s ", end_time)
 value_map = dict()
 passes = 0
 expected_passes = 0
@@ -232,7 +238,7 @@ class L4CXProfile(LFCliBase):
 if self.test_type != 'urls':
 old_rx_values = self.get_bytes()

-for test in range(1+iterations):
+for test in range(1 + iterations):
 while datetime.datetime.now() < end_time:
 if col_names is None:
 response = self.json_get("/layer4/all")
@@ -276,10 +282,10 @@ class L4CXProfile(LFCliBase):

 print(value_map)

-#[further] post-processing data, after test completion
+# [further] post-processing data, after test completion
 full_test_data_list = []
 for test_timestamp, data in value_map.items():
-#reduce the endpoint data to single dictionary of dictionaries
+# reduce the endpoint data to single dictionary of dictionaries
 for datum in data["endpoint"]:
 for endpoint_data in datum.values():
 if debug:
@@ -287,19 +293,19 @@ class L4CXProfile(LFCliBase):
 endpoint_data["Timestamp"] = test_timestamp
 full_test_data_list.append(endpoint_data)


 header_row.append("Timestamp")
 header_row.append('Timestamp milliseconds')
 df = pd.DataFrame(full_test_data_list)

 df["Timestamp milliseconds"] = [self.get_milliseconds(x) for x in df["Timestamp"]]
-#round entire column
-df["Timestamp milliseconds"]=df["Timestamp milliseconds"].astype(int)
-df["Timestamp"]=df["Timestamp"].apply(lambda x:x.strftime("%m/%d/%Y %I:%M:%S"))
-df=df[["Timestamp","Timestamp milliseconds", *header_row[:-2]]]
-#compare previous data to current data
+# round entire column
+df["Timestamp milliseconds"] = df["Timestamp milliseconds"].astype(int)
+df["Timestamp"] = df["Timestamp"].apply(lambda x: x.strftime("%m/%d/%Y %I:%M:%S"))
+df = df[["Timestamp", "Timestamp milliseconds", *header_row[:-2]]]
+# compare previous data to current data

-systeminfo = ast.literal_eval(requests.get('http://'+str(self.lfclient_host)+':'+str(self.lfclient_port)).text)
+systeminfo = ast.literal_eval(
+requests.get('http://' + str(self.lfclient_host) + ':' + str(self.lfclient_port)).text)

 if output_format == 'hdf':
 df.to_hdf(report_file, 'table', append=True)
@@ -312,7 +318,7 @@ class L4CXProfile(LFCliBase):
 df.to_excel(report_file, index=False)
 if output_format == 'df':
 return df
-supported_formats = ['csv', 'json', 'stata', 'pickle','html']
+supported_formats = ['csv', 'json', 'stata', 'pickle', 'html']
 for x in supported_formats:
 if output_format.lower() == x or report_file.split('.')[-1] == x:
-exec('df.to_' + x + '("'+report_file+'")')
+exec('df.to_' + x + '("' + report_file + '")')
@@ -27,15 +27,15 @@ class ChamberViewBase(LFCliBase):
|
|||||||
def remove_text_blobs(self):
|
def remove_text_blobs(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def add_text_blobs(self, type="", name="", data="", debug=False):
|
def add_text_blobs(self, text_type="", name="", data="", debug=False):
|
||||||
data = {'type': type,
|
data = {'type': text_type,
|
||||||
'name': name,
|
'name': name,
|
||||||
"text": data
|
"text": data
|
||||||
}
|
}
|
||||||
self.json_post("/cli-json/add_text_blob/", data, debug_=debug)
|
self.json_post("/cli-json/add_text_blob/", data, debug_=debug)
|
||||||
|
|
||||||
def get_text_blob(self, type="", name="", debug=False):
|
def get_text_blob(self, text_type="", name="", debug=False):
|
||||||
data = {'type': type,
|
data = {'type': text_type,
|
||||||
'name': name,
|
'name': name,
|
||||||
}
|
}
|
||||||
return self.json_post("/cli-json/show_text_blob/", data, debug_=debug)
|
return self.json_post("/cli-json/show_text_blob/", data, debug_=debug)
|
||||||
|
|||||||
@@ -4,16 +4,16 @@ import datetime
|
|||||||
|
|
||||||
# LFData class actions:
|
# LFData class actions:
|
||||||
# - Methods to collect data/store data (use from monitor instance) - used by Profile class.
|
# - Methods to collect data/store data (use from monitor instance) - used by Profile class.
|
||||||
# - file open/save
|
# - file open/save
|
||||||
# - save row (rolling) - to CSV (standard)
|
# - save row (rolling) - to CSV (standard)
|
||||||
# - headers
|
# - headers
|
||||||
# - file to data-storage-type conversion and vice versa (e.g. dataframe (or datatable) to file type and vice versa)
|
# - file to data-storage-type conversion and vice versa (e.g. dataframe (or datatable) to file type and vice versa)
|
||||||
# - other common util methods related to immediate data storage
|
# - other common util methods related to immediate data storage
|
||||||
# - include compression method
|
# - include compression method
|
||||||
# - monitoring truncates every 5 mins and sends to report? --- need clarification. truncate file and rewrite to same file?
|
# - monitoring truncates every 5 mins and sends to report? --- need clarification. truncate file and rewrite to same file?
|
||||||
# - large data collection use NFS share to NAS.
|
# - large data collection use NFS share to NAS.
|
||||||
# Websocket class actions:
|
# Websocket class actions:
|
||||||
#reading data from websockets
|
# reading data from websockets
|
||||||
|
|
||||||
class LFDataCollection:
|
class LFDataCollection:
|
||||||
def __init__(self, local_realm, debug=False):
|
def __init__(self, local_realm, debug=False):
|
||||||
@@ -22,70 +22,71 @@ class LFDataCollection:
         self.debug = debug or local_realm.debug

     def json_get(self, _req_url, debug_=False):
-        return self.parent_realm.json_get(_req_url, debug_=False)
+        return self.parent_realm.json_get(_req_url, debug_=debug_)

-    def check_json_validity(self, keyword=None, json_response=None):
+    @staticmethod
+    def check_json_validity(keyword=None, json_response=None):
         if json_response is None:
-            raise ValueError("Cannot find columns requested to be searched in port manager. Exiting script, please retry.")
+            raise ValueError(
+                "Cannot find columns requested to be searched in port manager. Exiting script, please retry.")
         if keyword is not None and keyword not in json_response:
-            raise ValueError("Cannot find proper information from json. Please check your json request. Exiting script, please retry.")
+            raise ValueError(
+                "Cannot find proper information from json. Please check your json request. Exiting script, please retry.")

-    def get_milliseconds(self,
-                         timestamp):
-        return (timestamp - datetime.datetime(1970,1,1)).total_seconds()*1000
+    @staticmethod
+    def get_milliseconds(timestamp):
+        return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds() * 1000

-    def get_seconds(self,
-                    timestamp):
-        return (timestamp - datetime.datetime(1970,1,1)).total_seconds()
+    @staticmethod
+    def get_seconds(timestamp):
+        return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds()

-    #only for ipv4_variable_time at the moment
-    def monitor_interval(self, header_row_= None,
-                         start_time_= None, sta_list_= None,
-                         created_cx_= None, layer3_fields_= None,
-                         port_mgr_fields_= None):
+    # only for ipv4_variable_time at the moment
+    def monitor_interval(self, header_row_=None,
+                         start_time_=None, sta_list_=None,
+                         created_cx_=None, layer3_fields_=None,
+                         port_mgr_fields_=None):

-        #time calculations for while loop and writing to csv
-        t = datetime.datetime.now()
-        timestamp= t.strftime("%m/%d/%Y %I:%M:%S")
-        t_to_millisec_epoch= int(self.get_milliseconds(t))
-        time_elapsed=int(self.get_seconds(t))-int(self.get_seconds(start_time_))
+        # time calculations for while loop and writing to csv
+        t = datetime.datetime.now()
+        timestamp = t.strftime("%m/%d/%Y %I:%M:%S")
+        t_to_millisec_epoch = int(self.get_milliseconds(t))
+        time_elapsed = int(self.get_seconds(t)) - int(self.get_seconds(start_time_))

-        #get responses from json
-        layer_3_response = self.json_get("/endp/%s?fields=%s" % (created_cx_, layer3_fields_),debug_=self.debug)
-        if port_mgr_fields_ is not None:
-            port_mgr_response=self.json_get("/port/1/1/%s?fields=%s" % (sta_list_, port_mgr_fields_), debug_=self.debug)
+        # get responses from json
+        layer_3_response = self.json_get("/endp/%s?fields=%s" % (created_cx_, layer3_fields_), debug_=self.debug)
+        if port_mgr_fields_ is not None:
+            port_mgr_response = self.json_get("/port/1/1/%s?fields=%s" % (sta_list_, port_mgr_fields_),
+                                              debug_=self.debug)

-        #check json response validity
-        self.check_json_validity(keyword="endpoint",json_response=layer_3_response)
-        self.check_json_validity(keyword="interfaces",json_response=port_mgr_response)
+        # check json response validity
+        self.check_json_validity(keyword="endpoint", json_response=layer_3_response)
+        self.check_json_validity(keyword="interfaces", json_response=port_mgr_response)

-        #dict manipulation
-        temp_list=[]
-        for endpoint in layer_3_response["endpoint"]:
-            if self.debug:
-                print("Current endpoint values list... ")
-                print(list(endpoint.values())[0])
-            temp_endp_values=list(endpoint.values())[0] #dict
-            temp_list.extend([timestamp,t_to_millisec_epoch,time_elapsed])
-            current_sta = temp_endp_values['name']
-            merge={}
-            if port_mgr_fields_ is not None:
-                for sta_name in sta_list_:
-                    if sta_name in current_sta:
-                        for interface in port_mgr_response["interfaces"]:
-                            if sta_name in list(interface.keys())[0]:
-                                merge=temp_endp_values.copy()
-                                port_mgr_values_dict =list(interface.values())[0]
-                                renamed_port_cols={}
-                                for key in port_mgr_values_dict.keys():
-                                    renamed_port_cols['port mgr - ' +key]=port_mgr_values_dict[key]
-                                merge.update(renamed_port_cols)
-            for name in header_row_[3:-3]:
-                temp_list.append(merge[name])
-        return temp_list
+        # dict manipulation
+        temp_list = []
+        for endpoint in layer_3_response["endpoint"]:
+            if self.debug:
+                print("Current endpoint values list... ")
+                print(list(endpoint.values())[0])
+            temp_endp_values = list(endpoint.values())[0]  # dict
+            temp_list.extend([timestamp, t_to_millisec_epoch, time_elapsed])
+            current_sta = temp_endp_values['name']
+            merge = {}
+            if port_mgr_fields_ is not None:
+                for sta_name in sta_list_:
+                    if sta_name in current_sta:
+                        for interface in port_mgr_response["interfaces"]:
+                            if sta_name in list(interface.keys())[0]:
+                                merge = temp_endp_values.copy()
+                                port_mgr_values_dict = list(interface.values())[0]
+                                renamed_port_cols = {}
+                                for key in port_mgr_values_dict.keys():
+                                    renamed_port_cols['port mgr - ' + key] = port_mgr_values_dict[key]
+                                merge.update(renamed_port_cols)
+            for name in header_row_[3:-3]:
+                temp_list.append(merge[name])
+        return temp_list

-#class WebSocket():
+# class WebSocket():
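Note: the hunk above turns get_milliseconds() and get_seconds() into @staticmethod helpers, so callers no longer need an LFDataCollection instance just to convert a timestamp. A quick stand-alone check of the epoch math (illustrative only, not part of the diff):

    import datetime

    def get_milliseconds(timestamp):
        # Milliseconds since the Unix epoch, same expression as the helper in the diff.
        return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds() * 1000

    t = datetime.datetime(2021, 1, 1)
    print(get_milliseconds(t))  # 1609459200000.0 for 2021-01-01 00:00:00
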
@@ -101,17 +101,17 @@ class MACVLANProfile(LFCliBase):
                 pprint(set_port.set_port_current_flags)
                 pprint(set_port.set_port_interest_flags)
                 return
-            if (param_name in set_port.set_port_cmd_flags):
+            if param_name in set_port.set_port_cmd_flags:
                 if (value == 1) and (param_name not in self.desired_set_port_cmd_flags):
                     self.desired_set_port_cmd_flags.append(param_name)
                 elif value == 0:
                     self.desired_set_port_cmd_flags.remove(param_name)
-            elif (param_name in set_port.set_port_current_flags):
+            elif param_name in set_port.set_port_current_flags:
                 if (value == 1) and (param_name not in self.desired_set_port_current_flags):
                     self.desired_set_port_current_flags.append(param_name)
                 elif value == 0:
                     self.desired_set_port_current_flags.remove(param_name)
-            elif (param_name in set_port.set_port_interest_flags):
+            elif param_name in set_port.set_port_interest_flags:
                 if (value == 1) and (param_name not in self.desired_set_port_interest_flags):
                     self.desired_set_port_interest_flags.append(param_name)
                 elif value == 0:
@@ -2,9 +2,8 @@
 import sys
 import os
 import importlib
-from pprint import pprint
+import pprint


 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -36,7 +35,7 @@ class MULTICASTProfile(LFCliBase):
         # Clean out our local lists, this by itself does NOT remove anything from LANforge manager.
         # but, if you are trying to modify existing connections, then clearing these arrays and
         # re-calling 'create' will do the trick.
-        created_mc = {}
+        self.created_mc = {}

     def get_mc_names(self):
         return self.created_mc.keys()
@@ -52,7 +51,7 @@ class MULTICASTProfile(LFCliBase):
             debug_ = True

         for endp_name in self.get_mc_names():
-            print("Starting mcast endpoint: %s" % (endp_name))
+            print("Starting mcast endpoint: %s" % endp_name)
             json_data = {
                 "endp_name": endp_name
             }
@@ -78,15 +77,15 @@ class MULTICASTProfile(LFCliBase):
     def cleanup_prefix(self):
         self.local_realm.cleanup_cxe_prefix(self.name_prefix)

-    def cleanup(self, suppress_related_commands=None, debug_ = False):
+    def cleanup(self, suppress_related_commands=None, debug_=False):
         if self.debug:
             debug_ = True

         for endp_name in self.get_mc_names():
             self.local_realm.rm_endp(endp_name, debug_=debug_, suppress_related_commands_=suppress_related_commands)

     def create_mc_tx(self, endp_type, side_tx, mcast_group="224.9.9.9", mcast_dest_port=9999,
                      suppress_related_commands=None, debug_=False):
         if self.debug:
             debug_ = True
@@ -96,8 +95,6 @@ class MULTICASTProfile(LFCliBase):
         side_tx_port = side_tx_info[2]
         side_tx_name = "%smtx-%s-%i" % (self.name_prefix, side_tx_port, len(self.created_mc))

-        json_data = []
-
         # add_endp mcast-xmit-sta 1 1 side_tx mc_udp -1 NO 4000000 0 NO 1472 0 INCREASING NO 32 0 0
         json_data = {
             'alias': side_tx_name,
@@ -138,8 +135,8 @@ class MULTICASTProfile(LFCliBase):
         these_endp = [side_tx_name]
         self.local_realm.wait_until_endps_appear(these_endp, debug=debug_)

     def create_mc_rx(self, endp_type, side_rx, mcast_group="224.9.9.9", mcast_dest_port=9999,
                      suppress_related_commands=None, debug_=False):
         if self.debug:
             debug_ = True
@@ -203,7 +203,7 @@ def main():
     url = base_url+"/port/1/%s/list?fields=alias" % (resource_id)
     lf_r = LFRequest.LFRequest(url)
     json_response = lf_r.getAsJson()
-    if json_response == None:
+    if json_response is None:
         raise Exception("no reponse to: "+url)
     port_map = LFUtils.portListToAliasMap(json_response)
     #LFUtils.debug_printer.pprint(port_map)
@@ -136,7 +136,7 @@ def main():

     # Now lets do some cli-socket scripting
     gui_telnet = pexpect.spawn('telnet %s %s'%(host, clisock))
-    if (gui_telnet == None):
+    if gui_telnet is None:
         print ("Unable to telnet to %s:%s"%(host,clisock));
         exit(1)
708 py-json/port_probe.py Normal file
@@ -0,0 +1,708 @@
#!/usr/bin/env python3
import importlib
from time import sleep
# import pandas as pd
import sys
import os
from pprint import pprint

sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
LFCliBase = lfcli_base.LFCliBase


# Probe data can change frequently. It is recommended to update

class ProbePort(LFCliBase):
    def __init__(self,
                 lfhost=None,
                 lfport='8080',
                 debug=False,
                 eid_str=None):
        super().__init__(_lfjson_host=lfhost,
                         _lfjson_port=lfport,
                         _debug=debug)
        hunks = eid_str.split(".")
        self.eid_str = eid_str
        self.probepath = "/probe/1/%s/%s" % (hunks[-2], hunks[-1])
        self.response = None
        self.signals = None
        self.ofdma = False

        self.tx_bitrate = None
        self.tx_mcs = None
        self.tx_nss = None
        self.tx_mbit = None
        self.tx_mhz = None
        self.tx_gi = None
        self.tx_duration = None
        self.tx_mbit_calc = None
        self.tx_data_rate_gi_short_Mbps = None
        self.tx_data_rate_gi_long_Mbps = None

        self.rx_bitrate = None
        self.rx_mcs = None
        self.rx_nss = None
        self.rx_mbit = None
        self.rx_mhz = None
        self.rx_gi = None
        self.rx_duration = None
        self.rx_mbit_calc = None
        self.rx_data_rate_gi_short_Mbps = None
        self.rx_data_rate_gi_long_Mbps = None

        self.data_rate = None
        # folder = os.path.dirname(__file__)

    def refreshProbe(self):
        self.json_post(self.probepath, {})
        sleep(0.2)
        response = self.json_get(self.probepath)
        self.response = response
        if self.debug:
            print("probepath (eid): {probepath}".format(probepath=self.probepath))
            pprint("Probe response: {response}".format(response=self.response))
        text = self.response['probe-results'][0][self.eid_str]['probe results'].split('\n')
        signals = [x.strip('\t').split('\t') for x in text if 'signal' in x]
        keys = [x[0].strip(' ').strip(':') for x in signals]
        values = [x[1].strip('dBm').strip(' ') for x in signals]
        # if self.debug:
        print("signals keys: {keys}".format(keys=keys))
        print("signals values: {values}".format(values=values))
        self.signals = dict(zip(keys, values))

        tx_bitrate = [x for x in text if 'tx bitrate' in x][0].replace('\t', ' ')
        # if 'HE' in tx_bitrate:
        #     print("HE not supported ")
        print("tx_bitrate {tx_bitrate}".format(tx_bitrate=tx_bitrate))
        self.tx_bitrate = tx_bitrate.split(':')[-1].strip(' ')
        if 'MHz' in tx_bitrate:
            self.tx_mhz = [x.strip('\t') for x in text if 'tx bitrate' in x][0].split('MHz')[0].rsplit(' ')[-1].strip(
                ' ')
            print("tx_mhz {tx_mhz}".format(tx_mhz=self.tx_mhz))
        else:
            self.tx_mhz = 20
            print("HT: tx_mhz {tx_mhz}".format(tx_mhz=self.tx_mhz))
        tx_mcs = [x.strip('\t') for x in text if 'tx bitrate' in x][0].split(':')[1].strip('\t')
        if 'MCS' in tx_mcs:
            self.tx_mcs = int(tx_mcs.split('MCS')[1].strip(' ').split(' ')[0])
            print("self.tx_mcs {tx_mcs}".format(tx_mcs=self.tx_mcs))
            if 'NSS' in text:
                self.tx_nss = [x.strip('\t') for x in text if 'tx bitrate' in x][0].split('NSS')[1].strip(' ')
            else:
                # nss is not present need to derive from MCS for HT
                if 0 <= self.tx_mcs <= 7:
                    self.tx_nss = 1
                elif 8 <= self.tx_mcs <= 15:
                    self.tx_nss = 2
                elif 16 <= self.tx_mcs <= 23:
                    self.tx_nss = 3
                elif 24 <= self.tx_mcs <= 31:
                    self.tx_nss = 4
            print("tx_nss {tx_nss}".format(tx_nss=self.tx_nss))
            self.tx_mbit = float(self.tx_bitrate.split(' ')[0])
            print("tx_mbit {tx_mbit}".format(tx_mbit=self.tx_mbit))
            if 'HE' in tx_bitrate:
                self.calculated_data_rate_tx_HE()
            elif 'VHT' in tx_bitrate:
                self.calculated_data_rate_tx_VHT()
            else:
                self.calculated_data_rate_tx_HT()
        else:
            print("No tx MCS value:{tx_bitrate}".format(tx_bitrate=tx_bitrate))

        rx_bitrate = [x for x in text if 'rx bitrate' in x][0].replace('\t', ' ')
        print("rx_bitrate {rx_bitrate}".format(rx_bitrate=rx_bitrate))
        self.rx_bitrate = rx_bitrate.split(':')[-1].strip(' ')
        print("self.rx_bitrate {rx_bitrate}".format(rx_bitrate=self.rx_bitrate))
        # rx will received : 6Mbps encoding is legacy frame
        # for 24g - MHz is 20
        # try:
        if 'MHz' in rx_bitrate:
            self.rx_mhz = [x.strip('\t') for x in text if 'rx bitrate' in x][0].split('MHz')[0].rsplit(' ')[
                -1].strip(' ')
            print("rx_mhz {rx_mhz}".format(rx_mhz=self.rx_mhz))
        else:
            self.rx_mhz = 20

        rx_mcs = [x.strip('\t') for x in text if 'rx bitrate' in x][0].split(':')[1].strip('\t')
        # MCS is not in the 6.0MBit/s frame
        if 'MCS' in rx_mcs:
            self.rx_mcs = int(rx_mcs.split('MCS')[1].strip(' ').split(' ')[0])
            print("self.rx_mcs {rx_mcs}".format(rx_mcs=self.rx_mcs))
            if 'NSS' in text:
                self.rx_nss = [x.strip('\t') for x in text if 'rx bitrate' in x][0].split('NSS')[1].strip(' ')
            else:
                # nss is not present need to derive from MCS for HT
                if 0 <= self.rx_mcs <= 7:
                    self.rx_nss = 1
                elif 8 <= self.rx_mcs <= 15:
                    self.rx_nss = 2
                elif 16 <= self.rx_mcs <= 23:
                    self.rx_nss = 3
                elif 24 <= self.rx_mcs <= 31:
                    self.rx_nss = 4

            self.rx_mbit = self.rx_bitrate.split(' ')[0]
            print("rx_nss {rx_nss}".format(rx_nss=self.rx_nss))
            self.rx_mbit = float(self.rx_bitrate.split(' ')[0])
            print("rx_mbit {rx_mbit}".format(rx_mbit=self.rx_mbit))
            if 'HE' in rx_bitrate:
                self.calculated_data_rate_rx_HE()
            elif 'VHT' in rx_bitrate:
                self.calculated_data_rate_rx_VHT()
            else:
                self.calculated_data_rate_rx_HT()
        else:
            print("No rx MCS value:{rx_bitrate}".format(rx_bitrate=rx_bitrate))

    def getSignalAvgCombined(self):
        return self.signals['signal avg'].split(' ')[0]

    def getSignalAvgPerChain(self):
        return ' '.join(self.signals['signal avg'].split(' ')[1:])

    def getSignalCombined(self):
        return self.signals['signal'].split(' ')[0]

    def getSignalPerChain(self):
        return ' '.join(self.signals['signal'].split(' ')[1:])

    def getBeaconSignalAvg(self):
        return ' '.join(self.signals['beacon signal avg']).replace(' ', '')

    def calculated_data_rate_tx_HT(self):
        print("calculated_data_rate_tx_HT")
        # TODO compare with standard for 40 MHz if values change
        N_sd = 0  # Number of Data Subcarriers based on modulation and bandwith
        N_bpscs = 0  # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
        R = 0  # coding , (Determined by the modulation, MCS )
        N_ss = 0  # Number of Spatial Streams
        T_dft = 3.2 * 10 ** -6  # Constant for HT
        T_gi_short = .4 * 10 ** -6  # Guard index.
        T_gi_long = .8 * 10 ** -6  # Guard index.
        bw = 20
        # Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
        # the nubmer of Data Subcarriers is based on modulation and bandwith
        bw = int(self.tx_mhz)
        print("Mhz {Mhz}".format(Mhz=self.tx_mhz))
        if bw == 20:
            N_sd = 52
        elif bw == 40:
            N_sd = 108
        elif bw == 80:
            N_sd = 234
        elif bw == 160:
            N_sd = 468
        else:
            print("For HT if cannot be read bw is assumed to be 20")
            N_sd = 52
            self.tx_mhz = 20

        # NSS
        N_ss = self.tx_nss
        # MCS (Modulation Coding Scheme) determines the constands
        # MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
        # Only for HT configuration
        if self.tx_mcs == 0 or self.tx_mcs == 8 or self.tx_mcs == 16 or self.tx_mcs == 24:
            R = 1 / 2
            N_bpscs = 1
        # MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
        elif self.tx_mcs == 1 or self.tx_mcs == 9 or self.tx_mcs == 17 or self.tx_mcs == 25:
            R = 1 / 2
            N_bpscs = 2
        # MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
        elif self.tx_mcs == 2 or self.tx_mcs == 10 or self.tx_mcs == 18 or self.tx_mcs == 26:
            R = 3 / 4
            N_bpscs = 2
        # MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
        elif self.tx_mcs == 3 or self.tx_mcs == 11 or self.tx_mcs == 19 or self.tx_mcs == 27:
            R = 1 / 2
            N_bpscs = 4
        # MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
        elif self.tx_mcs == 4 or self.tx_mcs == 12 or self.tx_mcs == 20 or self.tx_mcs == 28:
            R = 3 / 4
            N_bpscs = 4
        # MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
        elif self.tx_mcs == 5 or self.tx_mcs == 13 or self.tx_mcs == 21 or self.tx_mcs == 29:
            R = 2 / 3
            N_bpscs = 6
        # MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
        elif self.tx_mcs == 6 or self.tx_mcs == 14 or self.tx_mcs == 22 or self.tx_mcs == 30:
            R = 3 / 4
            N_bpscs = 6
        # MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
        elif self.tx_mcs == 7 or self.tx_mcs == 15 or self.tx_mcs == 23 or self.tx_mcs == 31:
            R = 5 / 6
            N_bpscs = 6

        print(
            "tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
                mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))

        self.tx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
        print("tx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.tx_data_rate_gi_short_Mbps))

        print(
            "tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
                mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))

        self.tx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
        print("data_rate gi_long {data_rate} Mbps".format(data_rate=self.tx_data_rate_gi_long_Mbps))

        if abs(self.tx_mbit - self.tx_data_rate_gi_short_Mbps) <= abs(self.tx_mbit - self.tx_data_rate_gi_long_Mbps):
            self.tx_mbit_calc = self.tx_data_rate_gi_short_Mbps
            self.tx_gi = T_gi_short
        else:
            self.tx_mbit_calc = self.tx_data_rate_gi_long_Mbps
            self.tx_gi = T_gi_long

    def calculated_data_rate_rx_HT(self):
        print("calculated_data_rate_rx_HT")
        N_sd = 0  # Number of Data Subcarriers based on modulation and bandwith
        N_bpscs = 0  # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
        R = 0  # coding , (Determined by the modulation, MCS )
        N_ss = 0  # Number of Spatial Streams
        T_dft = 3.2 * 10 ** -6  # Constant for HT
        T_gi_short = .4 * 10 ** -6  # Guard index.
        T_gi_long = .8 * 10 ** -6  # Guard index.
        bw = 20
        # Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
        # the nubmer of Data Subcarriers is based on modulation and bandwith

        bw = int(self.rx_mhz)
        print("Mhz {Mhz}".format(Mhz=self.rx_mhz))
        if bw == 20:
            N_sd = 52
        elif bw == 40:
            N_sd = 108
        elif bw == 80:
            N_sd = 234
        elif bw == 160:
            N_sd = 468
        else:
            print("For HT if cannot be read bw is assumed to be 20")
            N_sd = 52
            self.rx_mhz = 20
        # NSS
        N_ss = self.rx_nss
        # MCS (Modulation Coding Scheme) determines the constands
        # MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
        # Only for HT configuration
        if self.rx_mcs == 0 or self.rx_mcs == 8 or self.rx_mcs == 16 or self.rx_mcs == 24:
            R = 1 / 2
            N_bpscs = 1
        # MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
        elif self.rx_mcs == 1 or self.rx_mcs == 9 or self.rx_mcs == 17 or self.rx_mcs == 25:
            R = 1 / 2
            N_bpscs = 2
        # MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
        elif self.rx_mcs == 2 or self.rx_mcs == 10 or self.rx_mcs == 18 or self.rx_mcs == 26:
            R = 3 / 4
            N_bpscs = 2
        # MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
        elif self.rx_mcs == 3 or self.rx_mcs == 11 or self.rx_mcs == 19 or self.rx_mcs == 27:
            R = 1 / 2
            N_bpscs = 4
        # MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
        elif self.rx_mcs == 4 or self.rx_mcs == 12 or self.rx_mcs == 20 or self.rx_mcs == 28:
            R = 3 / 4
            N_bpscs = 4
        # MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
        elif self.rx_mcs == 5 or self.rx_mcs == 13 or self.rx_mcs == 21 or self.rx_mcs == 29:
            R = 2 / 3
            N_bpscs = 6
        # MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
        elif self.rx_mcs == 6 or self.rx_mcs == 14 or self.rx_mcs == 22 or self.rx_mcs == 30:
            R = 3 / 4
            N_bpscs = 6
        # MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
        elif self.rx_mcs == 7 or self.rx_mcs == 15 or self.rx_mcs == 23 or self.rx_mcs == 31:
            R = 5 / 6
            N_bpscs = 6
        print(
            "mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
                mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))
        self.rx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
        print("rx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.rx_data_rate_gi_short_Mbps))
        print(
            "mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
                mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))
        self.rx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
        print("rx_data_rate gi_long {data_rate} Mbps".format(data_rate=self.rx_data_rate_gi_long_Mbps))
        if abs(self.rx_mbit - self.rx_data_rate_gi_short_Mbps) <= abs(
                self.rx_mbit - self.rx_data_rate_gi_long_Mbps):
            self.rx_mbit_calc = self.rx_data_rate_gi_short_Mbps
            self.rx_gi = T_gi_short
        else:
            self.rx_mbit_calc = self.rx_data_rate_gi_long_Mbps
            self.rx_gi = T_gi_long

    def calculated_data_rate_tx_VHT(self):
        print("calculated_data_rate_tx_VHT")
        # TODO compare with standard for 40 MHz if values change
        N_sd = 0  # Number of Data Subcarriers based on modulation and bandwith
        N_bpscs = 0  # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
        R = 0  # coding , (Determined by the modulation, MCS )
        N_ss = 0  # Number of Spatial Streams
        T_dft = 3.2 * 10 ** -6  # Constant for HT
        T_gi_short = .4 * 10 ** -6  # Guard index.
        T_gi_long = .8 * 10 ** -6  # Guard index.
        bw = 20
        # Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
        # the nubmer of Data Subcarriers is based on modulation and bandwith
        bw = int(self.tx_mhz)

        print("Mhz {Mhz}".format(Mhz=self.tx_mhz))
        if bw == 20:
            N_sd = 52
        elif bw == 40:
            N_sd = 108
        elif bw == 80:
            N_sd = 234
        elif bw == 160:
            N_sd = 468
        else:
            print("For HT if cannot be read bw is assumed to be 20")
            N_sd = 52
            self.tx_mhz = 20

        # NSS
        N_ss = self.tx_nss
        # MCS (Modulation Coding Scheme) determines the constands
        # MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
        # Only for HT configuration
        if self.tx_mcs == 0:
            R = 1 / 2
            N_bpscs = 1
        # MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
        elif self.tx_mcs == 1:
            R = 1 / 2
            N_bpscs = 2
        # MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
        elif self.tx_mcs == 2:
            R = 3 / 4
            N_bpscs = 2
        # MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
        elif self.tx_mcs == 3:
            R = 1 / 2
            N_bpscs = 4
        # MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
        elif self.tx_mcs == 4:
            R = 3 / 4
            N_bpscs = 4
        # MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
        elif self.tx_mcs == 5:
            R = 2 / 3
            N_bpscs = 6
        # MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
        elif self.tx_mcs == 6:
            R = 3 / 4
            N_bpscs = 6
        # MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
        elif self.tx_mcs == 7:
            R = 5 / 6
            N_bpscs = 6
        # MCS 8 == Modulation 256-QAM R = 3/4 , N_bpscs = 8
        elif self.tx_mcs == 8:
            R = 3 / 4
            N_bpscs = 8
        # MCS 9 == Modulation 256-QAM R = 5/6 , N_bpscs = 8
        elif self.tx_mcs == 9:
            R = 5 / 6
            N_bpscs = 8

        print(
            "tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
                mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))

        self.tx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
        print("tx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.tx_data_rate_gi_short_Mbps))

        print(
            "tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
                mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))

        self.tx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
        print("data_rate gi_long {data_rate} Mbps".format(data_rate=self.tx_data_rate_gi_long_Mbps))

        if abs(self.tx_mbit - self.tx_data_rate_gi_short_Mbps) <= abs(self.tx_mbit - self.tx_data_rate_gi_long_Mbps):
            self.tx_mbit_calc = self.tx_data_rate_gi_short_Mbps
            self.tx_gi = T_gi_short
        else:
            self.tx_mbit_calc = self.tx_data_rate_gi_long_Mbps
            self.tx_gi = T_gi_long

    def calculated_data_rate_rx_VHT(self):
        print("calculated_data_rate_rx_VHT")
        N_sd = 0  # Number of Data Subcarriers based on modulation and bandwith
        N_bpscs = 0  # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
        R = 0  # coding , (Determined by the modulation, MCS )
        N_ss = 0  # Number of Spatial Streams
        T_dft = 3.2 * 10 ** -6  # Constant for HT
        T_gi_short = .4 * 10 ** -6  # Guard index.
        T_gi_long = .8 * 10 ** -6  # Guard index.
        # Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
        # the nubmer of Data Subcarriers is based on modulation and bandwith
        bw = int(self.rx_mhz)
        print("Mhz {Mhz}".format(Mhz=self.rx_mhz))
        if bw == 20:
            N_sd = 52
        elif bw == 40:
            N_sd = 108
        elif bw == 80:
            N_sd = 234
        elif bw == 160:
            N_sd = 468
        else:
            print("For HT if cannot be read bw is assumed to be 20")
            N_sd = 52
            self.rx_mhz = 20
        # NSS
        N_ss = self.rx_nss
        # MCS (Modulation Coding Scheme) determines the constands
        # MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
        # Only for HT configuration
        if self.rx_mcs == 0:
            R = 1 / 2
            N_bpscs = 1
        # MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
        elif self.rx_mcs == 1:
            R = 1 / 2
            N_bpscs = 2
        # MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
        elif self.rx_mcs == 2:
            R = 3 / 4
            N_bpscs = 2
        # MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
        elif self.rx_mcs == 3:
            R = 1 / 2
            N_bpscs = 4
        # MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
        elif self.rx_mcs == 4:
            R = 3 / 4
            N_bpscs = 4
        # MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
        elif self.rx_mcs == 5:
            R = 2 / 3
            N_bpscs = 6
        # MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
        elif self.rx_mcs == 6:
            R = 3 / 4
            N_bpscs = 6
        # MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
        elif self.rx_mcs == 7:
            R = 5 / 6
            N_bpscs = 6
        # MCS 8 == Modulation 256-QAM R = 3/4 , N_bpscs = 8
        elif self.rx_mcs == 8:
            R = 3 / 4
            N_bpscs = 8
        # MCS 9 == Modulation 256-QAM R = 5/6 , N_bpscs = 8
        elif self.rx_mcs == 9:
            R = 5 / 6
            N_bpscs = 8
        print(
            "mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
                mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))
        self.rx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
        print("rx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.rx_data_rate_gi_short_Mbps))
        print(
            "mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
                mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))
        self.rx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
        print("rx_data_rate gi_long {data_rate} Mbps".format(data_rate=self.rx_data_rate_gi_long_Mbps))
        if abs(self.rx_mbit - self.rx_data_rate_gi_short_Mbps) <= abs(
                self.rx_mbit - self.rx_data_rate_gi_long_Mbps):
            self.rx_mbit_calc = self.rx_data_rate_gi_short_Mbps
            self.rx_gi = T_gi_short
        else:
            self.rx_mbit_calc = self.rx_data_rate_gi_long_Mbps
            self.rx_gi = T_gi_long

    ##########################################
    #
    # HE no OFDMA - changes the calculations
    #
    ###########################################

    def calculated_data_rate_tx_HE(self):
        print("calculated_data_rate_tx_HE")
        # TODO compare with standard for 40 MHz if values change
        N_sd = 0  # Number of Data Subcarriers based on modulation and bandwith
        N_bpscs = 0  # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
        R = 0  # coding , (Determined by the modulation, MCS )
        N_ss = 0  # Number of Spatial Streams
        T_dft = 3.2 * 10 ** -6  # Constant for HT
        T_gi_short = .4 * 10 ** -6  # Guard index.
        T_gi_long = .8 * 10 ** -6  # Guard index.
        bw = 20
        # Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
        # the nubmer of Data Subcarriers is based on modulation and bandwith
        bw = int(self.tx_mhz)
        print("Mhz {Mhz}".format(Mhz=self.tx_mhz))
        if bw == 20:
            N_sd = 52
        elif bw == 40:
            N_sd = 108
        elif bw == 80:
            N_sd = 234
        elif bw == 160:
            N_sd = 468
        else:
            print("For HT if cannot be read bw is assumed to be 20")
            N_sd = 52
            self.tx_mhz = 20

        # NSS
        N_ss = self.tx_nss
        # MCS (Modulation Coding Scheme) determines the constands
        # MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
        # Only for HT configuration
        if self.tx_mcs == 0:
            R = 1 / 2
            N_bpscs = 1
        # MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
        elif self.tx_mcs == 1:
            R = 1 / 2
            N_bpscs = 2
        # MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
        elif self.tx_mcs == 2:
            R = 3 / 4
            N_bpscs = 2
        # MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
        elif self.tx_mcs == 3:
            R = 1 / 2
            N_bpscs = 4
        # MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
        elif self.tx_mcs == 4:
            R = 3 / 4
            N_bpscs = 4
        # MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
        elif self.tx_mcs == 5:
            R = 2 / 3
            N_bpscs = 6
        # MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
        elif self.tx_mcs == 6:
            R = 3 / 4
            N_bpscs = 6
        # MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
        elif self.tx_mcs == 7:
            R = 5 / 6
            N_bpscs = 6
        # MCS 8 == Modulation 256-QAM R = 3/4 , N_bpscs = 8
        elif self.tx_mcs == 8:
            R = 3 / 4
            N_bpscs = 8
        # MCS 9 == Modulation 256-QAM R = 5/6 , N_bpscs = 8
        elif self.tx_mcs == 9:
            R = 5 / 6
            N_bpscs = 8

        print(
            "tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
                mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))

        self.tx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
        print("tx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.tx_data_rate_gi_short_Mbps))

        print(
            "tx: mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
                mcs=self.tx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))

        self.tx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
        print("data_rate gi_long {data_rate} Mbps".format(data_rate=self.tx_data_rate_gi_long_Mbps))

        if abs(self.tx_mbit - self.tx_data_rate_gi_short_Mbps) <= abs(self.tx_mbit - self.tx_data_rate_gi_long_Mbps):
            self.tx_mbit_calc = self.tx_data_rate_gi_short_Mbps
            self.tx_gi = T_gi_short
        else:
            self.tx_mbit_calc = self.tx_data_rate_gi_long_Mbps
            self.tx_gi = T_gi_long

    def calculated_data_rate_rx_HE(self):
        print("calculated_data_rate_rx_HE")
        N_sd = 0  # Number of Data Subcarriers based on modulation and bandwith
        N_bpscs = 0  # Number of coded bits per Subcarrier(Determined by the modulation, MCS)
        R = 0  # coding , (Determined by the modulation, MCS )
        N_ss = 0  # Number of Spatial Streams
        T_dft = 3.2 * 10 ** -6  # Constant for HT
        T_gi_short = .4 * 10 ** -6  # Guard index.
        T_gi_long = .8 * 10 ** -6  # Guard index.
        # Note the T_gi is not exactly know so need to calculate bothh with .4 and .8
        # the nubmer of Data Subcarriers is based on modulation and bandwith
        bw = int(self.rx_mhz)
        print("Mhz {Mhz}".format(Mhz=self.rx_mhz))
        if bw == 20:
            N_sd = 52
        elif bw == 40:
            N_sd = 108
        elif bw == 80:
            N_sd = 234
        elif bw == 160:
            N_sd = 468
        else:
            print("For HT if cannot be read bw is assumed to be 20")
            N_sd = 52
            self.rx_mhz = 20
        # NSS
        N_ss = self.rx_nss
        # MCS (Modulation Coding Scheme) determines the constands
        # MCS 0 == Modulation BPSK R = 1/2 , N_bpscs = 1,
        # Only for HT configuration
        if self.rx_mcs == 0:
            R = 1 / 2
            N_bpscs = 1
        # MCS 1 == Modulation QPSK R = 1/2 , N_bpscs = 2
        elif self.rx_mcs == 1:
            R = 1 / 2
            N_bpscs = 2
        # MCS 2 == Modulation QPSK R = 3/4 , N_bpscs = 2
        elif self.rx_mcs == 2:
            R = 3 / 4
            N_bpscs = 2
        # MCS 3 == Modulation 16-QAM R = 1/2 , N_bpscs = 4
        elif self.rx_mcs == 3:
            R = 1 / 2
            N_bpscs = 4
        # MCS 4 == Modulation 16-QAM R = 3/4 , N_bpscs = 4
        elif self.rx_mcs == 4:
            R = 3 / 4
            N_bpscs = 4
        # MCS 5 == Modulation 64-QAM R = 2/3 , N_bpscs = 6
        elif self.rx_mcs == 5:
            R = 2 / 3
            N_bpscs = 6
        # MCS 6 == Modulation 64-QAM R = 3/4 , N_bpscs = 6
        elif self.rx_mcs == 6:
            R = 3 / 4
            N_bpscs = 6
        # MCS 7 == Modulation 64-QAM R = 5/6 , N_bpscs = 6
        elif self.rx_mcs == 7:
            R = 5 / 6
            N_bpscs = 6
        # MCS 8 == Modulation 256-QAM R = 3/4 , N_bpscs = 8
        elif self.rx_mcs == 8:
            R = 3 / 4
            N_bpscs = 8
        # MCS 9 == Modulation 256-QAM R = 5/6 , N_bpscs = 8
        elif self.rx_mcs == 9:
            R = 5 / 6
            N_bpscs = 8
        print(
            "mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_short {T_gi_short}".format(
                mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_short=T_gi_short))
        self.rx_data_rate_gi_short_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_short)) / 1000000
        print("rx_data_rate gi_short {data_rate} Mbit/s".format(data_rate=self.rx_data_rate_gi_short_Mbps))
        print(
            "mcs {mcs} N_sd {N_sd} N_bpscs {N_bpscs} R {R} N_ss {N_ss} T_dft {T_dft} T_gi_long {T_gi_long}".format(
                mcs=self.rx_mcs, N_sd=N_sd, N_bpscs=N_bpscs, R=R, N_ss=N_ss, T_dft=T_dft, T_gi_long=T_gi_long))
        self.rx_data_rate_gi_long_Mbps = ((N_sd * N_bpscs * R * float(N_ss)) / (T_dft + T_gi_long)) / 1000000
        print("rx_data_rate gi_long {data_rate} Mbps".format(data_rate=self.rx_data_rate_gi_long_Mbps))
        if abs(self.rx_mbit - self.rx_data_rate_gi_short_Mbps) <= abs(
                self.rx_mbit - self.rx_data_rate_gi_long_Mbps):
            self.rx_mbit_calc = self.rx_data_rate_gi_short_Mbps
            self.rx_gi = T_gi_short
        else:
            self.rx_mbit_calc = self.rx_data_rate_gi_long_Mbps
            self.rx_gi = T_gi_long
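Note: every calculated_data_rate_* method in the new file reduces to the same symbol-rate formula, data rate = (N_sd * N_bpscs * R * N_ss) / (T_dft + T_gi). A stand-alone check of the HT constants used above (illustrative only, not part of port_probe.py):

    # HT, MCS 7, 20 MHz, 1 spatial stream: N_sd=52, N_bpscs=6, R=5/6
    N_sd, N_bpscs, R, N_ss = 52, 6, 5 / 6, 1
    T_dft = 3.2e-6
    for T_gi in (0.4e-6, 0.8e-6):
        rate_mbps = (N_sd * N_bpscs * R * N_ss) / (T_dft + T_gi) / 1e6
        print(round(rate_mbps, 1))
    # prints 72.2 (short GI) and 65.0 (long GI), matching the standard HT MCS 7 rates
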
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3

-class PortUtils():
+class PortUtils:
     def __init__(self, local_realm):
         self.local_realm = local_realm
@@ -5,13 +5,13 @@ import importlib
 from pprint import pprint
 import time

 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
 LFCliBase = lfcli_base.LFCliBase
 LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
 LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
+set_port = importlib.import_module("py-json.LANforge.set_port")


 class QVLANProfile(LFCliBase):
@@ -19,7 +19,6 @@ class QVLANProfile(LFCliBase):
                  local_realm,
                  qvlan_parent="eth1",
                  num_qvlans=1,
-                 admin_down=False,
                  dhcp=False,
                  debug_=False):
         super().__init__(lfclient_host, lfclient_port, debug_)
@@ -98,17 +97,17 @@ class QVLANProfile(LFCliBase):
                 pprint(set_port.set_port_current_flags)
                 pprint(set_port.set_port_interest_flags)
                 return
-            if (param_name in set_port.set_port_cmd_flags):
+            if param_name in set_port.set_port_cmd_flags:
                 if (value == 1) and (param_name not in self.desired_set_port_cmd_flags):
                     self.desired_set_port_cmd_flags.append(param_name)
                 elif value == 0:
                     self.desired_set_port_cmd_flags.remove(param_name)
-            elif (param_name in set_port.set_port_current_flags):
+            elif param_name in set_port.set_port_current_flags:
                 if (value == 1) and (param_name not in self.desired_set_port_current_flags):
                     self.desired_set_port_current_flags.append(param_name)
                 elif value == 0:
                     self.desired_set_port_current_flags.remove(param_name)
-            elif (param_name in set_port.set_port_interest_flags):
+            elif param_name in set_port.set_port_interest_flags:
                 if (value == 1) and (param_name not in self.desired_set_port_interest_flags):
                     self.desired_set_port_interest_flags.append(param_name)
                 elif value == 0:
@@ -116,11 +115,11 @@ class QVLANProfile(LFCliBase):
         else:
             raise ValueError("Unknown param name: " + param_name)

-    def create(self, admin_down=False, debug=False, sleep_time=1):
+    def create(self, sleep_time=1):
         print("Creating qvlans...")
         req_url = "/cli-json/add_vlan"

-        if not self.dhcp and self.first_ip_addr is not None and self.netmask is not None and self.gateway is not None:
+        if not self.dhcp and self.first_ip_addr and self.netmask and self.gateway:
             self.desired_set_port_interest_flags.append("ip_address")
             self.desired_set_port_interest_flags.append("ip_Mask")
             self.desired_set_port_interest_flags.append("ip_gateway")
@@ -143,11 +142,10 @@ class QVLANProfile(LFCliBase):
                 "shelf": self.shelf,
                 "resource": self.resource,
                 "port": self.local_realm.name_to_eid(self.qvlan_parent)[2],
-                "vid": i+1
+                "vid": i + 1
             }
             self.created_qvlans.append("%s.%s.%s#%d" % (self.shelf, self.resource,
-                                                        self.qvlan_parent, int(
-                self.desired_qvlans[i][self.desired_qvlans[i].index('#') + 1:])))
+                                                        self.qvlan_parent, int(self.desired_qvlans[i][self.desired_qvlans[i].index('#') + 1:])))
             self.local_realm.json_post(req_url, data)
             time.sleep(sleep_time)
314 py-json/realm.py
@@ -16,7 +16,7 @@ from pprint import pprint

 # ---- ---- ---- ---- LANforge Base Imports ---- ---- ---- ----

 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 LANforge = importlib.import_module("py-json.LANforge")
@@ -28,8 +28,6 @@ LFCliBase = lfcli_base.LFCliBase

 l3_cxprofile = importlib.import_module("py-json.l3_cxprofile")
 L3CXProfile = l3_cxprofile.L3CXProfile
-l3_cxprofile2 = importlib.import_module("py-json.l3_cxprofile2")
-L3CXProfile2 = l3_cxprofile2.L3CXProfile2
 l4_cxprofile = importlib.import_module("py-json.l4_cxprofile")
 L4CXProfile = l4_cxprofile.L4CXProfile
 lf_attenmod = importlib.import_module("py-json.lf_attenmod")
@@ -96,7 +94,7 @@ class Realm(LFCliBase):
                  _exit_on_error=False,
                  _exit_on_fail=False,
                  _proxy_str=None,
-                 _capture_signal_list=[]):
+                 _capture_signal_list=None):
         super().__init__(_lfjson_host=lfclient_host,
                          _lfjson_port=lfclient_port,
                          _debug=debug_,
@@ -105,6 +103,8 @@ class Realm(LFCliBase):
                          _proxy_str=_proxy_str,
                          _capture_signal_list=_capture_signal_list)

+        if _capture_signal_list is None:
+            _capture_signal_list = []
         self.debug = debug_
         # if debug_:
         # print("Realm _proxy_str: %s" % _proxy_str)
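Note: the two hunks above replace the mutable default argument `_capture_signal_list=[]` with the usual None-default idiom, because a default list is created once at function definition time and then shared by every call. A minimal sketch of the pattern the diff adopts (names here are illustrative, not the LANforge API):

    def __init__(self, capture_signal_list=None):
        # Build a fresh list per instance instead of sharing one default object.
        if capture_signal_list is None:
            capture_signal_list = []
        self.capture_signal_list = capture_signal_list
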
@@ -112,7 +112,6 @@ class Realm(LFCliBase):
         self.check_connect()
         self.chan_to_freq = {}
         self.freq_to_chan = {}
-        freq = 0
         chan = 1
         for freq in range(2412, 2472, 5):
             self.freq_to_chan[freq] = chan
@@ -223,35 +222,38 @@ class Realm(LFCliBase):
                                              port_list=sta_list,
                                              debug=debug_)

-    def rm_port(self, port_eid, check_exists=True, debug_=False):
+    def rm_port(self, port_eid, check_exists=True, debug_=None):
         if port_eid is None:
             raise ValueError("realm.rm_port: want a port eid like 1.1.eth1")
-        debug_ |= self.debug
+        if debug_ is None:
+            debug_ = self.debug
         req_url = "/cli-json/rm_vlan"
         eid = self.name_to_eid(port_eid)
         if check_exists:
-            if not self.port_exists(port_eid):
+            if not self.port_exists(port_eid, debug=debug_):
                 return False

         data = {
             "shelf": eid[0],
             "resource": eid[1],
             "port": eid[2]
         }
-        rsp = self.json_post(req_url, data, debug_=debug_)
+        self.json_post(req_url, data, debug_=debug_)
         return True

-    def port_exists(self, port_eid):
+    def port_exists(self, port_eid, debug=None):
+        if debug is None:
+            debug = self.debug
         eid = self.name_to_eid(port_eid)
-        current_stations = self.json_get("/port/%s/%s/%s?fields=alias" % (eid[0], eid[1], eid[2]))
-        if not current_stations is None:
+        current_stations = self.json_get("/port/%s/%s/%s?fields=alias" % (eid[0], eid[1], eid[2]),
+                                         debug_=debug)
+        if current_stations:
             return True
         return False

     def admin_up(self, port_eid):
         # print("186 admin_up port_eid: "+port_eid)
         eid = self.name_to_eid(port_eid)
-        shelf = eid[0]
         resource = eid[1]
         port = eid[2]
         request = LFUtils.port_up_request(resource_id=resource, port_name=port)
@@ -261,7 +263,6 @@ class Realm(LFCliBase):

     def admin_down(self, port_eid):
         eid = self.name_to_eid(port_eid)
-        shelf = eid[0]
         resource = eid[1]
         port = eid[2]
         request = LFUtils.port_down_request(resource_id=resource, port_name=port)
@@ -269,7 +270,6 @@ class Realm(LFCliBase):

     def reset_port(self, port_eid):
         eid = self.name_to_eid(port_eid)
-        shelf = eid[0]
         resource = eid[1]
         port = eid[2]
         request = LFUtils.port_reset_request(resource_id=resource, port_name=port)
@@ -317,13 +317,13 @@ class Realm(LFCliBase):
 
     def cleanup_cxe_prefix(self, prefix):
         cx_list = self.cx_list()
-        if cx_list is not None:
+        if cx_list:
             for cx_name in cx_list:
                 if cx_name.startswith(prefix):
                     self.rm_cx(cx_name)
 
         endp_list = self.json_get("/endp/list")
-        if endp_list is not None:
+        if endp_list:
             if 'endpoint' in endp_list:
                 endp_list = list(endp_list['endpoint'])
                 for idx in range(len(endp_list)):
@@ -351,12 +351,11 @@ class Realm(LFCliBase):
         if debug_:
             dbg_param = "?__debug=1"
 
-        while (last_response != "YES"):
-            response = self.json_post("/gui-json/cmd%s" % dbg_param, data, debug_=debug_,
-                                      response_json_list_=response_json)
+        while last_response != "YES":
+            self.json_post("/gui-json/cmd%s" % dbg_param, data, debug_=debug_, response_json_list_=response_json)
             # LFUtils.debug_printer.pprint(response_json)
             last_response = response_json[0]["LAST"]["response"]
-            if (last_response != "YES"):
+            if last_response != "YES":
                 last_response = None
                 response_json = []
                 time.sleep(1)
@@ -397,22 +396,23 @@ class Realm(LFCliBase):
             found_endps = {}
             if debug:
                 print("Waiting on endpoint endp_list {}".format(endp_list))
-            if (endp_list is not None) and ("items" not in endp_list):
+            if endp_list and ("items" not in endp_list):
                 try:
                     endp_list = list(endp_list['endpoint'])
                     for idx in range(len(endp_list)):
                         name = list(endp_list[idx])[0]
                         found_endps[name] = name
                 except:
-                    print("non-fatal exception endp_list = list(endp_list['endpoint'] did not exist, will wait some more")
+                    print(
+                        "non-fatal exception endp_list = list(endp_list['endpoint'] did not exist, will wait some more")
 
             for req in these_endp:
-                if not req in found_endps:
+                if req not in found_endps:
                     if debug:
-                        print("Waiting on endpoint: %s" % (req))
+                        print("Waiting on endpoint: %s" % req)
                     wait_more = True
             count += 1
-            if (count > 100):
+            if count > 100:
                 break
 
         return not wait_more
@@ -429,23 +429,25 @@ class Realm(LFCliBase):
             found_cxs = {}
             cx_list = self.cx_list()
             not_cx = ['warnings', 'errors', 'handler', 'uri', 'items']
-            if cx_list is not None:
+            if cx_list:
                 for cx_name in cx_list:
                     if cx_name in not_cx:
                         continue
                     found_cxs[cx_name] = cx_name
 
             for req in these_cx:
-                if not req in found_cxs:
+                if req not in found_cxs:
                     if debug:
-                        print("Waiting on CX: %s" % (req))
+                        print("Waiting on CX: %s" % req)
                     wait_more = True
             count += 1
-            if (count > 100):
+            if count > 100:
                 break
 
         return not wait_more
 
+    # def wait_until_database_loaded(self):
 
     # Returns map of all stations with port+type == WIFI-STATION
     # Key is the EID, value is the map of key/values for the port values.
     def station_map(self):
@@ -457,7 +459,7 @@ class Realm(LFCliBase):
         sta_map = {}
         temp_map = LFUtils.portListToAliasMap(response)
         for k, v in temp_map.items():
-            if (v['port type'] == "WIFI-STA"):
+            if v['port type'] == "WIFI-STA":
                 sta_map[k] = v
         temp_map.clear()
         del temp_map
@@ -482,7 +484,6 @@ class Realm(LFCliBase):
 
     # Returns list of all ports
     def port_list(self):
-        sta_list = []
         response = super().json_get("/port/list?fields=all")
         if (response is None) or ("interfaces" not in response):
             print("port_list: incomplete response:")
@@ -515,14 +516,14 @@ class Realm(LFCliBase):
             "shelf": eid_toks[0],
             "resource": eid_toks[1],
             "serno": eid_toks[2],
-            "atten_idx":eid_toks[3],
-            "val":atten_ddb,
+            "atten_idx": eid_toks[3],
+            "val": atten_ddb,
         }
         self.json_post(req_url, data)
 
     # removes port by eid/eidpn
     def remove_vlan_by_eid(self, eid):
-        if (eid is None) or ("" == eid):
+        if (eid is None) or (eid == ""):
             raise ValueError("removeVlanByEid wants eid like 1.1.sta0 but given[%s]" % eid)
         hunks = self.name_to_eid(eid)
         # print("- - - - - - - - - - - - - - - - -")
@@ -556,7 +557,7 @@ class Realm(LFCliBase):
                 if debug_:
                     print("- prelim - - - - - - - - - - - - - - - - - - -")
                     pprint(record)
-                if (record["port type"] == "WIFI-STA"):
+                if record["port type"] == "WIFI-STA":
                     prelim_map[name] = record
 
         except Exception as x:
@@ -606,7 +607,7 @@ class Realm(LFCliBase):
 
     def name_to_eid(self, eid, debug=False, non_port=False):
         if debug:
-            self.logg(level="debug", mesg="name_to_eid: "+str(eid))
+            self.logg(level="debug", mesg="name_to_eid: " + str(eid))
         if (type(eid) is list) or (type(eid) is tuple):
             return eid
         return LFUtils.name_to_eid(eid, non_port=non_port)
@@ -645,7 +646,7 @@ class Realm(LFCliBase):
 
         for sta_eid in station_list:
             if debug:
-                print("checking sta-eid: %s" % (sta_eid))
+                print("checking sta-eid: %s" % sta_eid)
             eid = self.name_to_eid(sta_eid)
 
             response = super().json_get("/port/%s/%s/%s?fields=alias,ip,port+type,ipv6+address" %
@@ -663,7 +664,7 @@ class Realm(LFCliBase):
                 if v['ip'] in waiting_states:
                     wait_more = True
                     if debug:
-                        print("Waiting for port %s to get IPv4 Address." % (sta_eid))
+                        print("Waiting for port %s to get IPv4 Address." % sta_eid)
                 else:
                     if sta_eid not in stas_with_ips:
                         stas_with_ips[sta_eid] = {'ipv4': v['ip']}
@@ -682,7 +683,7 @@ class Realm(LFCliBase):
                 else:
                     wait_more = True
                     if debug:
-                        print("Waiting for port %s to get IPv6 Address." % (sta_eid))
+                        print("Waiting for port %s to get IPv6 Address." % sta_eid)
 
             if wait_more:
                 time.sleep(1)
@@ -698,7 +699,7 @@ class Realm(LFCliBase):
             raise ValueError("check for num curr ips expects non-empty list of ports")
         for sta_eid in station_list:
             if debug:
-                print("checking sta-eid: %s" % (sta_eid))
+                print("checking sta-eid: %s" % sta_eid)
             eid = self.name_to_eid(sta_eid)
             response = super().json_get("/port/%s/%s/%s?fields=alias,ip,port+type,ipv6+address" %
                                         (eid[0], eid[1], eid[2]))
@@ -711,9 +712,9 @@ class Realm(LFCliBase):
                 break
             if ipv4:
                 v = response['interface']
-                if (v['ip'] in waiting_states):
+                if v['ip'] in waiting_states:
                     if debug:
-                        print("Waiting for port %s to get IPv4 Address." % (sta_eid))
+                        print("Waiting for port %s to get IPv4 Address." % sta_eid)
                 else:
                     if debug:
                         print("Found IP: %s on port: %s" % (v['ip'], sta_eid))
@@ -723,9 +724,9 @@ class Realm(LFCliBase):
                     num_sta_with_ips += 1
             if ipv6:
                 v = response['interface']
-                if (v['ip'] in waiting_states):
+                if v['ip'] in waiting_states:
                     if debug:
-                        print("Waiting for port %s to get IPv6 Address." % (sta_eid))
+                        print("Waiting for port %s to get IPv6 Address." % sta_eid)
 
                 else:
                     if debug:
@@ -736,11 +737,12 @@ class Realm(LFCliBase):
                         num_sta_with_ips += 1
         return num_sta_with_ips
 
-    def duration_time_to_seconds(self, time_string):
+    @staticmethod
+    def duration_time_to_seconds(time_string):
         if isinstance(time_string, str):
             pattern = re.compile("^(\d+)([dhms]$)")
             td = pattern.match(time_string)
-            if td is not None:
+            if td:
                 dur_time = int(td.group(1))
                 dur_measure = str(td.group(2))
                 if dur_measure == "d":
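To make the duration parser concrete: it accepts strings such as "30s", "5m", "2h" or "1d", and since it is now a @staticmethod it can be called without a Realm instance. A hedged sketch (the exact h/m/d multipliers are implied by the unit suffixes rather than shown in this hunk):

# Illustrative only: duration_time_to_seconds() matches "<number><d|h|m|s>".
import importlib

Realm = importlib.import_module("py-json.realm").Realm

print(Realm.duration_time_to_seconds("30s"))   # -> 30
print(Realm.duration_time_to_seconds("5m"))    # -> 300, assuming "m" means minutes
# Strings that do not match ^(\d+)([dhms]$), e.g. "90sec", leave the regex
# unmatched and are handled by the code that follows this hunk.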
@@ -757,11 +759,10 @@ class Realm(LFCliBase):
             raise ValueError("time_string must be of type str. Type %s provided" % type(time_string))
         return duration_sec
 
 
     def remove_all_stations(self, resource):
         port_list = self.station_list()
         sta_list = []
-        if sta_list is not None:
+        if port_list:
             print("Removing all stations")
             for item in list(port_list):
                 if "sta" in list(item)[0]:
@@ -780,7 +781,7 @@ class Realm(LFCliBase):
         endp_list = self.json_get("/endp/list")
         if "items" in endp_list or "empty" in endp_list:
             return
-        if endp_list is not None or endp_list:
+        if endp_list:
             print("Removing all endps")
             endp_list = list(endp_list['endpoint'])
             for endp_name in range(len(endp_list)):
@@ -796,10 +797,10 @@ class Realm(LFCliBase):
         # remove endpoints
         # nc show endpoints
         # nc show cross connects
-        try:
+        if self.cx_list():
             cx_list = list(self.cx_list())
             not_cx = ['warnings', 'errors', 'handler', 'uri', 'items', 'empty']
-            if cx_list is not None:
+            if cx_list:
                 print("Removing all cxs")
                 for cx_name in cx_list:
                     if cx_name in not_cx:
@@ -810,7 +811,7 @@ class Realm(LFCliBase):
                         "cx_name": cx_name
                     }
                     self.json_post(req_url, data)
-        except:
+        else:
             print("no cxs to remove")
 
         if remove_all_endpoints:
@@ -820,186 +821,93 @@ class Realm(LFCliBase):
                 "endpoint": "all"
             }
             self.json_post(req_url, data)
-            req_url = "cli-json/show_cx"
-            data = {
-                "test_mgr": "all",
-                "cross_connect": "all"
-            }
 
     def parse_link(self, link):
         link = self.lfclient_url + link
         info = ()
 
-    def new_station_profile(self, ver = 1):
-        if ver == 1:
-            station_prof = StationProfile(self.lfclient_url, local_realm=self, debug_=self.debug, up=False)
-        #elif ver == 2:
-        #    import station_profile2
-        #    station_prof = station_profile2.StationProfile2(self.lfclient_url, local_realm=self, debug_=self.debug, up=False)
-        return station_prof
+    def new_station_profile(self):
+        return StationProfile(self.lfclient_url, local_realm=self, debug_=self.debug, up=False)
 
-    def new_multicast_profile(self, ver = 1):
-        if ver == 1:
-            multi_prof = MULTICASTProfile(self.lfclient_host, self.lfclient_port,
-                                          local_realm=self, debug_=self.debug, report_timer_=3000)
-        #elif ver == 2:
-        #    import multicast_profile2
-        #    multi_prof = multicast_profile2.MULTICASTProfile2(self.lfclient_host, self.lfclient_port,
-        #                                                      local_realm=self, debug_=self.debug, report_timer_=3000)
-        return multi_prof
+    def new_multicast_profile(self):
+        return MULTICASTProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug, report_timer_=3000)
 
-    def new_wifi_monitor_profile(self, resource_=1, debug_=False, up_=False, ver = 1):
-        if ver == 1:
-            wifi_mon_prof = WifiMonitor(self.lfclient_url,
-                                        local_realm=self,
-                                        resource_=resource_,
-                                        up=up_,
-                                        debug_=(self.debug or debug_))
-        #elif ver == 2:
-        #    import wifi_monitor_profile2
-        #    wifi_mon_prof = wifi_monitor_profile2.WifiMonitor2(self.lfclient_url,
-        #                                                       local_realm=self,
-        #                                                       resource_=resource_,
-        #                                                       up=up_,
-        #                                                       debug_=(self.debug or debug_))
-        return wifi_mon_prof
+    def new_wifi_monitor_profile(self, resource_=1, debug_=False, up_=False):
+        return WifiMonitor(self.lfclient_url,
+                           local_realm=self,
+                           resource_=resource_,
+                           up=up_,
+                           debug_=(self.debug or debug_))
 
-    def new_l3_cx_profile(self, ver=1):
-        if ver == 1:
-            cx_prof = L3CXProfile(self.lfclient_host,
-                                  self.lfclient_port,
-                                  local_realm=self,
-                                  debug_=self.debug,
-                                  report_timer_=3000)
-        elif ver == 2:
-            cx_prof = L3CXProfile2(self.lfclient_host,
-                                   self.lfclient_port,
-                                   local_realm=self,
-                                   debug_=self.debug,
-                                   report_timer_=3000)
-        return cx_prof
+    def new_l3_cx_profile(self):
+        return L3CXProfile(self.lfclient_host,
+                           self.lfclient_port,
+                           local_realm=self,
+                           debug_=self.debug,
+                           report_timer_=3000)
 
-    def new_l4_cx_profile(self, ver=1):
-        if ver == 1:
-            cx_prof = L4CXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        #elif ver == 2:
-        #    import l4_cxprofile2
-        #    cx_prof = l4_cxprofile2.L4CXProfile2(self.lfclient_host,
-        #                                         self.lfclient_port,
-        #                                         local_realm=self,
-        #                                         debug_=self.debug,
-        #                                         report_timer_=3000)
-        return cx_prof
-    def new_attenuator_profile(self, ver=1):
-        if ver == 1:
-            atten_prof = ATTENUATORProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        return atten_prof
-    def new_generic_endp_profile(self, ver=1):
-        if ver == 1 :
-            endp_prof = GenCXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        #elif ver == 2:
-        #    import gen_cxprofile2
-        #    endp_prof = gen_cxprofile2.GenCXProfile(self.lfclient_host,
-        #                                            self.lfclient_port,
-        #                                            local_realm=self,
-        #                                            debug_=self.debug,
-        #                                            report_timer_=3000)
-        return endp_prof
+    def new_l4_cx_profile(self):
+        return L4CXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
 
-    def new_generic_cx_profile(self, ver=1):
+    def new_attenuator_profile(self):
+        return ATTENUATORProfile(self.lfclient_host, self.lfclient_port, debug_=self.debug)
+
+    def new_generic_endp_profile(self):
+        return GenCXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
+
+    def new_generic_cx_profile(self):
         """
         @deprecated
         :return: new GenCXProfile
         """
-        if ver == 1:
-            cx_prof = GenCXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        #elif ver == 2:
-        #    import gen_cxprofile2
-        #    cx_prof = gen_cxprofile2.GenCXProfile(self.lfclient_host,
-        #                                          self.lfclient_port,
-        #                                          local_realm=self,
-        #                                          debug_=self.debug,
-        #                                          report_timer_=3000)
-        return cx_prof
+        return GenCXProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
 
-    def new_vap_profile(self, ver=1):
-        if ver == 1:
-            vap_prof = VAPProfile(lfclient_host=self.lfclient_host, lfclient_port=self.lfclient_port, local_realm=self,
-                                  debug_=self.debug)
-        # elif ver == 2:
-        #     import vap_profile2
-        #     vap_prof = vap_profile2.VAPProfile2(lfclient_host=self.lfclient_host, lfclient_port=self.lfclient_port, local_realm=self,
-        #                                         debug_=self.debug)
-        return vap_prof
+    def new_vap_profile(self):
+        return VAPProfile(lfclient_host=self.lfclient_host, lfclient_port=self.lfclient_port, local_realm=self,
+                          debug_=self.debug)
 
-    def new_vr_profile(self, ver=2):
-        if ver == 2:
-            from vr_profile2 import VRProfile
-            vap_prof = VRProfile(local_realm=self,
-                                 debug=self.debug)
-        return vap_prof
+    # def new_vr_profile(self):
+    #     return VRProfile(local_realm=self,
+    #                      debug=self.debug)
 
-    def new_http_profile(self, ver = 1):
-        if ver == 1:
-            http_prof = HTTPProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        # elif ver == 2:
-        #     import http_profile2
-        #     http_prof = http_profile2.HTTPProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        return http_prof
+    def new_http_profile(self):
+        return HTTPProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
 
-    def new_fio_endp_profile(self, ver = 1):
-        if ver == 1:
-            cx_prof = FIOEndpProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        # elif ver == 2:
-        #     import fio_endp_profile2
-        #     cx_prof = fio_endp_profile2.FIOEndpProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        return cx_prof
+    def new_fio_endp_profile(self):
+        return FIOEndpProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
 
-    def new_dut_profile(self, ver = 1):
-        if ver == 1:
-            dut_profile = DUTProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        # elif ver == 2:
-        #     import dut_profile2
-        #     dut_profile = dut_profile2.DUTProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        return dut_profile
+    def new_dut_profile(self):
+        return DUTProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
 
-    def new_mvlan_profile(self, ver = 1):
-        if ver == 1:
-            mac_vlan_profile = MACVLANProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        # elif ver == 2:
-        #     import mac_vlan_profile2
-        #     mac_vlan_profile = mac_vlan_profile2.MACVLANProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        return mac_vlan_profile
+    def new_mvlan_profile(self):
+        return MACVLANProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
 
     def new_qvlan_profile(self):
         return QVLANProfile(self.host, self.port, local_realm=self, debug_=self.debug)
 
-    def new_test_group_profile(self, ver = 1):
-        if ver == 1:
-            test_group_profile = TestGroupProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        # elif ver == 2:
-        #     import test_group_profile2
-        #     test_group_profile = test_group_profile2.TestGroupProfile2(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
-        return test_group_profile
+    def new_test_group_profile(self):
+        return TestGroupProfile(self.lfclient_host, self.lfclient_port, local_realm=self, debug_=self.debug)
 
     def new_lf_data_collection(self):
         return LFDataCollection(local_realm=self)
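Since the ver= switch is gone from these factory helpers, callers now just ask the realm for a profile directly. A minimal hedged sketch, using the local_realm object from the earlier example and omitting any profile configuration:

# Illustrative only: each helper returns a ready-to-configure profile object.
station_profile = local_realm.new_station_profile()
l3_profile = local_realm.new_l3_cx_profile()
monitor = local_realm.new_wifi_monitor_profile(resource_=1)

# Older callers that passed ver=1/ver=2 would now fail with a TypeError,
# e.g. local_realm.new_station_profile(ver=2) is no longer accepted.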
 
-class PacketFilter():
-
-    def get_filter_wlan_assoc_packets(self, ap_mac, sta_mac):
-        filter = "-T fields -e wlan.fc.type_subtype -e wlan.addr -e wlan.fc.pwrmgt " \
-                 "-Y \"(wlan.addr==%s or wlan.addr==%s) and wlan.fc.type_subtype<=3\" " % (ap_mac, sta_mac)
-        return filter
-
-    def get_filter_wlan_null_packets(self, ap_mac, sta_mac):
-        filter = "-T fields -e wlan.fc.type_subtype -e wlan.addr -e wlan.fc.pwrmgt " \
-                 "-Y \"(wlan.addr==%s or wlan.addr==%s) and wlan.fc.type_subtype==44\" " % (ap_mac, sta_mac)
-        return filter
-
-    def run_filter(self, pcap_file, filter):
+class PacketFilter:
+
+    @staticmethod
+    def get_filter_wlan_assoc_packets(ap_mac, sta_mac):
+        return "-T fields -e wlan.fc.type_subtype -e wlan.addr -e wlan.fc.pwrmgt " \
+               "-Y \"(wlan.addr==%s or wlan.addr==%s) and wlan.fc.type_subtype<=3\" " % (ap_mac, sta_mac)
+
+    @staticmethod
+    def get_filter_wlan_null_packets(ap_mac, sta_mac):
+        return "-T fields -e wlan.fc.type_subtype -e wlan.addr -e wlan.fc.pwrmgt " \
+               "-Y \"(wlan.addr==%s or wlan.addr==%s) and wlan.fc.type_subtype==44\" " % (ap_mac, sta_mac)
+
+    @staticmethod
+    def run_filter(pcap_file, file_filter):
         filename = "/tmp/tshark_dump.txt"
-        cmd = "tshark -r %s %s > %s" % (pcap_file, filter, filename)
+        cmd = "tshark -r %s %s > %s" % (pcap_file, file_filter, filename)
         # print("CMD: ", cmd)
         os.system(cmd)
         lines = []
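A short hedged sketch of how the now-static PacketFilter helpers fit together. The capture path and MAC addresses are placeholders, and it assumes run_filter() returns the lines tshark wrote, as the surrounding code suggests:

# Illustrative only: requires tshark on the machine running the script.
ap_mac = "00:11:22:33:44:55"    # placeholder BSSID
sta_mac = "66:77:88:99:aa:bb"   # placeholder station MAC

assoc_filter = PacketFilter.get_filter_wlan_assoc_packets(ap_mac, sta_mac)
lines = PacketFilter.run_filter("/tmp/sta_capture.pcap", assoc_filter)
print("%d association-related frames matched" % len(lines))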
@@ -12,25 +12,24 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit()
 
 
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
 
 LFRequest = importlib.import_module("py-json.LANforge.LFRequest")
 
 
 def main():
     url = "http://localhost:8080/port/1/1/list"
-    timeout = 5 # seconds
-
-    lf_r = LFRequest.LFRequest(url)
-    json_response = lf_r.getAsJson(True)
-    j_printer = pprint.PrettyPrinter(indent=2)
-    j_printer.pprint(json_response)
-
-    #for record in json_response['interfaces']:
-        #j_printer.pprint(record)
+    lf_r = LFRequest.LFRequest(url)
+    json_response = lf_r.getAsJson(True)
+    j_printer = pprint.PrettyPrinter(indent=2)
+    j_printer.pprint(json_response)
+
+    # for record in json_response['interfaces']:
+    #     j_printer.pprint(record)
 
 
 # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 if __name__ == "__main__":
     main()
 #
@@ -300,17 +300,17 @@ class StationProfile:
             pprint(set_port.set_port_current_flags)
             pprint(set_port.set_port_interest_flags)
             return
-        if (param_name in set_port.set_port_cmd_flags):
+        if param_name in set_port.set_port_cmd_flags:
             if (value == 1) and (param_name not in self.desired_set_port_cmd_flags):
                 self.desired_set_port_cmd_flags.append(param_name)
             elif value == 0:
                 self.desired_set_port_cmd_flags.remove(param_name)
-        elif (param_name in set_port.set_port_current_flags):
+        elif param_name in set_port.set_port_current_flags:
            if (value == 1) and (param_name not in self.desired_set_port_current_flags):
                self.desired_set_port_current_flags.append(param_name)
            elif value == 0:
                self.desired_set_port_current_flags.remove(param_name)
-        elif (param_name in set_port.set_port_interest_flags):
+        elif param_name in set_port.set_port_interest_flags:
            if (value == 1) and (param_name not in self.desired_set_port_interest_flags):
                self.desired_set_port_interest_flags.append(param_name)
            elif value == 0:
@@ -359,7 +359,7 @@ class StationProfile:
     def cleanup(self, desired_stations=None, delay=0.03, debug_=False):
         print("Cleaning up stations")
 
-        if (desired_stations is None):
+        if desired_stations is None:
             desired_stations = self.station_names
 
         if len(desired_stations) < 1:
@@ -371,7 +371,9 @@ class StationProfile:
             self.local_realm.rm_port(port_eid, check_exists=True, debug_=debug_)
             time.sleep(delay)
         # And now see if they are gone
-        LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url, port_list=desired_stations)
+        LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url,
+                                           port_list=desired_stations,
+                                           debug=debug_)
 
     # Checks for errors in initialization values and creates specified number of stations using init parameters
     def create(self, radio,
@@ -440,7 +442,6 @@ class StationProfile:
         set_port_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/set_port", debug_=debug)
         wifi_extra_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/set_wifi_extra", debug_=debug)
         wifi_txo_r = LFRequest.LFRequest(self.lfclient_url + "/cli-json/set_wifi_txo", debug_=debug)
-        my_sta_names = []
         # add radio here
         if (num_stations > 0) and (len(sta_names_) < 1):
             # print("CREATING MORE STA NAMES == == == == == == == == == == == == == == == == == == == == == == == ==")
@@ -449,7 +450,7 @@ class StationProfile:
         else:
             my_sta_names = sta_names_
 
-        if (len(my_sta_names) >= 15) or (suppress_related_commands_ == True):
+        if (len(my_sta_names) >= 15) or suppress_related_commands_:
             self.add_sta_data["suppress_preexec_cli"] = "yes"
             self.add_sta_data["suppress_preexec_method"] = 1
             self.set_port_data["suppress_preexec_cli"] = "yes"
@@ -503,13 +504,13 @@ class StationProfile:
                 continue
 
             # print("- 3264 - ## %s ## add_sta_r.jsonPost - - - - - - - - - - - - - - - - - - "%eidn)
-            json_response = add_sta_r.jsonPost(debug=self.debug)
+            add_sta_r.jsonPost(debug=self.debug)
             finished_sta.append(eidn)
             # print("- ~3264 - %s - add_sta_r.jsonPost - - - - - - - - - - - - - - - - - - "%eidn)
             time.sleep(0.01)
             set_port_r.addPostData(self.set_port_data)
             # print("- 3270 -- %s -- set_port_r.jsonPost - - - - - - - - - - - - - - - - - - "%eidn)
-            json_response = set_port_r.jsonPost(debug)
+            set_port_r.jsonPost(debug)
             # print("- ~3270 - %s - set_port_r.jsonPost - - - - - - - - - - - - - - - - - - "%eidn)
             time.sleep(0.01)
 
@@ -519,10 +520,10 @@ class StationProfile:
             self.wifi_txo_data["port"] = name
             if self.wifi_extra_data_modified:
                 wifi_extra_r.addPostData(self.wifi_extra_data)
-                json_response = wifi_extra_r.jsonPost(debug)
+                wifi_extra_r.jsonPost(debug)
             if self.wifi_txo_data_modified:
                 wifi_txo_r.addPostData(self.wifi_txo_data)
-                json_response = wifi_txo_r.jsonPost(debug)
+                wifi_txo_r.jsonPost(debug)
 
             # append created stations to self.station_names
             self.station_names.append("%s.%s.%s" % (radio_shelf, radio_resource, name))
@@ -534,7 +535,7 @@ class StationProfile:
         # and set ports up
         if dry_run:
             return
-        if (self.up):
+        if self.up:
             self.admin_up()
 
         # for sta_name in self.station_names:
@@ -551,8 +552,15 @@ class StationProfile:
         self.add_sta_data["flags"] = self.add_named_flags(self.desired_add_sta_flags, add_sta.add_sta_flags)
         self.add_sta_data["flags_mask"] = self.add_named_flags(self.desired_add_sta_flags_mask,
                                                                add_sta.add_sta_flags)
 
+        station_eid = self.local_realm.name_to_eid(station)
+        station_shelf = station_eid[0]
+        station_resource = station_eid[1]
+        station_port = station_eid[2]
         self.add_sta_data["radio"] = radio
-        self.add_sta_data["sta_name"] = station
+        self.add_sta_data["shelf"] = station_shelf
+        self.add_sta_data["resource"] = station_resource
+        self.add_sta_data["sta_name"] = station_port
         self.add_sta_data["ssid"] = 'NA'
         self.add_sta_data["key"] = 'NA'
         self.add_sta_data['mac'] = 'NA'
@@ -565,4 +573,4 @@ class StationProfile:
         print(self.lfclient_url + "/cli_json/add_sta")
         print(self.add_sta_data)
         add_sta_r.addPostData(self.add_sta_data)
-        json_response = add_sta_r.jsonPost(self.debug)
+        add_sta_r.jsonPost(self.debug)
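The point of the last hunk is that add_sta now receives the shelf, resource and bare port name split out of the full EID instead of the whole "1.1.sta0000" string. A hedged sketch of that split (the station name and radio are placeholders, and local_realm is the Realm instance from the earlier sketches):

# Illustrative only: mirrors what the code above posts to /cli-json/add_sta.
station = "1.1.sta0000"                       # full EID: shelf.resource.port
station_eid = local_realm.name_to_eid(station)

add_sta_data = {
    "radio": "wiphy0",                        # assumed radio name
    "shelf": station_eid[0],
    "resource": station_eid[1],
    "sta_name": station_eid[2],               # just "sta0000", not the full EID
    "ssid": "NA",
    "key": "NA",
    "mac": "NA",
}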
|
|||||||
@@ -27,8 +27,8 @@ class TestBase:
|
|||||||
def build(self):
|
def build(self):
|
||||||
# - create station profile
|
# - create station profile
|
||||||
# - create 2 criteria [ex: not down, continually_receiving] object (for ex)
|
# - create 2 criteria [ex: not down, continually_receiving] object (for ex)
|
||||||
# - station_profile.add_criteria([not_down, continually_receiving, etc_3])
|
# - station_profile.add_criteria([not_down, continually_receiving, etc_3])
|
||||||
# design - inversion of control
|
# design - inversion of control
|
||||||
|
|
||||||
if self.profiles:
|
if self.profiles:
|
||||||
for profile in self.profiles:
|
for profile in self.profiles:
|
||||||
@@ -38,31 +38,30 @@ class TestBase:
|
|||||||
if self.profiles:
|
if self.profiles:
|
||||||
for profile in self.profiles:
|
for profile in self.profiles:
|
||||||
profile.check_passes()
|
profile.check_passes()
|
||||||
|
|
||||||
def run_duration(self, monitor_enabled= False):
|
def run_duration(self, monitor_enabled=False):
|
||||||
#here check if monitor is enabled or not, then run loop accordingly
|
# here check if monitor is enabled or not, then run loop accordingly
|
||||||
self.check_for_halt()
|
self.check_for_halt()
|
||||||
if self.profiles:
|
if self.profiles:
|
||||||
if monitor_enabled:
|
if monitor_enabled:
|
||||||
for profile in self.profiles:
|
for profile in self.profiles:
|
||||||
profile.monitor_record() #check for halt in monitor record?
|
profile.monitor_record() # check for halt in monitor record?
|
||||||
for profile in self.profiles:
|
for profile in self.profiles:
|
||||||
profile.grade()
|
profile.grade()
|
||||||
if self.exit_on_fail:
|
if self.exit_on_fail:
|
||||||
if self.fails():
|
if self.fails():
|
||||||
self.exit_fail()
|
self.exit_fail()
|
||||||
self.check_for_quit()
|
self.check_for_quit()
|
||||||
|
|
||||||
def report(self, enabled= False):
|
def report(self, enabled=False):
|
||||||
#here check if monitor is enabled or not, then run loop accordingly with lfreporting
|
# here check if monitor is enabled or not, then run loop accordingly with lfreporting
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def begin(self):
|
def begin(self):
|
||||||
self.pre_clean_up()
|
self.pre_clean_up()
|
||||||
self.build()
|
self.build()
|
||||||
self.start()
|
self.start()
|
||||||
self.run_duration()
|
self.run_duration()
|
||||||
self.stop()
|
self.stop()
|
||||||
self.report()
|
self.report()
|
||||||
self.clean_up()
|
self.clean_up()
|
||||||
|
|
||||||
|
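For context, begin() drives a fixed lifecycle: pre_clean_up, build, start, run_duration, stop, report, clean_up. A hedged, do-nothing subclass that exercises it, assuming TestBase can be subclassed directly and stubbing every hook run_duration() calls:

# Illustrative only: all hooks begin()/run_duration() touch are stubbed here.
class MinimalTest(TestBase):
    def __init__(self):
        self.profiles = []          # no profiles, so the grading loops are skipped
        self.exit_on_fail = False

    def pre_clean_up(self):
        print("pre_clean_up")

    def start(self):
        print("start")

    def stop(self):
        print("stop")

    def clean_up(self):
        print("clean_up")

    def check_for_halt(self):
        pass

    def check_for_quit(self):
        pass


MinimalTest().begin()   # runs the lifecycle shown in the hunk above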
@@ -14,7 +14,6 @@ import threading
 import re
 import json
 
 
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
 
 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -28,13 +27,14 @@ updates_path = webconsole_dir + "/web_json/updates.js"
 
 
 class ClientVisualization(LFCliBase, threading.Thread):
-    def __init__(self, lfclient_host="localhost", lfclient_port=8080, num_clients= 64, max_data= 120, thread_id=None, _debug_on=False, _exit_on_error=False, _exit_on_fail=False):
+    def __init__(self, lfclient_host="localhost", lfclient_port=8080, num_clients=64, max_data=120, thread_id=None,
+                 _debug_on=False, _exit_on_error=False, _exit_on_fail=False):
         super().__init__(lfclient_host, lfclient_port, _debug=_debug_on, _exit_on_fail=_exit_on_fail)
         threading.Thread.__init__(self)
         self.num_clients = num_clients
         self.max_data = max_data
         self._stop_event = threading.Event()
-        self.client_data = {"down":[], "phantom":[], "ip":[], "scanning":[]}
+        self.client_data = {"down": [], "phantom": [], "ip": [], "scanning": []}
 
     def stop(self):
         self._stop_event.set()
@@ -56,15 +56,15 @@ class ClientVisualization(LFCliBase, threading.Thread):
                 for j in i:
                     print(i[j]['port type'])
                     if i[j]['port type'] == "WIFI-STA" and i[j]['parent dev'] == "wiphy1" and i[j]['alias'] != 'wlan1':
-                        #print(j)
-                        if i[j]['down'] == False and i[j]['phantom'] == False and i[j]['ip'] == '0.0.0.0':
+                        # print(j)
+                        if i[j]['down'] is False and i[j]['phantom'] is False and i[j]['ip'] == '0.0.0.0':
                             self.scanning += 1
-                        elif i[j]['down'] == False and i[j]['phantom'] == True:
+                        elif i[j]['down'] is False and i[j]['phantom'] is True:
                             self.phantom += 1
-                        elif i[j]['down'] == True and i[j]['phantom'] == True:
+                        elif i[j]['down'] is True and i[j]['phantom'] is True:
                             self.phantom += 1
                             self.client_data['phantom'].append(self.phantom)
-                        elif i[j]['down'] == True and i[j]['phantom'] == False:
+                        elif i[j]['down'] is True and i[j]['phantom'] is False:
                             self.down += 1
                         elif i[j]['ip'] != "0.0.0.0":
                             self.ip += 1
@@ -75,7 +75,6 @@ class ClientVisualization(LFCliBase, threading.Thread):
             self.client_data['down'].append(self.down)
             self.client_data['ip'].append(self.ip)
 
 
             for i in self.client_data:
                 if len(self.client_data[i]) >= self.max_data:
                     self.client_data[i].pop(0)
@@ -84,20 +83,21 @@ class ClientVisualization(LFCliBase, threading.Thread):
                 break
 
 
-class CreateHTML():
-    def __init__(self, path="", test_name="", time_snap="", dut_ssid="", test_conf_data={}, objective="", test_results={}, chart_data={}, chart_params={}):
+class CreateHTML:
+    def __init__(self, path="", test_name="", time_snap="", dut_ssid="", test_conf_data={}, objective="",
+                 test_results={}, chart_data={}, chart_params={}):
         self.head = """
         <html>
         <head>
-        <title>"""+test_name+"""</title>
+        <title>""" + test_name + """</title>
        </head>
        <body>
        <div class='Section report_banner-1000x205' style='background-image:url("../../assets/brand/banner.jpg");background-repeat:no-repeat;padding:0;margin:0;min-width:1000px; min-height:205px;width:1000px; height:205px;max-width:1000px; max-height:205px;'>
        <img align='right' style='padding:25;margin:5;width:200px;' src="../../assets/brand/logo.png" border='0' />
        <div class='HeaderStyle'>
        <br>
-       <h1 class='TitleFontPrint' style='color:darkgreen;'>"""+test_name+"""</h1>
-       <h3 class='TitleFontPrint' style="color:darkgreen;">"""+time_snap+"""</h3>
+       <h1 class='TitleFontPrint' style='color:darkgreen;'>""" + test_name + """</h1>
+       <h3 class='TitleFontPrint' style="color:darkgreen;">""" + time_snap + """</h3>
        </div>
        </div>
        <br>
@@ -119,16 +119,16 @@ class CreateHTML():
        <td>
        SSID
        </td>
-       <td colspan="3">"""+dut_ssid+"""
+       <td colspan="3">""" + dut_ssid + """
        </td>
        </tr>
        """
 
        for i in test_conf_data:
            self.test_conf = self.test_conf + """<tr>
-           <td>"""+str(i)+"""
+           <td>""" + str(i) + """
            </td>
-           <td colspan="3">"""+test_conf_data[i]+"""
+           <td colspan="3">""" + test_conf_data[i] + """
            </td>
            </tr>
            """
@@ -141,13 +141,13 @@ class CreateHTML():
 
        self.objective = """
        <br><h2 align="left">Objective</h2> <p align="left" width="900">
-       """+objective+"""
+       """ + objective + """
        </p>
        <br>
        """
 
        if str(test_results['summary']).__contains__("PASS"):
-           self.summary_results ="""
+           self.summary_results = """
            <br>
            <table width="700px" border="1" cellpadding="2" cellspacing="0" style="border-top-color: gray; border-top-style: solid; border-top-width: 1px; border-right-color: gray; border-right-style: solid; border-right-width: 1px; border-bottom-color: gray; border-bottom-style: solid; border-bottom-width: 1px; border-left-color: gray; border-left-style: solid; border-left-width: 1px">
            <tr>
@@ -180,23 +180,20 @@ class CreateHTML():
            </table>
            <br>
            """
-       chart_d =[]
-       chart_label =[]
+       chart_d = []
+       chart_label = []
        for i in chart_data:
            chart_label.append(i)
            chart_d.append(chart_data[i])
 
 
        self.detail_result = """<table width="1000px" border="1" cellpadding="2" cellspacing="0" >
        <tr><th colspan="2">Detailed Results</th></tr>
        <table width="1000px" border="1" >
        <tr>
        """
        for index in test_results['detail']['keys']:
-           self.detail_result = self.detail_result+"<th colspan='2'>"+index+"</th>"
-       self.detail_result = self.detail_result +"</tr>"
+           self.detail_result = self.detail_result + "<th colspan='2'>" + index + "</th>"
+       self.detail_result = self.detail_result + "</tr>"
 
        for data in test_results['detail']['data']:
            self.detail_result = self.detail_result + "<tr align='center'>"
@@ -205,12 +202,14 @@ class CreateHTML():
            for i in data:
                print(data[i])
                if str(data[i]).__contains__("PASS"):
-                   self.detail_result = self.detail_result + "<th colspan='2' bgcolor='#90EE90'>" + str(data[i]) + "</th>"
+                   self.detail_result = self.detail_result + "<th colspan='2' bgcolor='#90EE90'>" + str(
+                       data[i]) + "</th>"
                elif str(data[i]).__contains__("FAIL"):
-                   self.detail_result = self.detail_result + "<th colspan='2' bgcolor='orange'>" + str(data[i]) + "</th>"
+                   self.detail_result = self.detail_result + "<th colspan='2' bgcolor='orange'>" + str(
+                       data[i]) + "</th>"
                else:
                    self.detail_result = self.detail_result + "<th colspan='2'>" + str(data[i]) + "</th>"
-           self.detail_result = self.detail_result +"</tr>"
+           self.detail_result = self.detail_result + "</tr>"
 
        self.chart_data = chart_data
        chart_values = []
@@ -231,26 +230,24 @@ class CreateHTML():
 
        self.chart = """<img align='center' style='padding:25;margin:5;width:600px;' src="plot.png" border='0' />"""
 
 
        self.end = """</table>
        </table>
        </body>
        </html>
        """
-       self.report = self.head + self.test_conf + self.objective + self.summary_results + self.chart +self.detail_result + self.end
+       self.report = self.head + self.test_conf + self.objective + self.summary_results + self.chart + self.detail_result + self.end
 
 
-class RuntimeUpdates():
+class RuntimeUpdates:
    def __init__(self, session_id, init_data):
        self.session_id = session_id
        self.init_data = init_data
        f = open(updates_path, 'r+')
        data = f.read()
        f.close()
-       obj = data[data.find('{'): data.rfind('}') + 1]
-       obj = re.sub('[\']', '"', obj)
-       data = json.loads(obj)
+       self.obj = data[data.find('{'): data.rfind('}') + 1]
+       self.obj = re.sub('[\']', '"', self.obj)
+       data = json.loads(self.obj)
        print(data)
        data["web_updates"].append({"ID": self.session_id, "data": self.init_data})
        print(data)
@@ -264,9 +261,9 @@ class RuntimeUpdates():
        f = open(updates_path, 'r+')
        data = f.read()
        f.close()
-       obj = data[data.find('{'): data.rfind('}') + 1]
-       obj = re.sub('[\']', '"', obj)
-       data = json.loads(obj)
+       data_obj = data[data.find('{'): data.rfind('}') + 1]
+       data_obj = re.sub('[\']', '"', data_obj)
+       data = json.loads(data_obj)
 
        for update in data["web_updates"]:
            if update["ID"] == self.session_id:
@@ -306,22 +303,14 @@ class StatusSession(LFCliBase):
        Method to read all the messages for a particular session
        """
        keys = []
-       for i in self.json_get("/status-msg/"+self.session_id)['messages']:
+       for i in self.json_get("/status-msg/" + self.session_id)['messages']:
            keys.append(i['key'])
-       json_uri = "/status-msg/"+self.session_id + "/"
+       json_uri = "/status-msg/" + self.session_id + "/"
        for i in keys:
            json_uri = json_uri + i + ","
        return self.json_get(json_uri)['messages']
 
 
 if __name__ == "__main__":
     obj = StatusMsg(lfclient_host="localhost", lfclient_port=8090, session_id="01_18_21_20_04_20")
     print(obj.read())
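The read() change above keeps the same batching idea: collect every message key for the session, then fetch them all in one request. Reduced to its core (the session id and keys are placeholders):

# Illustrative only: mirrors how read() builds one comma-separated status-msg URI.
session_id = "01_18_21_20_04_20"     # placeholder session id
keys = ["key-a", "key-b"]            # placeholder message keys

json_uri = "/status-msg/" + session_id + "/"
for k in keys:
    json_uri = json_uri + k + ","
# json_uri is now "/status-msg/01_18_21_20_04_20/key-a,key-b,"
# and a single json_get(json_uri)['messages'] returns all of them together.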
|||||||
@@ -5,7 +5,6 @@ import importlib
|
|||||||
from pprint import pprint
|
from pprint import pprint
|
||||||
import time
|
import time
|
||||||
|
|
||||||
|
|
||||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||||
|
|
||||||
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
||||||
@@ -97,7 +96,7 @@ class VAPProfile(LFCliBase):
|
|||||||
req_json = LFUtils.portUpRequest(resource, None, debug_on=self.debug)
|
req_json = LFUtils.portUpRequest(resource, None, debug_on=self.debug)
|
||||||
req_json["port"] = self.vap_name
|
req_json["port"] = self.vap_name
|
||||||
set_port_r.addPostData(req_json)
|
set_port_r.addPostData(req_json)
|
||||||
-json_response = set_port_r.jsonPost(self.debug)
+set_port_r.jsonPost(self.debug)
 time.sleep(0.03)

 def admin_down(self, resource):
@@ -105,7 +104,7 @@ class VAPProfile(LFCliBase):
 req_json = LFUtils.port_down_request(resource, None, debug_on=self.debug)
 req_json["port"] = self.vap_name
 set_port_r.addPostData(req_json)
-json_response = set_port_r.jsonPost(self.debug)
+set_port_r.jsonPost(self.debug)
 time.sleep(0.03)

 def use_security(self, security_type, ssid=None, passwd=None):
@@ -141,7 +140,7 @@ class VAPProfile(LFCliBase):
 print("Command name name [%s] not defined in %s" % (command_name, self.COMMANDS))
 return
 if command_name == "add_vap":
-if (param_name not in add_vap.add_vap_flags):
+if param_name not in add_vap.add_vap_flags:
 print("Parameter name [%s] not defined in add_vap.py" % param_name)
 if self.debug:
 pprint(add_vap.add_vap_flags)
@@ -255,7 +254,6 @@ class VAPProfile(LFCliBase):
 raise ValueError("No radio %s.%s found" % (resource, radio))

 eid = "1.%s.%s" % (resource, radio)
-frequency = 0
 country = 0
 if eid in jr:
 country = jr[eid]["country"]
@@ -312,18 +310,18 @@ class VAPProfile(LFCliBase):
 pprint(add_vap_r)
 print("- ~1502 - - - - - - - - - - - - - - - - - - - ")

-json_response = add_vap_r.jsonPost(debug)
+add_vap_r.jsonPost(debug)
 # time.sleep(0.03)
 time.sleep(2)
 set_port_r.addPostData(self.set_port_data)
-json_response = set_port_r.jsonPost(debug)
+set_port_r.jsonPost(debug)
 time.sleep(0.03)

 self.wifi_extra_data["resource"] = resource
 self.wifi_extra_data["port"] = self.vap_name
 if self.wifi_extra_data_modified:
 wifi_extra_r.addPostData(self.wifi_extra_data)
-json_response = wifi_extra_r.jsonPost(debug)
+wifi_extra_r.jsonPost(debug)

 port_list = self.local_realm.json_get("port/1/1/list")
 if port_list is not None:
@@ -335,7 +333,7 @@ class VAPProfile(LFCliBase):
 time.sleep(5)

 # create bridge
-if bridge :
+if bridge:
 print("creating bridge")
 data = {
 "shelf": 1,
@@ -354,19 +352,16 @@ class VAPProfile(LFCliBase):
 }
 self.local_realm.json_post("cli-json/set_port", bridge_set_port)

-if (self.up):
+if self.up:
 self.admin_up(resource)

 def cleanup(self, resource, delay=0.03):
 print("Cleaning up VAPs")
 desired_ports = ["1.%s.%s" % (resource, self.vap_name), "1.%s.br0" % resource]

-del_count = len(desired_ports)

 # First, request remove on the list.
 for port_eid in desired_ports:
 self.local_realm.rm_port(port_eid, check_exists=True)

 # And now see if they are gone
 LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url, port_list=desired_ports)

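A minimal usage sketch for the VAPProfile calls touched above (admin_up/admin_down/cleanup); the realm helper, resource number and port names are illustrative assumptions, not taken from this commit:

# hypothetical driver; assumes a LANforge realm object named local_realm
vap_profile = local_realm.new_vap_profile()      # assumed realm factory helper
vap_profile.vap_name = "vap0000"
vap_profile.create(resource=1, radio="wiphy0")   # posts add_vap / set_port as in the diff
vap_profile.admin_up(1)
# ... run traffic ...
vap_profile.admin_down(1)                        # uses LFUtils.port_down_request as shown above
vap_profile.cleanup(resource=1)                  # rm_port + wait_until_ports_disappear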
@@ -46,7 +46,7 @@ class WifiMonitor:
 raise ValueError("No radio %s.%s found" % (resource_, radio_))

 eid = "1.%s.%s" % (resource_, radio_)
-#frequency = 0
+# frequency = 0
 country = 0
 if eid in jr:
 country = jr[eid]["country"]
@@ -73,7 +73,7 @@ class WifiMonitor:
 })

 def set_flag(self, param_name, value):
-if (param_name not in add_monitor.flags):
+if param_name not in add_monitor.flags:
 raise ValueError("Flag '%s' does not exist for add_monitor, consult add_monitor.py" % param_name)
 if (value == 1) and (param_name not in self.flag_names):
 self.flag_names.append(param_name)
@@ -97,13 +97,13 @@ class WifiMonitor:
 if (existing_ports is None) or ("interfaces" not in existing_ports) or ("interface" not in existing_ports):
 print("No monitor names found to delete")
 return
-if ("interfaces" in existing_ports):
+if "interfaces" in existing_ports:
 for eid, info in existing_ports["interfaces"].items():
 LFUtils.removePort(resource=resource_,
 port_name=info["alias"],
 baseurl=self.lfclient_url,
 debug=self.debug)
-if ("interface" in existing_ports):
+if "interface" in existing_ports:
 for eid, info in existing_ports["interface"].items():
 LFUtils.removePort(resource=resource_,
 port_name=info["alias"],
@@ -132,4 +132,3 @@ class WifiMonitor:
 "duration": duration_sec
 }
 self.local_realm.json_post("/cli-json/sniff_port", _data=data)

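A short, hedged sketch of the WifiMonitor flag handling above; the helper name, flag name and create() argument names are assumptions for illustration only:

# hypothetical monitor setup; the flag must exist in add_monitor.flags or set_flag raises ValueError
wifi_monitor = local_realm.new_wifi_monitor_profile()   # assumed realm helper
wifi_monitor.set_flag("disable_ht40", 1)                # flag name assumed for the example
wifi_monitor.create(resource_=1, channel=36, radio_="wiphy1", name_="moni0a")
# later: wifi_monitor cleanup removes any existing monitor ports via LFUtils.removePort, as shown above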
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3

-'''
+"""

 Candela Technologies Inc.
 Info : Standard Script for WLAN Capaity Calculator
@@ -13,7 +13,7 @@ This Script has three classes :
 3. ac11_calculator : It will take all the user input of 802.11ac station,calculate Intermediate values and Theoretical values.
 All classes have different functions: input_parameter() that calculates intermediate values and generate theroretical data

-'''
+"""

 import argparse
 import json
@@ -22,8 +22,7 @@ import json
 # Class to take all user input (802.11a/b/g Standard)


-class abg11_calculator():
+class abg11_calculator:

 def __init__(self, Traffic_Type, PHY_Bit_Rate, Encryption, QoS, MAC_Frame_802_11, Basic_Rate_Set, Preamble,
 slot_name, Codec_Type, RTS_CTS_Handshake, CTS_to_self):
@@ -39,19 +38,16 @@ class abg11_calculator():
 self.RTS_CTS_Handshake = RTS_CTS_Handshake
 self.CTS_to_self = CTS_to_self


 # This function is for calculate intermediate values and Theoretical values

 @staticmethod
 def create_argparse(prog=None, formatter_class=None, epilog=None, description=None):
 if (prog is not None) or (formatter_class is not None) or (epilog is not None) or (description is not None):
 ap = argparse.ArgumentParser(prog=prog,
 formatter_class=formatter_class,
 allow_abbrev=True,
 epilog=epilog,
 description=description)
 else:
 ap = argparse.ArgumentParser()

@@ -329,7 +325,7 @@ class abg11_calculator():
|
|||||||
|
|
||||||
# CWmin_str (leave alone for default)
|
# CWmin_str (leave alone for default)
|
||||||
|
|
||||||
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
|
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
|
||||||
CWmin_str = 31
|
CWmin_str = 31
|
||||||
else:
|
else:
|
||||||
if (
|
if (
|
||||||
@@ -376,7 +372,7 @@ class abg11_calculator():
|
|||||||
|
|
||||||
# PHY Bit Rate of Control Frames
|
# PHY Bit Rate of Control Frames
|
||||||
|
|
||||||
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
|
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
|
||||||
data = 1
|
data = 1
|
||||||
else:
|
else:
|
||||||
data = 6
|
data = 6
|
||||||
@@ -406,7 +402,7 @@ class abg11_calculator():
|
|||||||
Preamble_1 = float(192)
|
Preamble_1 = float(192)
|
||||||
|
|
||||||
if (PHY_Bit == 1) or (PHY_Bit == 2) or (PHY_Bit == 5.5) or (PHY_Bit == 11):
|
if (PHY_Bit == 1) or (PHY_Bit == 2) or (PHY_Bit == 5.5) or (PHY_Bit == 11):
|
||||||
Ttxframe = (14 * 8) / PHY_Bit + (Preamble_1)
|
Ttxframe = (14 * 8) / PHY_Bit + Preamble_1
|
||||||
|
|
||||||
Ttxframe_new = format(Ttxframe, '.2f')
|
Ttxframe_new = format(Ttxframe, '.2f')
|
||||||
else:
|
else:
|
||||||
@@ -415,7 +411,7 @@ class abg11_calculator():
|
|||||||
|
|
||||||
# RTS/CTS Handshake Overhead
|
# RTS/CTS Handshake Overhead
|
||||||
|
|
||||||
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
|
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
|
||||||
SIFS_value = float(10)
|
SIFS_value = float(10)
|
||||||
else:
|
else:
|
||||||
SIFS_value = float(16)
|
SIFS_value = float(16)
|
||||||
@@ -425,7 +421,7 @@ class abg11_calculator():
|
|||||||
|
|
||||||
elif "Yes" in self.RTS_CTS_Handshake:
|
elif "Yes" in self.RTS_CTS_Handshake:
|
||||||
if (PHY_Bit == 1) or (PHY_Bit == 2) or (PHY_Bit == 5.5) or (PHY_Bit == 11):
|
if (PHY_Bit == 1) or (PHY_Bit == 2) or (PHY_Bit == 5.5) or (PHY_Bit == 11):
|
||||||
RTS_CTS_Handshake = ((20 + 14) * 8) / PHY_Bit + (Preamble_1)
|
RTS_CTS_Handshake = ((20 + 14) * 8) / PHY_Bit + Preamble_1
|
||||||
|
|
||||||
else:
|
else:
|
||||||
RTS_CTS_Handshake = int(((20 + 14) * 8 + 22 + PHY_Bit * 4 - 1) / (PHY_Bit * 4)) * 4 + 2 * 20
|
RTS_CTS_Handshake = int(((20 + 14) * 8 + 22 + PHY_Bit * 4 - 1) / (PHY_Bit * 4)) * 4 + 2 * 20
|
||||||
@@ -441,26 +437,26 @@ class abg11_calculator():
|
|||||||
CTS_to_self_Handshake = 0
|
CTS_to_self_Handshake = 0
|
||||||
else:
|
else:
|
||||||
if (PHY_Bit == 1) or (PHY_Bit == 2) or (PHY_Bit == 5.5) or (PHY_Bit == 11):
|
if (PHY_Bit == 1) or (PHY_Bit == 2) or (PHY_Bit == 5.5) or (PHY_Bit == 11):
|
||||||
CTS_to_self_Handshake = (14 * 8) / PHY_Bit + (Preamble_1) + SIFS_value
|
CTS_to_self_Handshake = (14 * 8) / PHY_Bit + Preamble_1 + SIFS_value
|
||||||
else:
|
else:
|
||||||
CTS_to_self_Handshake = int(
|
CTS_to_self_Handshake = int(
|
||||||
(14 * 8 + 22 + PHY_Bit * 4 - 1) / (PHY_Bit * 4)) * 4 + 20 + SIFS_value
|
(14 * 8 + 22 + PHY_Bit * 4 - 1) / (PHY_Bit * 4)) * 4 + 20 + SIFS_value
|
||||||
|
|
||||||
# DIFS calulation
|
# DIFS calulation
|
||||||
|
|
||||||
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
|
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
|
||||||
DIFS_value = 50
|
DIFS_value = 50
|
||||||
elif ("Short" in self.slot_name):
|
elif "Short" in self.slot_name:
|
||||||
DIFS_value = 34
|
DIFS_value = 34
|
||||||
else:
|
else:
|
||||||
DIFS_value = 50
|
DIFS_value = 50
|
||||||
|
|
||||||
# MeanBackoff calculation
|
# MeanBackoff calculation
|
||||||
|
|
||||||
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
|
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
|
||||||
c4 = (CWmin_str * 20 / 2)
|
c4 = (CWmin_str * 20 / 2)
|
||||||
MeanBackoff_value = float(c4)
|
MeanBackoff_value = float(c4)
|
||||||
elif ("Short" in self.slot_name):
|
elif "Short" in self.slot_name:
|
||||||
d2 = (CWmin_str * 9 / 2)
|
d2 = (CWmin_str * 9 / 2)
|
||||||
MeanBackoff_value = float(d2)
|
MeanBackoff_value = float(d2)
|
||||||
else:
|
else:
|
||||||
@@ -474,7 +470,7 @@ class abg11_calculator():
|
|||||||
Nbits_value = (MAC_MPDU_Size * 8)
|
Nbits_value = (MAC_MPDU_Size * 8)
|
||||||
|
|
||||||
# Tmac, time for MAC frame and Tplcp, time for MAC PLCP
|
# Tmac, time for MAC frame and Tplcp, time for MAC PLCP
|
||||||
if (PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11):
|
if PHY_Bit_Rate_int == 1 or PHY_Bit_Rate_int == 2 or PHY_Bit_Rate_int == 5 or PHY_Bit_Rate_int == 11:
|
||||||
Tmac_value = Nbits_value / yellow_cell
|
Tmac_value = Nbits_value / yellow_cell
|
||||||
if "Short" in self.Preamble:
|
if "Short" in self.Preamble:
|
||||||
Tplcp = float(96)
|
Tplcp = float(96)
|
||||||
@@ -597,8 +593,6 @@ class abg11_calculator():
|
|||||||
IP_Throughput_C6_new = "N/A"
|
IP_Throughput_C6_new = "N/A"
|
||||||
IP_Throughput_C7_new = "N/A"
|
IP_Throughput_C7_new = "N/A"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Voice_Call = Max_Frame_Rate_C1 / Codec_Frame_rate
|
Voice_Call = Max_Frame_Rate_C1 / Codec_Frame_rate
|
||||||
Voice_Call_value = round(Voice_Call)
|
Voice_Call_value = round(Voice_Call)
|
||||||
|
|
||||||
@@ -620,18 +614,18 @@ class abg11_calculator():
|
|||||||
self.Estimated_MOS_Score = "N/A"
|
self.Estimated_MOS_Score = "N/A"
|
||||||
self.Maximum_Bidirectional_Voice_Calls = "N/A"
|
self.Maximum_Bidirectional_Voice_Calls = "N/A"
|
||||||
else:
|
else:
|
||||||
if (Voice_Call_value <= 1):
|
if Voice_Call_value <= 1:
|
||||||
Maximum_Bidirectional_Voice_Calls1 = self.Max_Frame_Rate_C1_round / Codec_Frame_rate
|
Maximum_Bidirectional_Voice_Calls1 = self.Max_Frame_Rate_C1_round / Codec_Frame_rate
|
||||||
elif (Voice_Call_value <= 2):
|
elif Voice_Call_value <= 2:
|
||||||
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C2_round / Codec_Frame_rate
|
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C2_round / Codec_Frame_rate
|
||||||
elif (Voice_Call_value <= 5):
|
elif Voice_Call_value <= 5:
|
||||||
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C3_round / Codec_Frame_rate
|
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C3_round / Codec_Frame_rate
|
||||||
|
|
||||||
elif (Voice_Call_value <= 10):
|
elif Voice_Call_value <= 10:
|
||||||
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C4_round / Codec_Frame_rate
|
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C4_round / Codec_Frame_rate
|
||||||
elif (Voice_Call_value <= 20):
|
elif Voice_Call_value <= 20:
|
||||||
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C5_round / Codec_Frame_rate
|
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C5_round / Codec_Frame_rate
|
||||||
elif (Voice_Call_value <= 50):
|
elif Voice_Call_value <= 50:
|
||||||
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C6_round / Codec_Frame_rate
|
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C6_round / Codec_Frame_rate
|
||||||
else:
|
else:
|
||||||
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C7_round / Codec_Frame_rate
|
Maximum_Bidirectional_Voice_Calls1 = Max_Frame_Rate_C7_round / Codec_Frame_rate
|
||||||
@@ -644,18 +638,17 @@ class abg11_calculator():
|
|||||||
|
|
||||||
else:
|
else:
|
||||||
Estimated_MOS_Score_1 = 1 + 0.035 * self.Maximum_Theoretical_R_value + self.Maximum_Theoretical_R_value * (
|
Estimated_MOS_Score_1 = 1 + 0.035 * self.Maximum_Theoretical_R_value + self.Maximum_Theoretical_R_value * (
|
||||||
self.Maximum_Theoretical_R_value - 60) * (
|
self.Maximum_Theoretical_R_value - 60) * (
|
||||||
100 - self.Maximum_Theoretical_R_value) * 7 * 0.000001
|
100 - self.Maximum_Theoretical_R_value) * 7 * 0.000001
|
||||||
self.Estimated_MOS_Score = round(Estimated_MOS_Score_1, 2)
|
self.Estimated_MOS_Score = round(Estimated_MOS_Score_1, 2)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def get_result(self):
|
def get_result(self):
|
||||||
|
|
||||||
print("\n" + "******************Station : 11abgCalculator*****************************" + "\n")
|
print("\n" + "******************Station : 11abgCalculator*****************************" + "\n")
|
||||||
print("Theoretical Maximum Offered Load" + "\n")
|
print("Theoretical Maximum Offered Load" + "\n")
|
||||||
print("1 Client:")
|
print("1 Client:")
|
||||||
All_theoretical_output = {'Packet Interval(usec)': self.Client_1_new, 'Max Frame Rate(fps)': self.Max_Frame_Rate_C1_round,
|
All_theoretical_output = {'Packet Interval(usec)': self.Client_1_new,
|
||||||
|
'Max Frame Rate(fps)': self.Max_Frame_Rate_C1_round,
|
||||||
'Max. Offered Load (802.11)(Mb/s)': self.Max_Offered_Load_C1_new,
|
'Max. Offered Load (802.11)(Mb/s)': self.Max_Offered_Load_C1_new,
|
||||||
'Offered Load Per 802.11 Client(Mb/s)': self.Offered_Load_Per_Client1_new,
|
'Offered Load Per 802.11 Client(Mb/s)': self.Offered_Load_Per_Client1_new,
|
||||||
'Offered Load (802.3 Side)(Mb/s)': self.Offered_Load_C1_new,
|
'Offered Load (802.3 Side)(Mb/s)': self.Offered_Load_C1_new,
|
||||||
@@ -677,9 +670,10 @@ class n11_calculator(abg11_calculator):
|
|||||||
def __init__(self, Traffic_Type, Data_Voice_MCS, Channel_Bandwidth, Guard_Interval_value, Highest_Basic_str,
|
def __init__(self, Traffic_Type, Data_Voice_MCS, Channel_Bandwidth, Guard_Interval_value, Highest_Basic_str,
|
||||||
Encryption, QoS,
|
Encryption, QoS,
|
||||||
IP_Packets_MSDU_str, MAC_Frames_per_A_MPDU_str, BSS_Basic_Rate, MAC_MPDU_Size_Data_Traffic,
|
IP_Packets_MSDU_str, MAC_Frames_per_A_MPDU_str, BSS_Basic_Rate, MAC_MPDU_Size_Data_Traffic,
|
||||||
Codec_Type, PLCP, CWmin, RTS_CTS_Handshake, CTS_to_self,PHY_Bit_Rate=None,MAC_Frame_802_11=None,Basic_Rate_Set=None,Preamble=None,slot_name=None):
|
Codec_Type, PLCP, CWmin, RTS_CTS_Handshake, CTS_to_self, PHY_Bit_Rate=None, MAC_Frame_802_11=None,
|
||||||
|
Basic_Rate_Set=None, Preamble=None, slot_name=None):
|
||||||
super().__init__(Traffic_Type, PHY_Bit_Rate, Encryption, QoS, MAC_Frame_802_11, Basic_Rate_Set, Preamble,
|
super().__init__(Traffic_Type, PHY_Bit_Rate, Encryption, QoS, MAC_Frame_802_11, Basic_Rate_Set, Preamble,
|
||||||
slot_name, Codec_Type, RTS_CTS_Handshake, CTS_to_self)
|
slot_name, Codec_Type, RTS_CTS_Handshake, CTS_to_self)
|
||||||
self.Data_Voice_MCS = Data_Voice_MCS
|
self.Data_Voice_MCS = Data_Voice_MCS
|
||||||
self.Channel_Bandwidth = Channel_Bandwidth
|
self.Channel_Bandwidth = Channel_Bandwidth
|
||||||
self.Guard_Interval_value = Guard_Interval_value
|
self.Guard_Interval_value = Guard_Interval_value
|
||||||
@@ -691,7 +685,6 @@ class n11_calculator(abg11_calculator):
|
|||||||
self.PLCP = PLCP
|
self.PLCP = PLCP
|
||||||
self.CWmin = CWmin
|
self.CWmin = CWmin
|
||||||
|
|
||||||
|
|
||||||
# This function is for calculate intermediate values and Theoretical values
|
# This function is for calculate intermediate values and Theoretical values
|
||||||
|
|
||||||
def calculate(self):
|
def calculate(self):
|
||||||
@@ -875,7 +868,7 @@ class n11_calculator(abg11_calculator):
|
|||||||
MAC_MPDU_Size = int(self.MAC_MPDU_Size_Data_Traffic)
|
MAC_MPDU_Size = int(self.MAC_MPDU_Size_Data_Traffic)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
if ((IP_Packets_MSDU == 0)):
|
if IP_Packets_MSDU == 0:
|
||||||
MAC_MPDU_Size = (Codec_IP_Packet_Size + 28 + QoS_Hdr + Encrypt_Hdr + 8)
|
MAC_MPDU_Size = (Codec_IP_Packet_Size + 28 + QoS_Hdr + Encrypt_Hdr + 8)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -890,7 +883,7 @@ class n11_calculator(abg11_calculator):
|
|||||||
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
MSDU_1 = (MAC_MPDU_Size - 28 - QoS_Hdr - Encrypt_Hdr - (IP_Packets_MSDU) * (14 + 3))
|
MSDU_1 = (MAC_MPDU_Size - 28 - QoS_Hdr - Encrypt_Hdr - IP_Packets_MSDU * (14 + 3))
|
||||||
MSDU_final = MSDU_1 / IP_Packets_MSDU
|
MSDU_final = MSDU_1 / IP_Packets_MSDU
|
||||||
|
|
||||||
if MSDU_final < 0:
|
if MSDU_final < 0:
|
||||||
@@ -1076,7 +1069,7 @@ class n11_calculator(abg11_calculator):
|
|||||||
|
|
||||||
# g22 A-MPDU Pad
|
# g22 A-MPDU Pad
|
||||||
|
|
||||||
if ((MAC_Frames_per_A_MPDU == 0)):
|
if MAC_Frames_per_A_MPDU == 0:
|
||||||
MPDU_Pad = int(0)
|
MPDU_Pad = int(0)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -1087,7 +1080,7 @@ class n11_calculator(abg11_calculator):
|
|||||||
# c26 Nbits, Bits per MAC PPDU
|
# c26 Nbits, Bits per MAC PPDU
|
||||||
|
|
||||||
MAC_Frames_per_A_MPDU_loc = MAC_Frames_per_A_MPDU + 1
|
MAC_Frames_per_A_MPDU_loc = MAC_Frames_per_A_MPDU + 1
|
||||||
if (MAC_Frames_per_A_MPDU == 0):
|
if MAC_Frames_per_A_MPDU == 0:
|
||||||
Nbits_Bits_per_MAC_PPDU = MAC_MPDU_Size * 8
|
Nbits_Bits_per_MAC_PPDU = MAC_MPDU_Size * 8
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -1100,7 +1093,7 @@ class n11_calculator(abg11_calculator):
|
|||||||
Guard_Interval_1 = 1
|
Guard_Interval_1 = 1
|
||||||
elif "800" in self.Guard_Interval_value:
|
elif "800" in self.Guard_Interval_value:
|
||||||
Guard_Interval_1 = 0
|
Guard_Interval_1 = 0
|
||||||
calculation = (((Data_Voice_MCS_int > 7 and PLCP_Configuration_int == 2) or PLCP_Configuration_int == 1))
|
calculation = ((Data_Voice_MCS_int > 7 and PLCP_Configuration_int == 2) or PLCP_Configuration_int == 1)
|
||||||
if (Guard_Interval_1 == 1) and calculation:
|
if (Guard_Interval_1 == 1) and calculation:
|
||||||
Tsymbol_Data_Symbol_Period = 3.60
|
Tsymbol_Data_Symbol_Period = 3.60
|
||||||
|
|
||||||
@@ -1132,7 +1125,7 @@ class n11_calculator(abg11_calculator):
|
|||||||
(22 + 32 * 8 + PHY_Bit_Rate_of_Control_Frames * 4 - 1) / (PHY_Bit_Rate_of_Control_Frames * 4)) * 4 + 20
|
(22 + 32 * 8 + PHY_Bit_Rate_of_Control_Frames * 4 - 1) / (PHY_Bit_Rate_of_Control_Frames * 4)) * 4 + 20
|
||||||
|
|
||||||
# g20 Use BlockAck
|
# g20 Use BlockAck
|
||||||
if (MAC_Frames_per_A_MPDU == 0):
|
if MAC_Frames_per_A_MPDU == 0:
|
||||||
Use_BlockAck = False
|
Use_BlockAck = False
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -1228,14 +1221,14 @@ class n11_calculator(abg11_calculator):
|
|||||||
Client_14_new = format(Max_PPDU_Rate_7, '.2f')
|
Client_14_new = format(Max_PPDU_Rate_7, '.2f')
|
||||||
|
|
||||||
# c44 Max_MAC_MPDU_Rate_1
|
# c44 Max_MAC_MPDU_Rate_1
|
||||||
if (MAC_Frames_per_A_MPDU > 0):
|
if MAC_Frames_per_A_MPDU > 0:
|
||||||
Max_MAC_MPDU_Rate_1 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_1
|
Max_MAC_MPDU_Rate_1 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_1
|
||||||
Max_MAC_MPDU_Rate_2 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_2
|
Max_MAC_MPDU_Rate_2 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_2
|
||||||
Max_MAC_MPDU_Rate_3 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_3
|
Max_MAC_MPDU_Rate_3 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_3
|
||||||
Max_MAC_MPDU_Rate_4 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_4
|
Max_MAC_MPDU_Rate_4 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_4
|
||||||
Max_MAC_MPDU_Rate_5 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_5
|
Max_MAC_MPDU_Rate_5 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_5
|
||||||
Max_MAC_MPDU_Rate_6 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_6
|
Max_MAC_MPDU_Rate_6 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_6
|
||||||
Max_MAC_MPDU_Rate_7 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_7
|
Max_MAC_MPDU_Rate_7 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_7
|
||||||
else:
|
else:
|
||||||
Max_MAC_MPDU_Rate_1 = Max_PPDU_Rate_1
|
Max_MAC_MPDU_Rate_1 = Max_PPDU_Rate_1
|
||||||
Max_MAC_MPDU_Rate_2 = Max_PPDU_Rate_2
|
Max_MAC_MPDU_Rate_2 = Max_PPDU_Rate_2
|
||||||
@@ -1255,14 +1248,14 @@ class n11_calculator(abg11_calculator):
|
|||||||
|
|
||||||
# Max MAC MSDU Rate
|
# Max MAC MSDU Rate
|
||||||
|
|
||||||
if (IP_Packets_MSDU > 0):
|
if IP_Packets_MSDU > 0:
|
||||||
Max_MAC_MSDU_Rate_1 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_1
|
Max_MAC_MSDU_Rate_1 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_1
|
||||||
Max_MAC_MSDU_Rate_2 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_2
|
Max_MAC_MSDU_Rate_2 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_2
|
||||||
Max_MAC_MSDU_Rate_3 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_3
|
Max_MAC_MSDU_Rate_3 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_3
|
||||||
Max_MAC_MSDU_Rate_4 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_4
|
Max_MAC_MSDU_Rate_4 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_4
|
||||||
Max_MAC_MSDU_Rate_5 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_5
|
Max_MAC_MSDU_Rate_5 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_5
|
||||||
Max_MAC_MSDU_Rate_6 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_6
|
Max_MAC_MSDU_Rate_6 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_6
|
||||||
Max_MAC_MSDU_Rate_7 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_7
|
Max_MAC_MSDU_Rate_7 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_7
|
||||||
|
|
||||||
else:
|
else:
|
||||||
Max_MAC_MSDU_Rate_1 = Max_MAC_MPDU_Rate_1
|
Max_MAC_MSDU_Rate_1 = Max_MAC_MPDU_Rate_1
|
||||||
@@ -1458,15 +1451,13 @@ class n11_calculator(abg11_calculator):
|
|||||||
else:
|
else:
|
||||||
self.Maximum_Bidirectional_Voice_Calls = round(Maximum_Bidirectional, 2)
|
self.Maximum_Bidirectional_Voice_Calls = round(Maximum_Bidirectional, 2)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def get_result(self):
|
def get_result(self):
|
||||||
|
|
||||||
print("\n" + "******************Station : 11nCalculator*****************************" + "\n")
|
print("\n" + "******************Station : 11nCalculator*****************************" + "\n")
|
||||||
print("Theoretical Maximum Offered Load" + "\n")
|
print("Theoretical Maximum Offered Load" + "\n")
|
||||||
print("1 Client:")
|
print("1 Client:")
|
||||||
All_theoretical_output = {'MAC PPDU Interval(usec)': self.Client_1_new,
|
All_theoretical_output = {'MAC PPDU Interval(usec)': self.Client_1_new,
|
||||||
'Max PPDU Rate(fps)': self.Client_8_new,
|
'Max PPDU Rate(fps)': self.Client_8_new,
|
||||||
'Max MAC MPDU Rate': self.Client_15_new,
|
'Max MAC MPDU Rate': self.Client_15_new,
|
||||||
'Max MAC MSDU Rate': self.Client_22_new,
|
'Max MAC MSDU Rate': self.Client_22_new,
|
||||||
'Max. 802.11 MAC Frame Data Rate(Mb/s)': self.Client_29_new,
|
'Max. 802.11 MAC Frame Data Rate(Mb/s)': self.Client_29_new,
|
||||||
@@ -1488,20 +1479,18 @@ class n11_calculator(abg11_calculator):
|
|||||||
|
|
||||||
class ac11_calculator(n11_calculator):
|
class ac11_calculator(n11_calculator):
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def __init__(self, Traffic_Type, Data_Voice_MCS, spatial, Channel_Bandwidth, Guard_Interval_value,
|
def __init__(self, Traffic_Type, Data_Voice_MCS, spatial, Channel_Bandwidth, Guard_Interval_value,
|
||||||
Highest_Basic_str, Encryption, QoS,IP_Packets_MSDU_str, MAC_Frames_per_A_MPDU_str, BSS_Basic_Rate, MAC_MPDU_Size_Data_Traffic,
|
Highest_Basic_str, Encryption, QoS, IP_Packets_MSDU_str, MAC_Frames_per_A_MPDU_str, BSS_Basic_Rate,
|
||||||
Codec_Type, CWmin, RTS_CTS,PLCP = None,RTS_CTS_Handshake=None,CTS_to_self=None):
|
MAC_MPDU_Size_Data_Traffic,
|
||||||
|
Codec_Type, CWmin, RTS_CTS, PLCP=None, RTS_CTS_Handshake=None, CTS_to_self=None):
|
||||||
super().__init__(Traffic_Type, Data_Voice_MCS, Channel_Bandwidth, Guard_Interval_value, Highest_Basic_str,
|
super().__init__(Traffic_Type, Data_Voice_MCS, Channel_Bandwidth, Guard_Interval_value, Highest_Basic_str,
|
||||||
Encryption, QoS,
|
Encryption, QoS,
|
||||||
IP_Packets_MSDU_str, MAC_Frames_per_A_MPDU_str, BSS_Basic_Rate, MAC_MPDU_Size_Data_Traffic,
|
IP_Packets_MSDU_str, MAC_Frames_per_A_MPDU_str, BSS_Basic_Rate, MAC_MPDU_Size_Data_Traffic,
|
||||||
Codec_Type, PLCP, CWmin, RTS_CTS_Handshake, CTS_to_self)
|
Codec_Type, PLCP, CWmin, RTS_CTS_Handshake, CTS_to_self)
|
||||||
|
|
||||||
self.spatial = spatial
|
self.spatial = spatial
|
||||||
self.RTS_CTS = RTS_CTS
|
self.RTS_CTS = RTS_CTS
|
||||||
|
|
||||||
|
|
||||||
# This function is for calculate intermediate values and Theoretical values
|
# This function is for calculate intermediate values and Theoretical values
|
||||||
|
|
||||||
def calculate(self):
|
def calculate(self):
|
||||||
@@ -1695,7 +1684,7 @@ class ac11_calculator(n11_calculator):
|
|||||||
MAC_MPDU_Size = int(self.MAC_MPDU_Size_Data_Traffic)
|
MAC_MPDU_Size = int(self.MAC_MPDU_Size_Data_Traffic)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
if (IP_Packets_MSDU == 0):
|
if IP_Packets_MSDU == 0:
|
||||||
MAC_MPDU_Size = (Codec_IP_Packet_Size + 28 + QoS_Hdr + Encrypt_Hdr + 8)
|
MAC_MPDU_Size = (Codec_IP_Packet_Size + 28 + QoS_Hdr + Encrypt_Hdr + 8)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -1711,8 +1700,8 @@ class ac11_calculator(n11_calculator):
|
|||||||
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
MSDU_1 = (MAC_MPDU_Size - 28 - QoS_Hdr - Encrypt_Hdr - (IP_Packets_MSDU) * (14 + 3))
|
MSDU_1 = (MAC_MPDU_Size - 28 - QoS_Hdr - Encrypt_Hdr - IP_Packets_MSDU * (14 + 3))
|
||||||
MSDU_final = (int(MSDU_1 / (IP_Packets_MSDU)))
|
MSDU_final = (int(MSDU_1 / IP_Packets_MSDU))
|
||||||
if MSDU_final < 0:
|
if MSDU_final < 0:
|
||||||
MSDU = MSDU_final - 1
|
MSDU = MSDU_final - 1
|
||||||
else:
|
else:
|
||||||
@@ -1775,7 +1764,7 @@ class ac11_calculator(n11_calculator):
|
|||||||
elif "800" in self.Guard_Interval_value:
|
elif "800" in self.Guard_Interval_value:
|
||||||
Guard_Interval_1 = 0
|
Guard_Interval_1 = 0
|
||||||
|
|
||||||
calculation = (((Data_Voice_MCS_int > 7 and plcp == 2) or plcp == 1))
|
calculation = ((Data_Voice_MCS_int > 7 and plcp == 2) or plcp == 1)
|
||||||
|
|
||||||
if (Guard_Interval_1 == 1) and calculation:
|
if (Guard_Interval_1 == 1) and calculation:
|
||||||
Tsymbol_Data_Symbol_Period = 3.60
|
Tsymbol_Data_Symbol_Period = 3.60
|
||||||
@@ -1839,7 +1828,7 @@ class ac11_calculator(n11_calculator):
|
|||||||
# Nbits, Bits per MAC PPDU
|
# Nbits, Bits per MAC PPDU
|
||||||
# A-MPDU Pad
|
# A-MPDU Pad
|
||||||
|
|
||||||
if ((MAC_Frames_per_A_MPDU == 0)):
|
if MAC_Frames_per_A_MPDU == 0:
|
||||||
MPDU_Pad = int(0)
|
MPDU_Pad = int(0)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -1848,7 +1837,7 @@ class ac11_calculator(n11_calculator):
|
|||||||
MPDU_Pad = int((y % 4))
|
MPDU_Pad = int((y % 4))
|
||||||
|
|
||||||
MAC_Frames_per_A_MPDU_loc = MAC_Frames_per_A_MPDU + 1
|
MAC_Frames_per_A_MPDU_loc = MAC_Frames_per_A_MPDU + 1
|
||||||
if (MAC_Frames_per_A_MPDU == 0):
|
if MAC_Frames_per_A_MPDU == 0:
|
||||||
Nbits_Bits_per_MAC_PPDU = MAC_MPDU_Size * 8
|
Nbits_Bits_per_MAC_PPDU = MAC_MPDU_Size * 8
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -1925,7 +1914,7 @@ class ac11_calculator(n11_calculator):
|
|||||||
# c37 Ack Response Overhead
|
# c37 Ack Response Overhead
|
||||||
# g20 Use BlockAck
|
# g20 Use BlockAck
|
||||||
|
|
||||||
if (MAC_Frames_per_A_MPDU == 0):
|
if MAC_Frames_per_A_MPDU == 0:
|
||||||
Use_BlockAck = False
|
Use_BlockAck = False
|
||||||
else:
|
else:
|
||||||
Use_BlockAck = True
|
Use_BlockAck = True
|
||||||
@@ -1987,14 +1976,14 @@ class ac11_calculator(n11_calculator):
|
|||||||
|
|
||||||
# c44 Max_MAC_MPDU_Rate_1
|
# c44 Max_MAC_MPDU_Rate_1
|
||||||
|
|
||||||
if (MAC_Frames_per_A_MPDU > 0):
|
if MAC_Frames_per_A_MPDU > 0:
|
||||||
Max_MAC_MPDU_Rate_1 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_1
|
Max_MAC_MPDU_Rate_1 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_1
|
||||||
Max_MAC_MPDU_Rate_2 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_2
|
Max_MAC_MPDU_Rate_2 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_2
|
||||||
Max_MAC_MPDU_Rate_3 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_3
|
Max_MAC_MPDU_Rate_3 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_3
|
||||||
Max_MAC_MPDU_Rate_4 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_4
|
Max_MAC_MPDU_Rate_4 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_4
|
||||||
Max_MAC_MPDU_Rate_5 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_5
|
Max_MAC_MPDU_Rate_5 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_5
|
||||||
Max_MAC_MPDU_Rate_6 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_6
|
Max_MAC_MPDU_Rate_6 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_6
|
||||||
Max_MAC_MPDU_Rate_7 = (MAC_Frames_per_A_MPDU) * Max_PPDU_Rate_7
|
Max_MAC_MPDU_Rate_7 = MAC_Frames_per_A_MPDU * Max_PPDU_Rate_7
|
||||||
else:
|
else:
|
||||||
Max_MAC_MPDU_Rate_1 = Max_PPDU_Rate_1
|
Max_MAC_MPDU_Rate_1 = Max_PPDU_Rate_1
|
||||||
Max_MAC_MPDU_Rate_2 = Max_PPDU_Rate_2
|
Max_MAC_MPDU_Rate_2 = Max_PPDU_Rate_2
|
||||||
@@ -2014,14 +2003,14 @@ class ac11_calculator(n11_calculator):
|
|||||||
|
|
||||||
# Max MAC MSDU Rate
|
# Max MAC MSDU Rate
|
||||||
|
|
||||||
if (IP_Packets_MSDU > 0):
|
if IP_Packets_MSDU > 0:
|
||||||
Max_MAC_MSDU_Rate_1 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_1
|
Max_MAC_MSDU_Rate_1 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_1
|
||||||
Max_MAC_MSDU_Rate_2 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_2
|
Max_MAC_MSDU_Rate_2 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_2
|
||||||
Max_MAC_MSDU_Rate_3 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_3
|
Max_MAC_MSDU_Rate_3 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_3
|
||||||
Max_MAC_MSDU_Rate_4 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_4
|
Max_MAC_MSDU_Rate_4 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_4
|
||||||
Max_MAC_MSDU_Rate_5 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_5
|
Max_MAC_MSDU_Rate_5 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_5
|
||||||
Max_MAC_MSDU_Rate_6 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_6
|
Max_MAC_MSDU_Rate_6 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_6
|
||||||
Max_MAC_MSDU_Rate_7 = (IP_Packets_MSDU) * Max_MAC_MPDU_Rate_7
|
Max_MAC_MSDU_Rate_7 = IP_Packets_MSDU * Max_MAC_MPDU_Rate_7
|
||||||
|
|
||||||
else:
|
else:
|
||||||
Max_MAC_MSDU_Rate_1 = Max_MAC_MPDU_Rate_1
|
Max_MAC_MSDU_Rate_1 = Max_MAC_MPDU_Rate_1
|
||||||
@@ -2162,8 +2151,10 @@ class ac11_calculator(n11_calculator):
|
|||||||
if self.Maximum_Theoretical_R_value > 100:
|
if self.Maximum_Theoretical_R_value > 100:
|
||||||
self.Estimated_MOS_Score = 4.5
|
self.Estimated_MOS_Score = 4.5
|
||||||
else:
|
else:
|
||||||
Estimated_MOS_Score_1 = (1 + 0.035 * self.Maximum_Theoretical_R_value + self.Maximum_Theoretical_R_value * (
|
Estimated_MOS_Score_1 = (
|
||||||
self.Maximum_Theoretical_R_value - 60) * (100 - self.Maximum_Theoretical_R_value) * 7 * 0.000001)
|
1 + 0.035 * self.Maximum_Theoretical_R_value + self.Maximum_Theoretical_R_value * (
|
||||||
|
self.Maximum_Theoretical_R_value - 60) * (
|
||||||
|
100 - self.Maximum_Theoretical_R_value) * 7 * 0.000001)
|
||||||
self.Estimated_MOS_Score = format(Estimated_MOS_Score_1, '.2f')
|
self.Estimated_MOS_Score = format(Estimated_MOS_Score_1, '.2f')
|
||||||
|
|
||||||
# Voice_Call_Range
|
# Voice_Call_Range
|
||||||
@@ -2207,7 +2198,6 @@ class ac11_calculator(n11_calculator):
|
|||||||
else:
|
else:
|
||||||
self.Maximum_Bidirectional_Voice_Calls = round(Maximum_Bidirectional, 2)
|
self.Maximum_Bidirectional_Voice_Calls = round(Maximum_Bidirectional, 2)
|
||||||
|
|
||||||
|
|
||||||
def get_result(self):
|
def get_result(self):
|
||||||
|
|
||||||
print("\n" + "******************Station : 11ac Calculator*****************************" + "\n")
|
print("\n" + "******************Station : 11ac Calculator*****************************" + "\n")
|
||||||
@@ -2228,4 +2218,4 @@ class ac11_calculator(n11_calculator):
|
|||||||
All_theoretical_voice = {'Maximum Theoretical R-value': self.Maximum_Theoretical_R_value,
|
All_theoretical_voice = {'Maximum Theoretical R-value': self.Maximum_Theoretical_R_value,
|
||||||
'Estimated MOS Score': self.Estimated_MOS_Score,
|
'Estimated MOS Score': self.Estimated_MOS_Score,
|
||||||
'Maximum Bidirectional Voice Calls(calls)': self.Maximum_Bidirectional_Voice_Calls}
|
'Maximum Bidirectional Voice Calls(calls)': self.Maximum_Bidirectional_Voice_Calls}
|
||||||
print(json.dumps(All_theoretical_voice, indent=4))
|
print(json.dumps(All_theoretical_voice, indent=4))
|
||||||
|
|||||||
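A rough usage sketch for the calculator classes in this file, following the module docstring above; every argument value below is invented for illustration and not validated against the script's argparse choices:

# 802.11a/b/g example (placeholder inputs)
calc = abg11_calculator(Traffic_Type="Data", PHY_Bit_Rate="54", Encryption="None",
                        QoS="No", MAC_Frame_802_11="1518", Basic_Rate_Set="Standard",
                        Preamble="N/A", slot_name="N/A", Codec_Type="G.711",
                        RTS_CTS_Handshake="No", CTS_to_self="No")
calc.input_parameter()   # computes intermediate and theoretical values, per the docstring
calc.get_result()        # prints the "Theoretical Maximum Offered Load" summary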
@@ -18,6 +18,7 @@ import traceback
 from time import sleep
 import websocket
 import re

 try:
 import thread
 except ImportError:
@@ -27,25 +28,23 @@ if sys.version_info[0] != 3:
 print("This script requires Python 3")
 exit()


 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 LFUtils = importlib.import_module("py-json.LANforge.LFUtils")

-cre={
+cre = {
 "phy": re.compile(r'^(1\.\d+):\s+(\S+)\s+\(phy', re.I),
 "ifname": re.compile(r'(1\.\d+):\s+IFNAME=(\S+)\s+', re.I),
 "port": re.compile(r'Port (\S+)', re.I),
 "connected": re.compile(r'.*?CTRL-EVENT-CONNECTED - Connection to ([a-f0-9:]+) complete', re.I),
 "associated": re.compile(r'^.*?Associated with ([a-f0-9:]+)$', re.I),
 "auth": re.compile(r'.*: auth ([a-f0-9:]+) -> ([a-f0-9:]+) status: 0: Successful', re.I),
 "authenticated": re.compile(r'.*?Authenticated with ([a-f0-9:]+)', re.I),
 "associating": re.compile(r'.*?Trying to associate with ([a-f0-9:]+)', re.I),
 "authenticating": re.compile(r'.*?[>]SME: Trying to authenticate with ([a-f0-9:]+)', re.I),
 }

-ignore=[
+ignore = [
 ": scan finished",
 ": scan started",
 ": scan aborted: ",
@@ -68,13 +67,14 @@ ignore=[
 ]

 rebank = {
-"ifname" : re.compile("IFNAME=(\S+)")
+"ifname": re.compile("IFNAME=(\S+)")
 }
 websock = None
 host = "localhost"
 base_url = None
 port = 8081


 # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 def usage():
 print("""Example: __file__ --host 192.168.1.101 --port 8081\n""")
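To make the regex table above concrete, a tiny self-contained check of the "connected" pattern against a fabricated supplicant line (the sample text is made up for the test):

import re

connected = re.compile(r'.*?CTRL-EVENT-CONNECTED - Connection to ([a-f0-9:]+) complete', re.I)
sample = "1.1: IFNAME=sta0000 <3>CTRL-EVENT-CONNECTED - Connection to 00:0e:8e:7b:df:9b complete [id=0]"
m = connected.match(sample)
if m:
    print("connected to BSSID", m.group(1))   # -> 00:0e:8e:7b:df:9b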
@@ -94,7 +94,7 @@ def main():
 base_url = "unset"
 try:
 args = parser.parse_args()
-if (args.host is None):
+if args.host is None:
 host = "localhost"
 elif (type(args) is tuple) or (type(args) is list):
 host = args.host[0]
@@ -104,7 +104,7 @@ def main():
 base_url = "ws://%s:%s" % (host, port)

 except Exception as e:
-print("Exception: "+e)
+print("Exception: " + e)
 logging.exception(e)
 usage()
 exit(2)
@@ -125,79 +125,74 @@ def sock_filter(wsock, text):
 resource = None

 for test in ignore:
-if (test in text):
+if test in text:
-if (debug):
+if debug:
-print (" ignoring ",text)
+print(" ignoring ", text)
-return;
+return

 try:
 message = json.loads(text)
-except Exception as ex:
-print ("Json Exception: ", repr(ex))
-traceback.print_exc()

-try:
 # big generic filter for wifi-message or details keys
 try:
-if ("details" in message.keys()):
+if "details" in message.keys():
 for test in ignore:
-if (test in message["details"]):
+if test in message["details"]:
-return;
+return
 except KeyError:
 print("Message lacks key 'details'")

 try:
-if ("wifi-event" in message.keys()):
+if "wifi-event" in message.keys():
 for test in ignore:
 # print (" is ",test, " in ", message["wifi-event"])
-if (test in message["wifi-event"]):
+if test in message["wifi-event"]:
-return;
+return
 except KeyError:
 print("Message lacks key 'wifi-event'")

-if (("time" in message.keys()) and ("timestamp" in message.keys())):
+if ("time" in message.keys()) and ("timestamp" in message.keys()):
 return

-if ("name" in message.keys()):
+if "name" in message.keys():
 station_name = message["name"]
-if ("resource" in message.keys()):
+if "resource" in message.keys():
 resource = "1.", message["resource"]

-if ("event_type" in message.keys()):
+if "event_type" in message.keys():
 match_result = cre["port"].match(message["details"])
-if (match_result is not None):
+if match_result is not None:
 station_name = match_result.group(1)

-if (message["is_alert"]):
+if message["is_alert"]:
 print("alert: ", message["details"])
 # LFUtils.debug_printer.pprint(message)
 return
 else:
 # LFUtils.debug_printer.pprint(message)
-if (" IP change from " in message["details"]):
+if " IP change from " in message["details"]:
-if (" to 0.0.0.0" in messsage["details"]):
+if " to 0.0.0.0" in message["details"]:
 print("e: %s.%s lost IP address", [resource, station_name])
 else:
 print("e: %s.%s gained IP address", [resource, station_name])
-if ("Link DOWN" in message["details"]):
+if "Link DOWN" in message["details"]:
 return # duplicates alert

 print("event: ", message["details"])
 return

-if ("wifi-event" in message.keys()):
+if "wifi-event" in message.keys():
-if ("CTRL-EVENT-CONNECTED" in message["wifi-event"]):
+if "CTRL-EVENT-CONNECTED" in message["wifi-event"]:
 # redunant
 return
 if (("CTRL-EVENT-CONNECTED - Connection to " in message["wifi-event"]) and (
 " complete" in message["wifi-event"])):
-return;
+return
-if ((": assoc " in message["wifi-event"]) and ("status: 0: Successful" in message["wifi-event"])):
+if (": assoc " in message["wifi-event"]) and ("status: 0: Successful" in message["wifi-event"]):
 return
-if ((station_name is None) or (resource is None)):
+if (station_name is None) or (resource is None):
 try:
 match_result = cre["phy"].match(message["wifi-event"])
-if (match_result is not None):
+if match_result is not None:
 # LFUtils.debug_printer.pprint(match_result)
 # LFUtils.debug_printer.pprint(match_result.groups())
 resource = match_result.group(1)
@@ -206,7 +201,7 @@ def sock_filter(wsock, text):
|
|||||||
match_result = cre["ifname"].match(message["wifi-event"])
|
match_result = cre["ifname"].match(message["wifi-event"])
|
||||||
# LFUtils.debug_printer.pprint(match_result)
|
# LFUtils.debug_printer.pprint(match_result)
|
||||||
# LFUtils.debug_printer.pprint(match_result.groups())
|
# LFUtils.debug_printer.pprint(match_result.groups())
|
||||||
if (match_result is not None):
|
if match_result is not None:
|
||||||
resource = match_result.group(1)
|
resource = match_result.group(1)
|
||||||
station_name = match_result.group(2)
|
station_name = match_result.group(2)
|
||||||
else:
|
else:
|
||||||
@@ -221,9 +216,9 @@ def sock_filter(wsock, text):
|
|||||||
sleep(1)
|
sleep(1)
|
||||||
|
|
||||||
# print ("Determined station name: as %s.%s"%(resource, station_name))
|
# print ("Determined station name: as %s.%s"%(resource, station_name))
|
||||||
if ((": auth ") and ("status: 0: Successful" in message["wifi-event"])):
|
if ": auth " and ("status: 0: Successful" in message["wifi-event"]):
|
||||||
match_result = cre["auth"].match(message["wifi-event"])
|
match_result = cre["auth"].match(message["wifi-event"])
|
||||||
if (match_result and match_result.groups()):
|
if match_result and match_result.groups():
|
||||||
bssid = match_result.group(1)
|
bssid = match_result.group(1)
|
||||||
print("station %s.%s auth with %s" % (resource, station_name, bssid))
|
print("station %s.%s auth with %s" % (resource, station_name, bssid))
|
||||||
return
|
return
|
||||||
@@ -231,9 +226,9 @@ def sock_filter(wsock, text):
|
|||||||
print("station %s.%s auth with ??" % (resource, station_name))
|
print("station %s.%s auth with ??" % (resource, station_name))
|
||||||
LFUtils.debug_printer.pprint(match_result)
|
LFUtils.debug_printer.pprint(match_result)
|
||||||
|
|
||||||
if ("Associated with " in message["wifi-event"]):
|
if "Associated with " in message["wifi-event"]:
|
||||||
match_result = cre["associated"].match(message["wifi-event"])
|
match_result = cre["associated"].match(message["wifi-event"])
|
||||||
if (match_result and match_result.groups()):
|
if match_result and match_result.groups():
|
||||||
bssid = match_result.group(1)
|
bssid = match_result.group(1)
|
||||||
print("station %s.%s assocated with %s" % (resource, station_name, bssid))
|
print("station %s.%s assocated with %s" % (resource, station_name, bssid))
|
||||||
return
|
return
|
||||||
@@ -241,9 +236,9 @@ def sock_filter(wsock, text):
|
|||||||
print("station %s.%s assocated with ??" % (resource, station_name))
|
print("station %s.%s assocated with ??" % (resource, station_name))
|
||||||
LFUtils.debug_printer.pprint(match_result)
|
LFUtils.debug_printer.pprint(match_result)
|
||||||
|
|
||||||
if (" - Connection to " in message["wifi-event"]):
|
if " - Connection to " in message["wifi-event"]:
|
||||||
match_result = cre["connected"].match(message["wifi-event"])
|
match_result = cre["connected"].match(message["wifi-event"])
|
||||||
if (match_result and match_result.groups()):
|
if match_result and match_result.groups():
|
||||||
bssid = match_result.group(1)
|
bssid = match_result.group(1)
|
||||||
print("station %s.%s connected to %s" % (resource, station_name, bssid))
|
print("station %s.%s connected to %s" % (resource, station_name, bssid))
|
||||||
return
|
return
|
||||||
@@ -251,14 +246,14 @@ def sock_filter(wsock, text):
|
|||||||
print("station %s.%s connected to ??" % (resource, station_name))
|
print("station %s.%s connected to ??" % (resource, station_name))
|
||||||
LFUtils.debug_printer.pprint(match_result)
|
LFUtils.debug_printer.pprint(match_result)
|
||||||
|
|
||||||
if ("disconnected" in message["wifi-event"]):
|
if "disconnected" in message["wifi-event"]:
|
||||||
print("Station %s.%s down" % (resource, station_name))
|
print("Station %s.%s down" % (resource, station_name))
|
||||||
return
|
return
|
||||||
|
|
||||||
if ("Trying to associate with " in message["wifi-event"]):
|
if "Trying to associate with " in message["wifi-event"]:
|
||||||
match_result = cre["associating"].match(message["wifi-event"])
|
match_result = cre["associating"].match(message["wifi-event"])
|
||||||
|
|
||||||
if (match_result and match_result.groups()):
|
if match_result and match_result.groups():
|
||||||
bssid = match_result.group(1)
|
bssid = match_result.group(1)
|
||||||
print("station %s.%s associating with %s" % (resource, station_name, bssid))
|
print("station %s.%s associating with %s" % (resource, station_name, bssid))
|
||||||
return
|
return
|
||||||
@@ -266,10 +261,10 @@ def sock_filter(wsock, text):
|
|||||||
print("station %s.%s associating with ??" % (resource, station_name))
|
print("station %s.%s associating with ??" % (resource, station_name))
|
||||||
LFUtils.debug_printer.pprint(match_result)
|
LFUtils.debug_printer.pprint(match_result)
|
||||||
|
|
||||||
if ("Trying to authenticate" in message["wifi-event"]):
|
if "Trying to authenticate" in message["wifi-event"]:
|
||||||
match_result = cre["authenticating"].match(message["wifi-event"])
|
match_result = cre["authenticating"].match(message["wifi-event"])
|
||||||
|
|
||||||
if (match_result and match_result.groups()):
|
if match_result and match_result.groups():
|
||||||
bssid = match_result.group(1)
|
bssid = match_result.group(1)
|
||||||
print("station %s.%s authenticating with %s" % (resource, station_name, bssid))
|
print("station %s.%s authenticating with %s" % (resource, station_name, bssid))
|
||||||
return
|
return
|
||||||
@@ -277,10 +272,10 @@ def sock_filter(wsock, text):
|
|||||||
print("station %s.%s authenticating with ??" % (resource, station_name))
|
print("station %s.%s authenticating with ??" % (resource, station_name))
|
||||||
LFUtils.debug_printer.pprint(match_result)
|
LFUtils.debug_printer.pprint(match_result)
|
||||||
|
|
||||||
if ("Authenticated" in message["wifi-event"]):
|
if "Authenticated" in message["wifi-event"]:
|
||||||
match_result = cre["authenticed"].match(message["wifi-event"])
|
match_result = cre["authenticed"].match(message["wifi-event"])
|
||||||
LFUtils.debug_printer.pprint(match_result)
|
LFUtils.debug_printer.pprint(match_result)
|
||||||
if (match_result and match_result.groups()):
|
if match_result and match_result.groups():
|
||||||
bssid = match_result.group(1)
|
bssid = match_result.group(1)
|
||||||
print("station %s.%s authenticated with %s" % (resource, station_name, bssid))
|
print("station %s.%s authenticated with %s" % (resource, station_name, bssid))
|
||||||
else:
|
else:
|
||||||
@@ -291,6 +286,10 @@ def sock_filter(wsock, text):
 print("\nUnhandled: ")
 LFUtils.debug_printer.pprint(message)

+except Exception as ex:
+traceback.print_exc()
+raise ("Json Exception: ", repr(ex))

 except KeyError as kerr:
 print("# ----- Bad Key: ----- ----- ----- ----- ----- ----- ----- ----- ----- -----")
 print("input: ", text)
@@ -317,6 +316,7 @@ def sock_filter(wsock, text):
|
|||||||
sleep(1)
|
sleep(1)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
|
||||||
# ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
|
# ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----
|
||||||
def m_error(wsock, err):
|
def m_error(wsock, err):
|
||||||
print("# ----- Error: ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----\n")
|
print("# ----- Error: ----- ----- ----- ----- ----- ----- ----- ----- ----- ----- -----\n")
|
||||||
@@ -355,7 +355,6 @@ def start_websocket(uri, websock):
|
|||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|
||||||
|
|
||||||
####
|
####
|
||||||
####
|
####
|
||||||
####
|
####
|
||||||
|
|||||||
@@ -13,13 +13,11 @@ WS_Listener has three arguments in general : lfclient_host, _scriptname, _callba

 """

-class WS_Listener():
+class WS_Listener:
 def __init__(self, lfclient_host="localhost", _scriptname=None, _callback=None):
 import websocket
 self.scriptname = _scriptname
 websocket.enableTrace(True)
-self.ws = websocket.WebSocketApp("ws://"+lfclient_host+":8081", on_message=_callback)
+self.ws = websocket.WebSocketApp("ws://" + lfclient_host + ":8081", on_message=_callback)
 self.ws.run_forever()

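For reference, a minimal sketch of wiring WS_Listener to a callback, per its docstring above; the host value is an example and the callback signature follows the usual websocket-client on_message convention:

def my_callback(ws, message):
    print("event:", message)

# note: the constructor calls run_forever(), so this blocks until the 8081 event socket closes
WS_Listener(lfclient_host="192.168.1.101", _scriptname="ws-demo", _callback=my_callback)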
|
|||||||
@@ -368,8 +368,8 @@ td.scriptdetails span.copybtn {
 }
 td.scriptdetails:hover span.copybtn {
 display: inline-block;
-padding: 5px;
+padding: 2px;
-font-size: 12px;
+font-size:10px;
 float: left;
 color: #050;
 background: white;
|
|||||||
179
py-scripts/asus_ap.py
Normal file
179
py-scripts/asus_ap.py
Normal file
@@ -0,0 +1,179 @@
|
|||||||
|
#!/usr/bin/python3
|
||||||
|
|
||||||
|
'''
|
||||||
|
NAME:
|
||||||
|
asus_ap.py
|
||||||
|
|
||||||
|
PURPOSE:
|
||||||
|
Generic AP library that will work for the ASUS ap's
|
||||||
|
|
||||||
|
EXAMPLE:
|
||||||
|
|
||||||
|
./asus_ap.py --ap_port '/dev/ttyUSB0' --ap_baud '115200' --ap_cmd "wl -i wl1 bs_data"
|
||||||
|
|
||||||
|
./asus_ap.py --ap_port '/dev/ttyUSB0' --ap_baud '115200' --ap_cmd "wl -i wl1 bs_data" --ap_file 'ap_file.txt'

NOTES:

'''

import sys

if sys.version_info[0] != 3:
    print("This script requires Python3")
    exit()

import argparse
import pexpect
import serial
from pexpect_serial import SerialSpawn


# see https://stackoverflow.com/a/13306095/11014343
class FileAdapter(object):
    def __init__(self, logger):
        self.logger = logger

    def write(self, data):
        # NOTE: data can be a partial line, or multiple lines
        data = data.strip()  # ignore leading/trailing whitespace
        if data:  # non-blank
            self.logger.info(data)

    def flush(self):
        pass  # leave it to logging to flush properly


class lf_ap():
    def __init__(self,
                 _ap_test_mode=False,
                 _ap_2G_interface="wl0",
                 _ap_5G_interface="wl1",
                 _ap_6G_interface="wl2",
                 _ap_scheme='serial',
                 _ap_serial_port='/dev/ttyUSB0',
                 _ap_ssh_port="22",
                 _ap_telnet_port="23",
                 _ap_serial_baud='115200',
                 _ap_report_dir="",
                 _ap_log_file="",
                 # serial command parameters consumed by ap_action() and passed in from main()
                 _ap_port='/dev/ttyUSB0',
                 _ap_baud='115200',
                 _ap_cmd='wl -i wl1 bs_data',
                 _ap_file=None):
        self.ap_test_mode = _ap_test_mode
        self.ap_2G_interface = _ap_2G_interface
        self.ap_5G_interface = _ap_5G_interface
        self.ap_6G_interface = _ap_6G_interface
        self.ap_scheme = _ap_scheme
        self.ap_serial_port = _ap_serial_port
        self.ap_ssh_port = _ap_ssh_port
        self.ap_telnet_port = _ap_telnet_port
        self.ap_serial_baud = _ap_serial_baud
        self.ap_report_dir = _ap_report_dir
        self.ap_log_file = _ap_log_file
        self.ap_port = _ap_port
        self.ap_baud = _ap_baud
        self.ap_cmd = _ap_cmd
        self.ap_file = _ap_file

    def ap_action(self):
        print("ap_cmd: {}".format(self.ap_cmd))
        try:
            ser = serial.Serial(self.ap_port, int(self.ap_baud), timeout=5)
            ss = SerialSpawn(ser)
            ss.sendline(str(self.ap_cmd))
            ss.expect([pexpect.TIMEOUT], timeout=2)  # do not delete this line, it waits for output
            ap_results = ss.before.decode('utf-8', 'ignore')
            print("ap_results {}".format(ap_results))
        except Exception:
            ap_results = "exception on accessing {} Command: {}\r\n".format(self.ap_port, self.ap_cmd)
            print("{}".format(ap_results))

        if self.ap_file is not None:
            ap_file = open(str(self.ap_file), "a")
            ap_file.write(ap_results)
            ap_file.close()
            print("ap file written {}".format(str(self.ap_file)))

    # ASUS
    def ap_clear_stats(self, band):
        pass

    # ASUS bs_data
    def ap_ul_data(self, band):
        pass

    # ASUS rx_report
    def ap_dl_data(self, band):
        pass

    # ASUS channel info (channel utilization)
    def ap_chanim(self, band):
        pass

    def ap_ul_stats(self, band):
        pass

    def ap_dl_stats(self, band):
        pass

    def ap_store_dl_scheduler_stats(self, band):
        if band == "6G":
            pass

    def ap_store_ul_scheduler_stats(self, band):
        pass

    def ap_ofdma_stats(self, band):
        pass


def main():
    parser = argparse.ArgumentParser(
        prog='lf_ap.py',
        # formatter_class=argparse.RawDescriptionHelpFormatter,
        formatter_class=argparse.RawTextHelpFormatter,
        epilog='''\
Useful Information:
1. Useful Information goes here
''',
        description='''\
lf_ap.py:
--------------------
Summary :
----------
This file is used for verification

Commands: (wl2 == 6 GHz, wl1 == 5 GHz, wl0 == 2.4 GHz)

read ap data:: 'wl -i wl1 bs_data'
reset scheduler's counters:: 'wl -i wl1 dump_clear'
UL scheduler statistics:: 'wl -i wl1 dump umsched'
DL scheduler statistics:: 'wl -i wl1 dump msched'

Generic command layout:
-----------------------
''')
    parser.add_argument('--ap_test_mode', help='--ap_test_mode', default=True)
    parser.add_argument('--ap_port', help='--ap_port \'/dev/ttyUSB0\'', default='/dev/ttyUSB0')
    parser.add_argument('--ap_baud', help='--ap_baud \'115200\'', default='115200')
    parser.add_argument('--ap_cmd', help='--ap_cmd \'wl -i wl1 bs_data\'', default='wl -i wl1 bs_data')
    parser.add_argument('--ap_file', help='--ap_file \'ap_file.txt\'')

    args = parser.parse_args()

    __ap_port = args.ap_port
    __ap_baud = args.ap_baud
    __ap_cmd = args.ap_cmd
    __ap_file = args.ap_file

    ap_dut = lf_ap(
        _ap_port=__ap_port,
        _ap_baud=__ap_baud,
        _ap_cmd=__ap_cmd,
        _ap_file=__ap_file)

    ap_dut.ap_action()


if __name__ == '__main__':
    main()
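For reference, the serial capture pattern used by ap_action() above can be exercised on its own. The sketch below is illustrative only: it assumes an AP console on /dev/ttyUSB0, the pexpect_serial package, and a helper name (read_ap_output) that is not part of this commit.

import pexpect
import serial
from pexpect_serial import SerialSpawn


def read_ap_output(port='/dev/ttyUSB0', baud=115200, cmd='wl -i wl1 bs_data', wait_s=2):
    # Open the console, send one command, and return whatever text
    # accumulates before the timeout expires (same idea as ap_action()).
    with serial.Serial(port, baud, timeout=5) as ser:
        ss = SerialSpawn(ser)
        ss.sendline(cmd)
        ss.expect([pexpect.TIMEOUT], timeout=wait_s)  # wait for output to accumulate
        return ss.before.decode('utf-8', 'ignore')


if __name__ == '__main__':
    print(read_ap_output())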
@@ -5,13 +5,6 @@ import pandas as pd
 import argparse
 
 
-def get_tag(x, tag):
-    try:
-        return x[tag]
-    except:
-        return False
-
-
 def main():
     parser = argparse.ArgumentParser(
         prog="check_argparse.py",
@@ -29,7 +22,7 @@ def main():
         text = open(os.path.join(args.path, file)).read()
         results_file = dict()
         results_file['argparse'] = 'argparse.' in text
-        if results_file['argparse'] is True:
+        if results_file['argparse']:
             results_file['create_basic'] = 'create_basic_argparse' in text
             results_file['create_bare'] = 'create_bare_argparse' in text
             results_file['prog'] = 'prog=' in text
@@ -48,7 +41,9 @@ def main():
                 'description',
                 'epilog',
                 'usage']:
-        df[tag] = [get_tag(x, tag) for x in df['results']]
+        for result in df['results']:
+            if tag in result:
+                df[tag] = df['results'][tag]
     df['details'] = df['description'] + df['epilog'] + df['usage']
     df.to_csv(args.output + '.csv', index=False)
 
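The last hunk above drops get_tag() in favour of a membership test when flattening per-script results into the DataFrame. A minimal sketch of the same per-tag extraction, assuming each entry in the 'results' column is a dict of parsed argparse attributes; the column and tag names are illustrative, and this per-row variant is not the exact loop used in the commit.

import pandas as pd

df = pd.DataFrame({'results': [{'prog': True, 'epilog': False}, {'prog': False}]})

for tag in ['prog', 'epilog', 'usage']:
    # Default to False, copy the value wherever the tag is present in that row.
    df[tag] = [result.get(tag, False) for result in df['results']]

print(df)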
@@ -19,7 +19,7 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)
 
 
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
 
 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -58,7 +58,8 @@ class CreateBond(LFCliBase):
             "resource": self.resource,
             "port": "bond0000",
             "current_flags": 0x80000000,
-            "interest": 0x4000  # (0x2 + 0x4000 + 0x800000)  # current, dhcp, down
+            # (0x2 + 0x4000 + 0x800000)  # current, dhcp, down
+            "interest": 0x4000
         }
         self.json_post("cli-json/set_port", bond_set_port)
 
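Both this file and create_bridge.py below keep the same cli-json/set_port payload and only move the flag comment onto its own line. The sketch below shows roughly what such a post looks like over plain HTTP; the endpoint path and field names come from the hunk, while the shelf value, the localhost:8080 manager address, and the use of urllib instead of the LFCliBase json_post helper are assumptions for illustration only.

import json
import urllib.request

bond_set_port = {
    "shelf": 1,          # assumed; not shown in the hunk
    "resource": 1,
    "port": "bond0000",
    "current_flags": 0x80000000,
    # (0x2 + 0x4000 + 0x800000)  # current, dhcp, down
    "interest": 0x4000,  # "interest" selects which flag/field bits the command applies
}

req = urllib.request.Request(
    "http://localhost:8080/cli-json/set_port",
    data=json.dumps(bond_set_port).encode(),
    headers={"Content-Type": "application/json"})
# urllib.request.urlopen(req)  # uncomment only against a live LANforge manager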
@@ -13,7 +13,6 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)
 
-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
 
 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -24,7 +23,7 @@ Realm = realm.Realm
 
 
 class CreateBridge(Realm):
-    def __init__(self,sta_list,resource,target_device,radio,
+    def __init__(self, sta_list, resource, target_device, radio,
                  _ssid=None,
                  _security=None,
                  _password=None,
@@ -54,7 +53,6 @@ class CreateBridge(Realm):
         pprint.pprint(self.sta_list)
         print("---- ~bridge List ----- ----- ----- ----- ----- ----- \n")
 
-
     def build(self):
         # Build bridges
 
@@ -71,13 +69,12 @@ class CreateBridge(Realm):
             "resource": self.resource,
             "port": "br0",
             "current_flags": 0x80000000,
-            "interest": 0x4000  # (0x2 + 0x4000 + 0x800000)  # current, dhcp, down
+            # (0x2 + 0x4000 + 0x800000)  # current, dhcp, down
+            "interest": 0x4000
         }
         self.json_post("cli-json/set_port", bridge_set_port)
 
 
-
-
 def main():
     parser = LFCliBase.create_basic_argparse(
         prog='create_bridge.py',
@@ -101,17 +98,19 @@ Command example:
     --debug
             ''')
     required = parser.add_argument_group('required arguments')
-    required.add_argument('--target_device', help='Where the bridges should be connecting', required=True)
-    #required.add_argument('--security', help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >', required=True)
+    required.add_argument(
+        '--target_device', help='Where the bridges should be connecting', required=True)
+    # required.add_argument('--security', help='WiFi Security protocol: < open | wep | wpa | wpa2 | wpa3 >', required=True)
 
     optional = parser.add_argument_group('optional arguments')
-    optional.add_argument('--num_bridges', help='Number of bridges to Create', required=False)
+    optional.add_argument(
+        '--num_bridges', help='Number of bridges to Create', required=False)
     args = parser.parse_args()
-    #if args.debug:
+    # if args.debug:
     #     pprint.pprint(args)
     #     time.sleep(5)
-    if (args.radio is None):
+    if args.radio is None:
         raise ValueError("--radio required")
 
     num_bridge = 2
     if (args.num_bridges is not None) and (int(args.num_bridges) > 0):
@@ -119,25 +118,26 @@ Command example:
         num_bridge = num_bridges_converted
 
     bridge_list = LFUtils.port_name_series(prefix="bridge",
                                            start_id=0,
-                                           end_id=num_bridge-1,
+                                           end_id=num_bridge - 1,
                                            padding_number=10000,
                                            radio=args.radio)
 
     create_bridge = CreateBridge(_host=args.mgr,
                                  _port=args.mgr_port,
                                  _ssid=args.ssid,
                                  _password=args.passwd,
                                  _security=args.security,
                                  _bridge_list=bridge_list,
                                  radio=args.radio,
                                  _debug_on=args.debug,
                                  sta_list=bridge_list,
                                  resource=1,
                                  target_device=args.target_device)
 
     create_bridge.build()
     print('Created %s bridges' % num_bridge)
 
 
 if __name__ == "__main__":
     main()
 
@@ -41,7 +41,6 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)
 
-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
 
 cv_test_manager = importlib.import_module("py-json.cv_test_manager")
@@ -52,28 +51,31 @@ class CreateChamberview(cv):
     def __init__(self,
                  lfmgr="localhost",
                  port="8080",
                  ):
         super().__init__(
             lfclient_host=lfmgr,
             lfclient_port=port,
         )
         self.lfmgr = lfmgr
         self.port = port
 
-    def clean_cv_scenario(self,type="Network-Connectivity",scenario_name=None):
-        self.rm_cv_text_blob(type,scenario_name)
+    def clean_cv_scenario(
+            self,
+            cv_type="Network-Connectivity",
+            scenario_name=None):
+        self.rm_cv_text_blob(cv_type, scenario_name)
 
     def setup(self,
               create_scenario="",
               line="",
-              raw_line=[]):
+              raw_line=None):
 
         if raw_line:
             print("creating %s scenario" % create_scenario)
             for create_lines in raw_line:
-                self.pass_raw_lines_to_cv(create_scenario,create_lines[0])
+                self.pass_raw_lines_to_cv(create_scenario, create_lines[0])
 
-        #check for lines
+        # check for lines
         if line:
             scenario_name = create_scenario
             line = line
@@ -88,85 +90,80 @@ class CreateChamberview(cv):
             Freq = "-1"
             VLAN = ""
 
-            for i in range(len(line)):
-                if " " in line[i][0]:
-                    line[i][0] = (re.split(' ', line[i][0]))
-                elif "," in line[i][0]:
-                    line[i][0] = (re.split(',', line[i][0]))
-                elif ", " in line[i][0]:
-                    line[i][0] = (re.split(',', line[i][0]))
-                elif " ," in line[i][0]:
-                    line[i][0] = (re.split(',', line[i][0]))
+            for item in line:
+                if " " in item[0]:
+                    item[0] = (re.split(' ', item[0]))
+                elif "," in item[0]:
+                    item[0] = (re.split(',', item[0]))
                 else:
                     print("Wrong arguments entered !")
                     exit(1)
 
                 print("creating %s scenario" % scenario_name)
-                for j in range(len(line[i][0])):
-                    line[i][0][j] = line[i][0][j].split("=")
-                    for k in range(len(line[i][0][j])):
-                        name = line[i][0][j][k]
-                        if str(name) == "Resource" or str(name) == "Res" or str(name) == "R":
-                            Resource = line[i][0][j][k + 1]
-                        elif str(name) == "Profile" or str(name) == "Prof" or str(name) == "P":
-                            Profile = line[i][0][j][k + 1]
-                        elif str(name) == "Amount" or str(name) == "Sta" or str(name) == "A":
-                            Amount = line[i][0][j][k + 1]
-                        elif str(name) == "Uses-1" or str(name) == "U1" or str(name) == "U-1":
-                            Uses1 = line[i][0][j][k + 1]
-                        elif str(name) == "Uses-2" or str(name) == "U2" or str(name) == "U-2":
-                            Uses2 = line[i][0][j][k + 1]
-                        elif str(name) == "Freq" or str(name) == "Freq" or str(name) == "F":
-                            Freq = line[i][0][j][k + 1]
-                        elif str(name) == "DUT" or str(name) == "dut" or str(name) == "D":
-                            DUT = line[i][0][j][k + 1]
-                        elif str(name) == "DUT_Radio" or str(name) == "dr" or str(name) == "DR":
-                            DUT_Radio = line[i][0][j][k + 1]
-                        elif str(name) == "Traffic" or str(name) == "Traf" or str(name) == "T":
-                            Traffic = line[i][0][j][k + 1]
-                        elif str(name) == "VLAN" or str(name) == "Vlan" or str(name) == "V":
-                            VLAN = line[i][0][j][k + 1]
-                        else:
-                            continue
+                for sub_item in item[0]:
+                    sub_item = sub_item.split("=")
+                    if sub_item[0] == "Resource" or str(
+                            sub_item[0]) == "Res" or sub_item[0] == "R":
+                        Resource = sub_item[1]
+                    elif sub_item[0] == "Profile" or sub_item[0] == "Prof" or sub_item[0] == "P":
+                        Profile = sub_item[1]
+                    elif sub_item[0] == "Amount" or sub_item[0] == "Sta" or sub_item[0] == "A":
+                        Amount = sub_item[1]
+                    elif sub_item[0] == "Uses-1" or sub_item[0] == "U1" or sub_item[0] == "U-1":
+                        Uses1 = sub_item[1]
+                    elif sub_item[0] == "Uses-2" or sub_item[0] == "U2" or sub_item[0] == "U-2":
+                        Uses2 = sub_item[1]
+                    elif sub_item[0] == "Freq" or sub_item[0] == "Freq" or sub_item[0] == "F":
+                        Freq = sub_item[1]
+                    elif sub_item[0] == "DUT" or sub_item[0] == "dut" or sub_item[0] == "D":
+                        DUT = sub_item[1]
+                    elif sub_item[0] == "DUT_Radio" or sub_item[0] == "dr" or sub_item[0] == "DR":
+                        DUT_Radio = sub_item[1]
+                    elif sub_item[0] == "Traffic" or sub_item[0] == "Traf" or sub_item[0] == "T":
+                        Traffic = sub_item[1]
+                    elif sub_item[0] == "VLAN" or sub_item[0] == "Vlan" or sub_item[0] == "V":
+                        VLAN = sub_item[1]
+                    else:
+                        continue
 
                 self.add_text_blob_line(scenario_name,
                                         Resource,
                                         Profile,
                                         Amount,
                                         DUT,
                                         DUT_Radio,
                                         Uses1,
                                         Uses2,
                                         Traffic,
                                         Freq,
                                         VLAN
-                                        );  # To manage scenario
+                                        )  # To manage scenario
         if not line and not raw_line:
             raise Exception("scenario creation failed")
 
         return True
 
-    def build(self,scenario_name):
+    def build(self, scenario_name):
         self.sync_cv()  # chamberview sync
         time.sleep(2)
         self.apply_cv_scenario(scenario_name)  # Apply scenario
         self.show_text_blob(None, None, False)  # Show changes on GUI
         self.apply_cv_scenario(scenario_name)  # Apply scenario
         self.build_cv_scenario()  # build scenario
         tries = 0
-        while (True):
+        while True:
             self.get_popup_info_and_close()
             if not self.get_cv_is_built():
                 # It can take a while to build a large scenario, so wait-time
                 # is currently max of 5 minutes.
-                print("Waiting %i/300 for Chamber-View to be built." % (tries))
+                print("Waiting %i/300 for Chamber-View to be built." % tries)
                 tries += 1
-                if (tries > 300):
+                if tries > 300:
                     break
                 time.sleep(1)
             else:
                 break
-        print("completed building %s scenario" %scenario_name)
+        print("completed building %s scenario" % scenario_name)
 
 
 def main():
@@ -177,36 +174,53 @@ def main():
     For Two line scenario use --line twice as shown in example, for multi line scenario
     use --line argument to create multiple lines
     \n
     create_chamberview.py -m "localhost" -o "8080" -cs "scenario_name"
         --line "Resource=1.1 Profile=STA-AC Amount=1 Uses-1=wiphy0 Uses-2=AUTO Freq=-1
-        DUT=Test DUT_Radio=Radio-1 Traffic=http VLAN="
-        --line "Resource=1.1 Profile=upstream Amount=1 Uses-1=eth1 Uses-2=AUTO Freq=-1
         DUT=Test DUT_Radio=Radio-1 Traffic=http VLAN="
-        ******************************** OR ********************************
+        --line "Resource=1.1 Profile=upstream Amount=1 Uses-1=eth1 Uses-2=AUTO Freq=-1
+        DUT=Test DUT_Radio=Radio-1 Traffic=http VLAN="
+        ******************************** OR ********************************
     create_chamberview.py -m "localhost" -o "8080" -cs "scenario_name"
         --raw_line "profile_link 1.1 STA-AC 10 'DUT: temp Radio-1' tcp-dl-6m-vi wiphy0,AUTO -1"
         --raw_line "profile_link 1.1 upstream 1 'DUT: temp Radio-1' tcp-dl-6m-vi eth1,AUTO -1"
 
     """)
-    parser.add_argument("-m", "--lfmgr", type=str,
-                        help="address of the LANforge GUI machine (localhost is default)")
-    parser.add_argument("-o", "--port", type=int, default=8080,
-                        help="IP Port the LANforge GUI is listening on (8080 is default)")
-    parser.add_argument("-cs", "--create_scenario", "--create_lf_scenario", type=str,
-                        help="name of scenario to be created")
+    parser.add_argument(
+        "-m",
+        "--lfmgr",
+        type=str,
+        help="address of the LANforge GUI machine (localhost is default)")
+    parser.add_argument(
+        "-o",
+        "--port",
+        type=int,
+        default=8080,
+        help="IP Port the LANforge GUI is listening on (8080 is default)")
+    parser.add_argument(
+        "-cs",
+        "--create_scenario",
+        "--create_lf_scenario",
+        type=str,
+        help="name of scenario to be created")
     parser.add_argument("-l", "--line", action='append', nargs='+',
                         help="line number", default=[])
     parser.add_argument("-rl", "--raw_line", action='append', nargs=1,
                         help="raw lines", default=[])
-    parser.add_argument("-ds", "--delete_scenario", default=False, action='store_true',
-                        help="delete scenario (by default: False)")
+    parser.add_argument(
+        "-ds",
+        "--delete_scenario",
+        default=False,
+        action='store_true',
+        help="delete scenario (by default: False)")
     args = parser.parse_args()
 
     Create_Chamberview = CreateChamberview(lfmgr=args.lfmgr,
                                            port=args.port,
                                            )
     if args.delete_scenario:
-        Create_Chamberview.clean_cv_scenario(type="Network-Connectivity", scenario_name=args.create_scenario)
+        Create_Chamberview.clean_cv_scenario(
+            cv_type="Network-Connectivity",
+            scenario_name=args.create_scenario)
 
     Create_Chamberview.setup(create_scenario=args.create_scenario,
                              line=args.line,
 
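The large hunk above rewrites the --line parser from index arithmetic to direct iteration over key=value tokens. A minimal sketch of the same parsing idea, assuming a single --line string like the one in the epilog (the helper name parse_line is illustrative, not part of this commit):

def parse_line(line):
    # Accept either space- or comma-separated key=value tokens.
    sep = ',' if ',' in line else ' '
    fields = {}
    for token in line.split(sep):
        token = token.strip()
        if '=' in token:
            key, value = token.split('=', 1)
            fields[key] = value
    return fields


print(parse_line("Resource=1.1 Profile=STA-AC Amount=1 Uses-1=wiphy0 Freq=-1 DUT=Test"))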
@@ -54,7 +54,6 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)
 
-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
 
 # from cv_dut_profile import cv_dut as dut
@@ -70,7 +69,7 @@ class DUT(dut):
                  lfmgr="localhost",
                  port="8080",
                  dut_name="DUT",
-                 ssid=[],
+                 ssid=None,
                  sw_version="NA",
                  hw_version="NA",
                  serial_num="NA",
@@ -87,12 +86,13 @@ class DUT(dut):
             desired_dut_flags=dut_flags,
             desired_dut_flags_mask=dut_flags
         )
+        if ssid is None:
+            ssid = []
         self.cv_dut_name = dut_name
         self.cv_test = cvtest(lfmgr, port)
         self.dut_name = dut_name
         self.ssid = ssid
 
-
     def setup(self):
         self.create_dut()
 
@@ -115,16 +115,14 @@ class DUT(dut):
                 d[item[0].lower()] = item[1]
             self.ssid[j] = d
             self.ssid[j]['flag'] = []
-            self.ssid[j].keys
 
-            flag=0x0
+            flag = 0x0
             if 'security' in self.ssid[j].keys():
-                self.ssid[j]['security'] = self.ssid[j]['security'].split('|')
+                self.ssid[j]['security'] = self.ssid[j]['security'].split(
+                    '|')
                 for security in self.ssid[j]['security']:
-                    try:
+                    if security.lower() in flags:
                         flag |= flags[security.lower()]
-                    except:
-                        pass
             self.ssid[j]['flag'] = flag
 
             if 'bssid' not in self.ssid[j].keys():
@@ -148,34 +146,55 @@ def main():
         prog='create_chamberview_dut.py',
         formatter_class=argparse.RawTextHelpFormatter,
         description="""
         ./create_chamberview_dut -m "localhost" -o "8080" -d "dut_name"
         --ssid "ssid_idx=0 ssid=NET1 security=WPA|WEP|11r|EAP-PEAP bssid=78:d2:94:bf:16:41"
         --ssid "ssid_idx=1 ssid=NET1 security=WPA password=test bssid=78:d2:94:bf:16:40"
         """)
-    parser.add_argument("-m", "--lfmgr", type=str, default="localhost",
-                        help="address of the LANforge GUI machine (localhost is default)")
-    parser.add_argument("-o", "--port", type=str, default="8080",
-                        help="IP Port the LANforge GUI is listening on (8080 is default)")
+    parser.add_argument(
+        "-m",
+        "--lfmgr",
+        type=str,
+        default="localhost",
+        help="address of the LANforge GUI machine (localhost is default)")
+    parser.add_argument(
+        "-o",
+        "--port",
+        type=str,
+        default="8080",
+        help="IP Port the LANforge GUI is listening on (8080 is default)")
    parser.add_argument("-d", "--dut_name", type=str, default="DUT",
                        help="set dut name")
     parser.add_argument("-s", "--ssid", action='append', nargs=1,
                         help="SSID", default=[])
 
-    parser.add_argument("--sw_version", default="NA", help="DUT Software version.")
-    parser.add_argument("--hw_version", default="NA", help="DUT Hardware version.")
-    parser.add_argument("--serial_num", default="NA", help="DUT Serial number.")
+    parser.add_argument(
+        "--sw_version",
+        default="NA",
+        help="DUT Software version.")
+    parser.add_argument(
+        "--hw_version",
+        default="NA",
+        help="DUT Hardware version.")
+    parser.add_argument(
+        "--serial_num",
+        default="NA",
+        help="DUT Serial number.")
     parser.add_argument("--model_num", default="NA", help="DUT Model Number.")
-    parser.add_argument('--dut_flag', help='station flags to add', default=None, action='append')
+    parser.add_argument(
+        '--dut_flag',
+        help='station flags to add',
+        default=None,
+        action='append')
 
     args = parser.parse_args()
     new_dut = DUT(lfmgr=args.lfmgr,
                   port=args.port,
                   dut_name=args.dut_name,
                   ssid=args.ssid,
-                  sw_version = args.sw_version,
-                  hw_version = args.hw_version,
-                  serial_num = args.serial_num,
-                  model_num = args.model_num,
+                  sw_version=args.sw_version,
+                  hw_version=args.hw_version,
+                  serial_num=args.serial_num,
+                  model_num=args.model_num,
                   dut_flags=args.dut_flag
                   )
 
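The hunk above replaces a try/except around the security-flag lookup with an explicit membership test. A minimal sketch of turning a token string such as "WPA|WEP|11r|EAP-PEAP" into a combined flag value; the flag bit values and helper name here are made up for illustration, the real table lives in the DUT profile code.

flags = {'wpa': 0x1, 'wep': 0x2, 'wpa2': 0x4, '11r': 0x8, 'eap-peap': 0x10}  # illustrative values


def security_flag(security):
    flag = 0x0
    for token in security.split('|'):
        if token.lower() in flags:  # skip unknown tokens instead of raising
            flag |= flags[token.lower()]
    return flag


print(hex(security_flag("WPA|WEP|11r|EAP-PEAP")))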
@@ -16,7 +16,6 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)
 
-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
 
 LANforge = importlib.import_module("py-json.LANforge")
@@ -46,10 +45,12 @@ class CreateL3(Realm):
         self.endp_a = endp_a
         self.mode = mode
         self.name_prefix = name_prefix
-        self.station_profile = self.new_station_profile()
+        # self.station_profile = self.new_station_profile()
+        # self.station_profile.lfclient_url = self.lfclient_url
+        # self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0,
+        # end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user
+        # defined variable from terminal.
         self.cx_profile = self.new_l3_cx_profile()
-        self.station_profile.lfclient_url = self.lfclient_url
-        self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
         self.cx_profile.host = self.host
         self.cx_profile.port = self.port
         self.cx_profile.name_prefix = self.name_prefix
@@ -66,47 +67,16 @@ class CreateL3(Realm):
                                side_a=self.endp_a,
                                side_b=self.endp_b,
                                sleep_time=0)
+        # self.cx_profile.start_cx()
         self._pass("PASS: Cross-connect build finished")
 
 
-def main():
-    parser = LFCliBase.create_basic_argparse(
-        prog='create_l3.py',
-        formatter_class=argparse.RawTextHelpFormatter,
-        epilog='''\
-            Generate traffic between ports
-            ''',
-        description='''\
-        ''')
-
-    required_args = None
-    for group in parser._action_groups:
-        if group.title == "required arguments":
-            required_args = group
-            break
-    if required_args is not None:
-        required_args.add_argument('--min_rate_a', help='--min_rate_a bps rate minimum for side_a', default=56000)
-        required_args.add_argument('--min_rate_b', help='--min_rate_b bps rate minimum for side_b', default=56000)
-        required_args.add_argument('--endp_a', help='--endp_a station list', default=["eth1"], action="append")
-        required_args.add_argument('--endp_b', help='--upstream port', default="eth2")
-
-    optional_args = None
-    for group in parser._action_groups:
-        if group.title == "optional arguments":
-            optional_args = group
-            break;
-    if optional_args is not None:
-        optional_args.add_argument('--mode', help='Used to force mode of stations', default=0)
-        optional_args.add_argument('--ap', help='Used to force a connection to a particular AP')
-        optional_args.add_argument('--number_template', help='Start the station numbering with a particular number. Default is 0000', default=0000)
-    args = parser.parse_args()
-
-    num_sta = 2
-    if (args.num_stations is not None) and (int(args.num_stations) > 0):
-        num_sta = int(args.num_stations)
-
-    # station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=int(args.number_template), end_id_=num_sta+int(args.number_template) - 1, padding_number_=10000,
-    #                                       radio=args.radio)
+def main(args):
+    num_sta = 0
+    # if (args.num_stations is not None) and (int(args.num_stations) > 0):
+    #     num_sta = int(args.num_stations)
     ip_var_test = CreateL3(host=args.mgr,
                            port=args.mgr_port,
                            name_prefix="VT",
@@ -117,13 +87,53 @@ def main():
                            mode=args.mode,
                            _debug_on=args.debug)
 
-    ip_var_test.pre_cleanup()
+    # ip_var_test.pre_cleanup()
     ip_var_test.build()
     if not ip_var_test.passes():
         print(ip_var_test.get_fail_message())
         ip_var_test.exit_fail()
-    print('Created %s stations and connections' % num_sta)
+    print(f'Created {num_sta} stations and connections')
 
 
 if __name__ == "__main__":
-    main()
+    parser = LFCliBase.create_basic_argparse(
+        prog='create_l3.py',
+        formatter_class=argparse.RawTextHelpFormatter,
+        epilog='''\
+            Generate traffic between ports
+            ''',
+        description='''\
+        ''')
+    parser.add_argument(
+        '--min_rate_a',
+        help='--min_rate_a bps rate minimum for side_a',
+        default=56000)
+    parser.add_argument(
+        '--min_rate_b',
+        help='--min_rate_b bps rate minimum for side_b',
+        default=56000)
+    parser.add_argument(
+        '--endp_a',
+        help='--endp_a station list',
+        default=[],
+        action="append",
+        required=True)
+    parser.add_argument(
+        '--endp_b',
+        help='--upstream port',
+        default="eth2",
+        required=True)
+    parser.add_argument(
+        '--mode',
+        help='Used to force mode of stations',
+        default=0)
+    parser.add_argument(
+        '--ap',
+        help='Used to force a connection to a particular AP')
+    parser.add_argument(
+        '--number_template',
+        help='Start the station numbering with a particular number. Default is 0000',
+        default=0000)
+    args = parser.parse_args()
+
+    main(args)
 
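The refactor above moves all argparse setup out of main() and into the __main__ block, passing the parsed namespace in as main(args). A minimal sketch of that structure, trimmed to two of the options shown in the hunk (the print statement is illustrative):

import argparse


def main(args):
    # Only the test logic lives here; the parser is built by the caller.
    print("endp_a=%s endp_b=%s" % (args.endp_a, args.endp_b))


if __name__ == "__main__":
    parser = argparse.ArgumentParser(prog='create_l3.py')
    parser.add_argument('--endp_a', action='append', default=[], required=True)
    parser.add_argument('--endp_b', default="eth2", required=True)
    main(parser.parse_args())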
@@ -5,6 +5,8 @@
 
 Example script:
 './create_l3_stations.py --radio wiphy0 --ssid lanforge --password password --security wpa2'
+'./create_l3_stations.py --station_list sta00,sta01 --radio wiphy0 --ssid lanforge --password password --security wpa2'
+'./create_l3_stations.py --station_list sta00 sta01 --radio wiphy0 --ssid lanforge --password password --security wpa2'
 """
 
 import sys
@@ -24,15 +26,28 @@ from realm import Realm
 
 
 class CreateL3(Realm):
-    def __init__(self,
-                 ssid, security, password, sta_list, name_prefix, upstream, radio,
-                 host="localhost", port=8080, mode=0, ap=None,
-                 side_a_min_rate=56, side_a_max_rate=0,
-                 side_b_min_rate=56, side_b_max_rate=0,
-                 number_template="00000", use_ht160=False,
-                 _debug_on=False,
-                 _exit_on_error=False,
-                 _exit_on_fail=False):
+    def __init__(
+            self,
+            ssid,
+            security,
+            password,
+            sta_list,
+            name_prefix,
+            upstream,
+            radio,
+            host="localhost",
+            port=8080,
+            mode=0,
+            ap=None,
+            side_a_min_rate=56,
+            side_a_max_rate=0,
+            side_b_min_rate=56,
+            side_b_max_rate=0,
+            number_template="00000",
+            use_ht160=False,
+            _debug_on=False,
+            _exit_on_error=False,
+            _exit_on_fail=False):
         super().__init__(host, port)
         self.upstream = upstream
         self.host = host
@@ -61,7 +76,9 @@ class CreateL3(Realm):
         self.station_profile.mode = mode
         if self.ap is not None:
             self.station_profile.set_command_param("add_sta", "ap", self.ap)
-        # self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
+        # self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0,
+        # end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user
+        # defined variable from terminal.
 
         self.cx_profile.host = self.host
         self.cx_profile.port = self.port
@@ -74,7 +91,7 @@ class CreateL3(Realm):
     def pre_cleanup(self):
         self.cx_profile.cleanup_prefix()
         for sta in self.sta_list:
-            self.rm_port(sta, check_exists=True)
+            self.rm_port(sta, check_exists=True, debug_=False)
 
     def build(self):
 
@@ -83,8 +100,10 @@ class CreateL3(Realm):
                                           self.password)
         self.station_profile.set_number_template(self.number_template)
         print("Creating stations")
-        self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
-        self.station_profile.set_command_param("set_port", "report_timer", 1500)
+        self.station_profile.set_command_flag(
+            "add_sta", "create_admin_down", 1)
+        self.station_profile.set_command_param(
+            "set_port", "report_timer", 1500)
         self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
         self.station_profile.create(radio=self.radio,
                                     sta_names_=self.sta_list,
@@ -108,7 +127,7 @@ def main():
 create_l3_stations.py:
 --------------------
 Generic command layout:
 
 python3 ./create_l3_stations.py
     --upstream_port eth1
     --radio wiphy0
@@ -136,39 +155,92 @@ def main():
     --ap "00:0e:8e:78:e1:76"
     --number_template 0000
     --debug
 
+python3 ./create_l3_stations.py
+    --upstream_port eth1
+    --radio wiphy0
+    --station_list sta00,sta01
+    --security {open|wep|wpa|wpa2|wpa3} \\
+    --mode   1
+        {"auto"   : "0",
+        "a"      : "1",
+        "b"      : "2",
+        "g"      : "3",
+        "abg"    : "4",
+        "abgn"   : "5",
+        "bgn"    : "6",
+        "bg"     : "7",
+        "abgnAC" : "8",
+        "anAC"   : "9",
+        "an"     : "10",
+        "bgnAC"  : "11",
+        "abgnAX" : "12",
+        "bgnAX"  : "13",
+    --ssid netgear
+    --password admin123
+    --a_min 1000
+    --b_min 1000
+    --ap "00:0e:8e:78:e1:76"
+    --number_template 0000
+    --debug
         ''')
 
     required_args = None
     for group in parser._action_groups:
         if group.title == "required arguments":
             required_args = group
-            break;
+            break
     if required_args is not None:
-        required_args.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
-        required_args.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
+        required_args.add_argument(
+            '--a_min',
+            help='--a_min bps rate minimum for side_a',
+            default=256000)
+        required_args.add_argument(
+            '--b_min',
+            help='--b_min bps rate minimum for side_b',
+            default=256000)
 
     optional_args = None
     for group in parser._action_groups:
         if group.title == "optional arguments":
             optional_args = group
-            break;
+            break
-    if optional_args is not None:
+    if optional_args:
-        optional_args.add_argument('--mode', help='Used to force mode of stations')
-        optional_args.add_argument('--ap', help='Used to force a connection to a particular AP')
-        optional_args.add_argument('--number_template', help='Start the station numbering with a particular number. Default is 0000', default=0000)
-        optional_args.add_argument('--station_list', help='Optional: User defined station names', action='append',default=None)
+        optional_args.add_argument(
+            '--mode', help='Used to force mode of stations')
+        optional_args.add_argument(
+            '--ap', help='Used to force a connection to a particular AP')
+        optional_args.add_argument(
+            '--number_template',
+            help='Start the station numbering with a particular number. Default is 0000',
+            default=0000)
+        optional_args.add_argument(
+            '--station_list',
+            help='Optional: User defined station names, can be a comma or space separated list',
+            nargs='+',
+            default=None)
+        optional_args.add_argument(
+            '--no_cleanup',
+            help="Optional: Don't cleanup existing stations",
+            action='store_true')
     args = parser.parse_args()
 
     num_sta = 2
     if (args.num_stations is not None) and (int(args.num_stations) > 0):
         num_sta = int(args.num_stations)
 
-    if args.station_list is None:
-        station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=int(args.number_template), end_id_=num_sta+int(args.number_template) - 1, padding_number_=10000,
-                                              radio=args.radio)
+    if not args.station_list:
+        station_list = LFUtils.portNameSeries(
+            prefix_="sta", start_id_=int(
+                args.number_template), end_id_=num_sta + int(
+                args.number_template) - 1, padding_number_=10000, radio=args.radio)
     else:
-        station_list = args.station_list
+        if ',' in args.station_list[0]:
+            station_list = args.station_list[0].split(',')
+        elif ' ' in args.station_list[0]:
+            station_list = args.station_list[0].split()
+        else:
+            station_list = args.station_list
     ip_var_test = CreateL3(host=args.mgr,
                            port=args.mgr_port,
                            number_template=str(args.number_template),
@@ -186,7 +258,8 @@ def main():
                            ap=args.ap,
                            _debug_on=args.debug)
 
-    ip_var_test.pre_cleanup()
+    if not args.no_cleanup:
+        ip_var_test.pre_cleanup()
     ip_var_test.build()
     if not ip_var_test.passes():
         print(ip_var_test.get_fail_message())
 
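The --station_list change above accepts either a comma-separated token or a space-separated list via nargs='+'. A minimal sketch of the same normalization, with an illustrative helper name:

def normalize_station_list(station_list):
    # nargs='+' already splits on whitespace; a single comma- or
    # space-separated token still needs to be broken apart.
    if not station_list:
        return []
    first = station_list[0]
    if ',' in first:
        return first.split(',')
    if ' ' in first:
        return first.split()
    return station_list


print(normalize_station_list(['sta00,sta01']))    # ['sta00', 'sta01']
print(normalize_station_list(['sta00', 'sta01']))  # ['sta00', 'sta01']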
@@ -13,7 +13,6 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)
 
-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
 
 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -25,15 +24,28 @@ TestGroupProfile = realm.TestGroupProfile
 
 
 class CreateL4(Realm):
-    def __init__(self,
-                 ssid, security, password, sta_list, name_prefix, upstream, radio,
-                 host="localhost", port=8080, mode = 0, ap=None,
-                 side_a_min_rate=56, side_a_max_rate=0,
-                 side_b_min_rate=56, side_b_max_rate=0,
-                 number_template="00000", use_ht160=False,
-                 _debug_on=False,
-                 _exit_on_error=False,
-                 _exit_on_fail=False):
+    def __init__(
+            self,
+            ssid,
+            security,
+            password,
+            sta_list,
+            name_prefix,
+            upstream,
+            radio,
+            host="localhost",
+            port=8080,
+            mode=0,
+            ap=None,
+            side_a_min_rate=56,
+            side_a_max_rate=0,
+            side_b_min_rate=56,
+            side_b_max_rate=0,
+            number_template="00000",
+            use_ht160=False,
+            _debug_on=False,
+            _exit_on_error=False,
+            _exit_on_fail=False):
         super().__init__(host, port)
         self.upstream = upstream
         self.host = host
@@ -43,8 +55,8 @@ class CreateL4(Realm):
         self.security = security
         self.password = password
         self.radio = radio
-        self.mode= mode
-        self.ap=ap
+        self.mode = mode
+        self.ap = ap
         self.number_template = number_template
         self.debug = _debug_on
         self.name_prefix = name_prefix
@@ -61,9 +73,10 @@ class CreateL4(Realm):
         self.station_profile.mode = 9
         self.station_profile.mode = mode
         if self.ap is not None:
-            self.station_profile.set_command_param("add_sta", "ap",self.ap)
-        #self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user defined variable from terminal.
+            self.station_profile.set_command_param("add_sta", "ap", self.ap)
+        # self.station_list= LFUtils.portNameSeries(prefix_="sta", start_id_=0,
+        # end_id_=2, padding_number_=10000, radio='wiphy0') #Make radio a user
+        # defined variable from terminal.
 
         self.cx_profile.host = self.host
         self.cx_profile.port = self.port
@@ -76,22 +89,34 @@ class CreateL4(Realm):
     def cleanup(self):
         self.cx_profile.cleanup()
         self.station_profile.cleanup()
-        LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url,
-                                           port_list=self.station_profile.station_names,
-                                           debug=self.debug)
+        LFUtils.wait_until_ports_disappear(
+            base_url=self.lfclient_url,
+            port_list=self.station_profile.station_names,
+            debug=self.debug)
 
     def build(self):
         # Build stations
-        self.station_profile.use_security(self.security, self.ssid, self.password)
+        self.station_profile.use_security(
+            self.security, self.ssid, self.password)
         self.station_profile.set_number_template(self.number_template)
         print("Creating stations")
-        self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
-        self.station_profile.set_command_param("set_port", "report_timer", 1500)
+        self.station_profile.set_command_flag(
+            "add_sta", "create_admin_down", 1)
+        self.station_profile.set_command_param(
+            "set_port", "report_timer", 1500)
         self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
-        self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
+        self.station_profile.create(
+            radio=self.radio,
+            sta_names_=self.sta_list,
+            debug=self.debug)
         self._pass("PASS: Station build finished")
 
-        self.cx_profile.create(ports=self.station_profile.station_names, sleep_time=.5, debug_=self.debug, suppress_related_commands_=True)
+        self.cx_profile.create(
+            ports=self.station_profile.station_names,
+            sleep_time=.5,
+            debug_=self.debug,
+            suppress_related_commands_=True)
 
 
 def main():
     parser = LFCliBase.create_basic_argparse(
@@ -134,46 +159,61 @@ python3 ./layer4.py
     --debug
             ''')
 
-    required_args=None
+    required_args = None
     for group in parser._action_groups:
         if group.title == "required arguments":
-            required_args=group
-            break;
+            required_args = group
+            break
     if required_args is not None:
-        required_args.add_argument('--a_min', help='--a_min bps rate minimum for side_a', default=256000)
-        required_args.add_argument('--b_min', help='--b_min bps rate minimum for side_b', default=256000)
+        required_args.add_argument(
+            '--a_min',
+            help='--a_min bps rate minimum for side_a',
+            default=256000)
+        required_args.add_argument(
+            '--b_min',
+            help='--b_min bps rate minimum for side_b',
+            default=256000)
 
-    optional_args=None
+    optional_args = None
     for group in parser._action_groups:
         if group.title == "optional arguments":
-            optional_args=group
-            break;
+            optional_args = group
+            break
     if optional_args is not None:
-        optional_args.add_argument('--mode',help='Used to force mode of stations', default=0)
-        optional_args.add_argument('--ap',help='Used to force a connection to a particular AP')
+        optional_args.add_argument(
+            '--mode',
+            help='Used to force mode of stations',
+            default=0)
+        optional_args.add_argument(
+            '--ap', help='Used to force a connection to a particular AP')
     args = parser.parse_args()
 
     num_sta = 2
     if (args.num_stations is not None) and (int(args.num_stations) > 0):
         num_sta = int(args.num_stations)
 
-    station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=0, end_id_=num_sta-1, padding_number_=10000, radio=args.radio)
+    station_list = LFUtils.portNameSeries(
+        prefix_="sta",
+        start_id_=0,
+        end_id_=num_sta - 1,
+        padding_number_=10000,
+        radio=args.radio)
     ip_var_test = CreateL4(host=args.mgr,
                            port=args.mgr_port,
                            number_template="0000",
                            sta_list=station_list,
                            name_prefix="VT",
                            upstream=args.upstream_port,
                            ssid=args.ssid,
                            password=args.passwd,
                            radio=args.radio,
                            security=args.security,
                            use_ht160=False,
                            side_a_min_rate=args.a_min,
                            side_b_min_rate=args.b_min,
                            mode=args.mode,
                            ap=args.ap,
                            _debug_on=args.debug)
 
     ip_var_test.cleanup()
     ip_var_test.build()
 
@@ -8,7 +8,6 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)
 
-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
 
 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -29,7 +28,7 @@ class CreateMacVlan(Realm):
                  netmask=None,
                  gateway=None,
                  dhcp=True,
-                 port_list=[],
+                 port_list=None,
                  ip_list=None,
                  connections_per_port=1,
                  _debug_on=False,
@@ -51,7 +50,6 @@ class CreateMacVlan(Realm):
 
         self.mvlan_profile = self.new_mvlan_profile()
 
-
         self.mvlan_profile.num_macvlans = int(num_ports)
         self.mvlan_profile.desired_macvlans = self.port_list
         self.mvlan_profile.macvlan_parent = self.macvlan_parent
@@ -65,10 +63,14 @@ class CreateMacVlan(Realm):
     def build(self):
         # Build stations
         print("Creating MACVLANs")
-        self.mvlan_profile.create(admin_down=False, sleep_time=.5, debug=self.debug)
+        self.mvlan_profile.create(
+            admin_down=False,
+            sleep_time=.5,
+            debug=self.debug)
         self._pass("PASS: MACVLAN build finished")
         self.created_ports += self.mvlan_profile.created_macvlans
 
 
 def main():
     parser = LFCliBase.create_bare_argparse(
         prog='create_macvlan.py',
@@ -80,7 +82,7 @@ def main():
 create_macvlan.py:
 --------------------
 Generic command layout:
 ./create_macvlan.py --macvlan_parent <port> --num_ports <num ports>
     --first_mvlan_ip <first ip in series> --netmask <netmask to use> --gateway <gateway ip addr>
 
 ./create_macvlan.py --macvlan_parent eth2 --num_ports 3 --first_mvlan_ip 192.168.92.13
@@ -97,25 +99,58 @@ Generic command layout:
     --use_ports eth1#0=10.40.3.103,eth1#1,eth1#2 --connections_per_port 2
     --netmask 255.255.240.0 --gateway 10.40.0.1
 
+You can only add MAC-VLANs to Ethernet, Bonding, Redir, and 802.1Q VLAN devices.
+
         ''')
-    parser.add_argument('--num_stations', help='Number of stations to create', default=0)
+    parser.add_argument(
+        '--num_stations',
+        help='Number of stations to create',
+        default=0)
     parser.add_argument('--radio', help='radio EID, e.g: 1.wiphy2')
-    parser.add_argument('-u', '--upstream_port',
-                        help='non-station port that generates traffic: <resource>.<port>, e.g: 1.eth1',
-                        default='1.eth1')
-    parser.add_argument('--macvlan_parent', help='specifies parent port for macvlan creation', default=None)
-    parser.add_argument('--first_port', help='specifies name of first port to be used', default=None)
-    parser.add_argument('--num_ports', help='number of ports to create', default=1)
-    parser.add_argument('--connections_per_port', help='specifies number of connections to be used per port', default=1,
-                        type=int)
-    parser.add_argument('--use_ports', help='list of comma separated ports to use with ips, \'=\' separates name and ip'
-                        '{ port_name1=ip_addr1,port_name1=ip_addr2 }. '
-                        'Ports without ips will be left alone', default=None)
-    parser.add_argument('--first_mvlan_ip', help='specifies first static ip address to be used or dhcp', default=None)
-    parser.add_argument('--netmask', help='specifies netmask to be used with static ip addresses', default=None)
-    parser.add_argument('--gateway', help='specifies default gateway to be used with static addressing', default=None)
-    parser.add_argument('--cxs', help='list of cxs to add/remove depending on use of --add_to_group or --del_from_group'
-                        , default=None)
+    parser.add_argument(
+        '-u',
+        '--upstream_port',
+        help='non-station port that generates traffic: <resource>.<port>, e.g: 1.eth1',
+        default='1.eth1')
+    parser.add_argument(
+        '--macvlan_parent',
+        help='specifies parent port for macvlan creation',
+        required=True)
+    parser.add_argument(
+        '--first_port',
+        help='specifies name of first port to be used',
+        default=None)
+    parser.add_argument(
+        '--num_ports',
+        help='number of ports to create',
+        default=1)
+    parser.add_argument(
+        '--connections_per_port',
+        help='specifies number of connections to be used per port',
+        default=1,
+        type=int)
+    parser.add_argument(
+        '--use_ports',
+        help='list of comma separated ports to use with ips, \'=\' separates name and ip'
+             '{ port_name1=ip_addr1,port_name1=ip_addr2 }. '
+             'Ports without ips will be left alone',
+        default=None)
+    parser.add_argument(
+        '--first_mvlan_ip',
+        help='specifies first static ip address to be used or dhcp',
+        default=None)
+    parser.add_argument(
+        '--netmask',
+        help='specifies netmask to be used with static ip addresses',
+        default=None)
+    parser.add_argument(
+        '--gateway',
+        help='specifies default gateway to be used with static addressing',
+        default=None)
+    parser.add_argument(
+        '--cxs',
+        help='list of cxs to add/remove depending on use of --add_to_group or --del_from_group',
+        default=None)
     args = parser.parse_args()
 
     port_list = []
@@ -125,27 +160,38 @@ Generic command layout:
     if (args.num_ports is not None) and (int(args.num_ports) > 0):
         start_num = int(args.first_port[3:])
         num_ports = int(args.num_ports)
-        port_list = LFUtils.port_name_series(prefix="sta", start_id=start_num, end_id=start_num + num_ports - 1,
-                                             padding_number=10000,
-                                             radio=args.radio)
+        port_list = LFUtils.port_name_series(
+            prefix="sta",
+            start_id=start_num,
+            end_id=start_num + num_ports - 1,
+            padding_number=10000,
+            radio=args.radio)
     else:
-        if (args.num_ports is not None) and args.macvlan_parent is not None and (int(args.num_ports) > 0) \
-                and args.macvlan_parent in args.first_port:
+        if (args.num_ports is not None) and args.macvlan_parent is not None and (
+                int(args.num_ports) > 0) and args.macvlan_parent in args.first_port:
             start_num = int(args.first_port[args.first_port.index('#') + 1:])
|
start_num = int(
|
||||||
|
args.first_port[args.first_port.index('#') + 1:])
|
||||||
num_ports = int(args.num_ports)
|
num_ports = int(args.num_ports)
|
||||||
port_list = LFUtils.port_name_series(prefix=args.macvlan_parent + "#", start_id=start_num,
|
port_list = LFUtils.port_name_series(
|
||||||
end_id=start_num + num_ports - 1, padding_number=100000,
|
prefix=args.macvlan_parent + "#",
|
||||||
radio=args.radio)
|
start_id=start_num,
|
||||||
|
end_id=start_num + num_ports - 1,
|
||||||
|
padding_number=100000,
|
||||||
|
radio=args.radio)
|
||||||
else:
|
else:
|
||||||
raise ValueError("Invalid values for num_ports [%s], macvlan_parent [%s], and/or first_port [%s].\n"
|
raise ValueError(
|
||||||
"first_port must contain parent port and num_ports must be greater than 0"
|
"Invalid values for num_ports [%s], macvlan_parent [%s], and/or first_port [%s].\n"
|
||||||
% (args.num_ports, args.macvlan_parent, args.first_port))
|
"first_port must contain parent port and num_ports must be greater than 0" %
|
||||||
|
(args.num_ports, args.macvlan_parent, args.first_port))
|
||||||
else:
|
else:
|
||||||
if args.use_ports is None:
|
if args.use_ports is None:
|
||||||
num_ports = int(args.num_ports)
|
num_ports = int(args.num_ports)
|
||||||
port_list = LFUtils.port_name_series(prefix=args.macvlan_parent + "#", start_id=0,
|
port_list = LFUtils.port_name_series(
|
||||||
end_id=num_ports - 1, padding_number=100000,
|
prefix=args.macvlan_parent + "#",
|
||||||
radio=args.radio)
|
start_id=0,
|
||||||
|
end_id=num_ports - 1,
|
||||||
|
padding_number=100000,
|
||||||
|
radio=args.radio)
|
||||||
else:
|
else:
|
||||||
temp_list = args.use_ports.split(',')
|
temp_list = args.use_ports.split(',')
|
||||||
for port in temp_list:
|
for port in temp_list:
|
||||||
@@ -156,7 +202,8 @@ Generic command layout:
|
|||||||
ip_list.append(0)
|
ip_list.append(0)
|
||||||
|
|
||||||
if len(port_list) != len(ip_list):
|
if len(port_list) != len(ip_list):
|
||||||
raise ValueError(temp_list, " ports must have matching ip addresses!")
|
raise ValueError(
|
||||||
|
temp_list, " ports must have matching ip addresses!")
|
||||||
|
|
||||||
if args.first_mvlan_ip is not None:
|
if args.first_mvlan_ip is not None:
|
||||||
if args.first_mvlan_ip.lower() == "dhcp":
|
if args.first_mvlan_ip.lower() == "dhcp":
|
||||||
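For quick reference, a rough standalone sketch of the --use_ports parsing described in the help text and hunks above ('=' separates a port name from its ip, ports without an ip get a 0 placeholder). This is an illustration only, not the script's own code, and the eth1#N names are just examples:

def split_use_ports(use_ports):
    # '=' separates name and ip; ports listed without an ip are left alone (0 placeholder)
    port_list, ip_list = [], []
    for entry in use_ports.split(','):
        if '=' in entry:
            name, ip = entry.split('=', 1)
        else:
            name, ip = entry, 0
        port_list.append(name)
        ip_list.append(ip)
    return port_list, ip_list

print(split_use_ports("eth1#0=10.40.3.103,eth1#1,eth1#2"))
# (['eth1#0', 'eth1#1', 'eth1#2'], ['10.40.3.103', 0, 0])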
|
|||||||
@@ -8,7 +8,7 @@ if sys.version_info[0] != 3:
|
|||||||
print("This script requires Python 3")
|
print("This script requires Python 3")
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
|
|
||||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||||
|
|
||||||
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
||||||
@@ -29,11 +29,15 @@ class CreateQVlan(Realm):
|
|||||||
netmask=None,
|
netmask=None,
|
||||||
first_qvlan_ip=None,
|
first_qvlan_ip=None,
|
||||||
gateway=None,
|
gateway=None,
|
||||||
port_list=[],
|
port_list=None,
|
||||||
ip_list=[],
|
ip_list=None,
|
||||||
exit_on_error=False,
|
exit_on_error=False,
|
||||||
debug=False):
|
debug=False):
|
||||||
super().__init__(host, port)
|
super().__init__(host, port)
|
||||||
|
if port_list is None:
|
||||||
|
port_list = []
|
||||||
|
if ip_list is None:
|
||||||
|
ip_list = []
|
||||||
self.host = host
|
self.host = host
|
||||||
self.port = port
|
self.port = port
|
||||||
self.qvlan_parent = qvlan_parent
|
self.qvlan_parent = qvlan_parent
|
||||||
@@ -54,7 +58,8 @@ class CreateQVlan(Realm):
|
|||||||
|
|
||||||
def build(self):
|
def build(self):
|
||||||
print("Creating QVLAN stations")
|
print("Creating QVLAN stations")
|
||||||
self.qvlan_profile.create(admin_down=False, sleep_time=.5, debug=self.debug)
|
self.qvlan_profile.create(
|
||||||
|
sleep_time=.5)
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
@@ -68,21 +73,50 @@ def main():
|
|||||||
---------------------
|
---------------------
|
||||||
Generic command ''')
|
Generic command ''')
|
||||||
parser.add_argument('--radio', help='radio EID, e.g: 1.wiphy2')
|
parser.add_argument('--radio', help='radio EID, e.g: 1.wiphy2')
|
||||||
parser.add_argument('--qvlan_parent', help='specifies parent port for qvlan creation', default=None)
|
parser.add_argument(
|
||||||
parser.add_argument('--first_port', help='specifies name of first port to be used', default=None)
|
'--qvlan_parent',
|
||||||
parser.add_argument('--num_ports', help='number of ports to create', default=1)
|
help='specifies parent port for qvlan creation',
|
||||||
parser.add_argument('--first_qvlan_ip', help='specifies first static ip address to be used or dhcp', default=None)
|
default=None,
|
||||||
parser.add_argument('--netmask', help='specifies netmask to be used with static ip addresses', default=None)
|
required=True)
|
||||||
parser.add_argument('--gateway', help='specifies default gateway to be used with static addressing', default=None)
|
parser.add_argument(
|
||||||
parser.add_argument('--use_ports',
|
'--first_port',
|
||||||
help='list of comma separated ports to use with ips, \'=\' separates name and ip { port_name1=ip_addr1,port_name1=ip_addr2 }. Ports without ips will be left alone',
|
help='specifies name of first port to be used',
|
||||||
default=None)
|
default=None)
|
||||||
|
parser.add_argument(
|
||||||
|
'--num_ports',
|
||||||
|
type=int,
|
||||||
|
help='number of ports to create',
|
||||||
|
default=1)
|
||||||
|
parser.add_argument(
|
||||||
|
'--first_qvlan_ip',
|
||||||
|
help='specifies first static ip address to be used or dhcp',
|
||||||
|
default=None)
|
||||||
|
parser.add_argument(
|
||||||
|
'--netmask',
|
||||||
|
help='specifies netmask to be used with static ip addresses',
|
||||||
|
default=None)
|
||||||
|
parser.add_argument(
|
||||||
|
'--gateway',
|
||||||
|
help='specifies default gateway to be used with static addressing',
|
||||||
|
default=None)
|
||||||
|
parser.add_argument(
|
||||||
|
'--use_ports',
|
||||||
|
help='list of comma separated ports to use with ips, \'=\' separates name and ip { port_name1=ip_addr1,port_name1=ip_addr2 }. Ports without ips will be left alone',
|
||||||
|
default=None)
|
||||||
tg_group = parser.add_mutually_exclusive_group()
|
tg_group = parser.add_mutually_exclusive_group()
|
||||||
tg_group.add_argument('--add_to_group', help='name of test group to add cxs to', default=None)
|
tg_group.add_argument(
|
||||||
parser.add_argument('--cxs', help='list of cxs to add/remove depending on use of --add_to_group or --del_from_group'
|
'--add_to_group',
|
||||||
, default=None)
|
help='name of test group to add cxs to',
|
||||||
parser.add_argument('--use_qvlans', help='will create qvlans', action='store_true', default=False)
|
default=None)
|
||||||
|
parser.add_argument(
|
||||||
|
'--cxs',
|
||||||
|
help='list of cxs to add/remove depending on use of --add_to_group or --del_from_group',
|
||||||
|
default=None)
|
||||||
|
parser.add_argument(
|
||||||
|
'--use_qvlans',
|
||||||
|
help='will create qvlans',
|
||||||
|
action='store_true',
|
||||||
|
default=False)
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
@@ -100,34 +134,44 @@ def main():
|
|||||||
update_group_args['cxs'] = args.cxs
|
update_group_args['cxs'] = args.cxs
|
||||||
port_list = []
|
port_list = []
|
||||||
ip_list = []
|
ip_list = []
|
||||||
if args.first_port is not None and args.use_ports is not None:
|
if args.first_port and args.use_ports:
|
||||||
if args.first_port.startswith("sta"):
|
if args.first_port.startswith("sta"):
|
||||||
if (args.num_ports is not None) and (int(args.num_ports) > 0):
|
if args.num_ports and args.num_ports > 0:
|
||||||
start_num = int(args.first_port[3:])
|
start_num = int(args.first_port[3:])
|
||||||
num_ports = int(args.num_ports)
|
port_list = LFUtils.port_name_series(
|
||||||
port_list = LFUtils.port_name_series(prefix="sta", start_id=start_num, end_id=start_num + num_ports - 1,
|
prefix="sta",
|
||||||
padding_number=10000,
|
start_id=start_num,
|
||||||
radio=args.radio)
|
end_id=start_num + args.num_ports - 1,
|
||||||
|
padding_number=10000,
|
||||||
|
radio=args.radio)
|
||||||
print(1)
|
print(1)
|
||||||
else:
|
else:
|
||||||
if (args.num_ports is not None) and args.qvlan_parent is not None and (int(args.num_ports) > 0) \
|
if args.num_ports and args.qvlan_parent and (args.num_ports > 0) and args.qvlan_parent in args.first_port:
|
||||||
and args.qvlan_parent in args.first_port:
|
start_num = int(
|
||||||
start_num = int(args.first_port[args.first_port.index('#') + 1:])
|
args.first_port[args.first_port.index('#') + 1:])
|
||||||
num_ports = int(args.num_ports)
|
port_list = LFUtils.port_name_series(
|
||||||
port_list = LFUtils.port_name_series(prefix=args.qvlan_parent + "#", start_id=start_num,
|
prefix=str(
|
||||||
end_id=start_num + num_ports - 1, padding_number=10000,
|
args.qvlan_parent) + "#",
|
||||||
radio=args.radio)
|
start_id=start_num,
|
||||||
|
end_id=start_num + args.num_ports - 1,
|
||||||
|
padding_number=10000,
|
||||||
|
radio=args.radio)
|
||||||
print(2)
|
print(2)
|
||||||
else:
|
else:
|
||||||
raise ValueError("Invalid values for num_ports [%s], qvlan_parent [%s], and/or first_port [%s].\n"
|
raise ValueError(
|
||||||
"first_port must contain parent port and num_ports must be greater than 0"
|
"Invalid values for num_ports [%s], qvlan_parent [%s], and/or first_port [%s].\n"
|
||||||
% (args.num_ports, args.qvlan_parent, args.first_port))
|
"first_port must contain parent port and num_ports must be greater than 0" %
|
||||||
|
(args.num_ports, args.qvlan_parent, args.first_port))
|
||||||
else:
|
else:
|
||||||
if args.use_ports is None:
|
if not args.use_ports:
|
||||||
num_ports = int(args.num_ports)
|
num_ports = int(args.num_ports)
|
||||||
port_list = LFUtils.port_name_series(prefix=args.qvlan_parent + "#", start_id=1,
|
port_list = LFUtils.port_name_series(
|
||||||
end_id=num_ports, padding_number=10000,
|
prefix=str(
|
||||||
radio=args.radio)
|
args.qvlan_parent) + "#",
|
||||||
|
start_id=1,
|
||||||
|
end_id=num_ports,
|
||||||
|
padding_number=10000,
|
||||||
|
radio=args.radio)
|
||||||
print(3)
|
print(3)
|
||||||
else:
|
else:
|
||||||
temp_list = args.use_ports.split(',')
|
temp_list = args.use_ports.split(',')
|
||||||
@@ -139,7 +183,8 @@ def main():
|
|||||||
ip_list.append(0)
|
ip_list.append(0)
|
||||||
|
|
||||||
if len(port_list) != len(ip_list):
|
if len(port_list) != len(ip_list):
|
||||||
raise ValueError(temp_list, " ports must have matching ip addresses!")
|
raise ValueError(
|
||||||
|
temp_list, " ports must have matching ip addresses!")
|
||||||
|
|
||||||
print(port_list)
|
print(port_list)
|
||||||
print(ip_list)
|
print(ip_list)
|
||||||
@@ -155,7 +200,8 @@ def main():
|
|||||||
ip_list=ip_list,
|
ip_list=ip_list,
|
||||||
debug=args.debug)
|
debug=args.debug)
|
||||||
create_qvlan.build()
|
create_qvlan.build()
|
||||||
print('Created %s QVLAN stations' % num_ports)
|
print('Created %s QVLAN stations' % args.num_ports)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
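The port_list/ip_list change in CreateQVlan.__init__ above swaps mutable list defaults for the None-sentinel pattern. A minimal self-contained illustration of why (not code from the script):

def buggy(port_list=[]):
    port_list.append("eth1#0")
    return port_list

def fixed(port_list=None):
    if port_list is None:      # same sentinel pattern as CreateQVlan.__init__
        port_list = []
    port_list.append("eth1#0")
    return port_list

print(buggy())   # ['eth1#0']
print(buggy())   # ['eth1#0', 'eth1#0']  -- the one default list is shared across calls
print(fixed())   # ['eth1#0']
print(fixed())   # ['eth1#0']            -- a fresh list on every call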
|
|||||||
@@ -12,7 +12,7 @@ if sys.version_info[0] != 3:
|
|||||||
print("This script requires Python 3")
|
print("This script requires Python 3")
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
|
|
||||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||||
|
|
||||||
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
||||||
@@ -74,23 +74,31 @@ class CreateStation(Realm):
|
|||||||
|
|
||||||
def build(self):
|
def build(self):
|
||||||
# Build stations
|
# Build stations
|
||||||
self.station_profile.use_security(self.security, self.ssid, self.password)
|
self.station_profile.use_security(
|
||||||
|
self.security, self.ssid, self.password)
|
||||||
self.station_profile.set_number_template(self.number_template)
|
self.station_profile.set_number_template(self.number_template)
|
||||||
|
|
||||||
print("Creating stations")
|
print("Creating stations")
|
||||||
self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
|
self.station_profile.set_command_flag(
|
||||||
self.station_profile.set_command_param("set_port", "report_timer", 1500)
|
"add_sta", "create_admin_down", 1)
|
||||||
|
self.station_profile.set_command_param(
|
||||||
|
"set_port", "report_timer", 1500)
|
||||||
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
|
self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
|
||||||
if self.set_txo_data is not None:
|
if self.set_txo_data is not None:
|
||||||
self.station_profile.set_wifi_txo(txo_ena=self.set_txo_data["txo_enable"],
|
self.station_profile.set_wifi_txo(
|
||||||
tx_power=self.set_txo_data["txpower"],
|
txo_ena=self.set_txo_data["txo_enable"],
|
||||||
pream=self.set_txo_data["pream"],
|
tx_power=self.set_txo_data["txpower"],
|
||||||
mcs=self.set_txo_data["mcs"],
|
pream=self.set_txo_data["pream"],
|
||||||
nss=self.set_txo_data["nss"],
|
mcs=self.set_txo_data["mcs"],
|
||||||
bw=self.set_txo_data["bw"],
|
nss=self.set_txo_data["nss"],
|
||||||
retries=self.set_txo_data["retries"],
|
bw=self.set_txo_data["bw"],
|
||||||
sgi=self.set_txo_data["sgi"], )
|
retries=self.set_txo_data["retries"],
|
||||||
self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
|
sgi=self.set_txo_data["sgi"],
|
||||||
|
)
|
||||||
|
self.station_profile.create(
|
||||||
|
radio=self.radio,
|
||||||
|
sta_names_=self.sta_list,
|
||||||
|
debug=self.debug)
|
||||||
if self.up:
|
if self.up:
|
||||||
self.station_profile.admin_up()
|
self.station_profile.admin_up()
|
||||||
|
|
||||||
@@ -98,7 +106,7 @@ class CreateStation(Realm):
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
parser = LFCliBase.create_basic_argparse( # see create_basic_argparse in ../py-json/LANforge/lfcli_base.py
|
parser = LFCliBase.create_basic_argparse( # see create_basic_argparse in ../py-json/LANforge/lfcli_base.py
|
||||||
prog='create_station.py',
|
prog='create_station.py',
|
||||||
formatter_class=argparse.RawTextHelpFormatter,
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
epilog='''\
|
epilog='''\
|
||||||
@@ -119,21 +127,32 @@ def main():
|
|||||||
--debug
|
--debug
|
||||||
''')
|
''')
|
||||||
required = parser.add_argument_group('required arguments')
|
required = parser.add_argument_group('required arguments')
|
||||||
required.add_argument('--start_id', help='--start_id <value> default 0', default=0)
|
required.add_argument(
|
||||||
|
'--start_id',
|
||||||
|
help='--start_id <value> default 0',
|
||||||
|
default=0)
|
||||||
|
|
||||||
optional = parser.add_argument_group('Optional arguments')
|
optional = parser.add_argument_group('Optional arguments')
|
||||||
optional.add_argument('--mode', help='Mode for your station (as a number)',default=0)
|
optional.add_argument(
|
||||||
optional.add_argument('--station_flag', help='station flags to add', required=False, default=None, action='append')
|
'--mode',
|
||||||
|
help='Mode for your station (as a number)',
|
||||||
|
default=0)
|
||||||
|
optional.add_argument(
|
||||||
|
'--station_flag',
|
||||||
|
help='station flags to add',
|
||||||
|
required=False,
|
||||||
|
default=None,
|
||||||
|
action='append')
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
# if args.debug:
|
# if args.debug:
|
||||||
# pprint.pprint(args)
|
# pprint.pprint(args)
|
||||||
# time.sleep(5)
|
# time.sleep(5)
|
||||||
if (args.radio is None):
|
if args.radio is None:
|
||||||
raise ValueError("--radio required")
|
raise ValueError("--radio required")
|
||||||
|
|
||||||
start_id = 0
|
start_id = 0
|
||||||
if (args.start_id != 0):
|
if args.start_id != 0:
|
||||||
start_id = int(args.start_id)
|
start_id = int(args.start_id)
|
||||||
|
|
||||||
num_sta = 2
|
num_sta = 2
|
||||||
@@ -148,16 +167,6 @@ def main():
|
|||||||
radio=args.radio)
|
radio=args.radio)
|
||||||
|
|
||||||
print("station_list {}".format(station_list))
|
print("station_list {}".format(station_list))
|
||||||
set_txo_data={
|
|
||||||
"txo_enable": 1,
|
|
||||||
"txpower": 255,
|
|
||||||
"pream": 0,
|
|
||||||
"mcs": 0,
|
|
||||||
"nss": 0,
|
|
||||||
"bw": 3,
|
|
||||||
"retries": 1,
|
|
||||||
"sgi": 0
|
|
||||||
}
|
|
||||||
|
|
||||||
create_station = CreateStation(_host=args.mgr,
|
create_station = CreateStation(_host=args.mgr,
|
||||||
_port=args.mgr_port,
|
_port=args.mgr_port,
|
||||||
@@ -175,5 +184,6 @@ def main():
|
|||||||
create_station.build()
|
create_station.build()
|
||||||
print('Created %s stations' % num_sta)
|
print('Created %s stations' % num_sta)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
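The reflowed optional arguments above include --station_flag with action='append'. A short standalone demo of how that accumulates repeated flags (the flag values below are placeholders, not a statement of which station flags LANforge accepts):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--station_flag', help='station flags to add',
                    required=False, default=None, action='append')
args = parser.parse_args(['--station_flag', 'flag_one', '--station_flag', 'flag_two'])
print(args.station_flag)   # ['flag_one', 'flag_two']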
|
|||||||
@@ -13,7 +13,6 @@ if sys.version_info[0] != 3:
|
|||||||
print("This script requires Python 3")
|
print("This script requires Python 3")
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
|
|
||||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||||
|
|
||||||
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
||||||
@@ -62,7 +61,6 @@ class CreateStation(Realm):
|
|||||||
pprint.pprint(self.sta_list)
|
pprint.pprint(self.sta_list)
|
||||||
print("---- ~Station List ----- ----- ----- ----- ----- ----- \n")
|
print("---- ~Station List ----- ----- ----- ----- ----- ----- \n")
|
||||||
|
|
||||||
|
|
||||||
def build(self):
|
def build(self):
|
||||||
# Build stations
|
# Build stations
|
||||||
self.station_profile.use_security(self.security, self.ssid, self.password)
|
self.station_profile.use_security(self.security, self.ssid, self.password)
|
||||||
@@ -80,8 +78,6 @@ class CreateStation(Realm):
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
required=[]
|
|
||||||
required.append({'name':'--df','help':'Which file you want to build stations off of?'})
|
|
||||||
parser = LFCliBase.create_basic_argparse(
|
parser = LFCliBase.create_basic_argparse(
|
||||||
prog='create_station_from_df.py',
|
prog='create_station_from_df.py',
|
||||||
formatter_class=argparse.RawTextHelpFormatter,
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
@@ -99,29 +95,29 @@ def main():
|
|||||||
--ssid netgear
|
--ssid netgear
|
||||||
--passwd BLANK
|
--passwd BLANK
|
||||||
--debug
|
--debug
|
||||||
''',
|
''')
|
||||||
more_required=required)
|
required = parser.add_argument_group('required arguments')
|
||||||
|
required.add_argument('df', help='Which file do you want to build stations off of?', required=True)
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
df=pd.read_csv(args.df)
|
df = pd.read_csv(args.df)
|
||||||
unique=df[['radio','ssid','passwd','security']].drop_duplicates().reset_index(drop=True)
|
unique = df[['radio', 'ssid', 'passwd', 'security']].drop_duplicates().reset_index(drop=True)
|
||||||
for item in unique.index:
|
for item in unique.index:
|
||||||
uniquedf=unique.iloc[item]
|
uniquedf = unique.iloc[item]
|
||||||
df1=df.merge(pd.DataFrame(uniquedf).transpose(),on=['radio','ssid','passwd','security'])
|
df1 = df.merge(pd.DataFrame(uniquedf).transpose(), on=['radio', 'ssid', 'passwd', 'security'])
|
||||||
try:
|
if uniquedf['radio']:
|
||||||
radio=uniquedf['radio']
|
radio = uniquedf['radio']
|
||||||
except:
|
else:
|
||||||
radio=args.radio
|
radio = args.radio
|
||||||
station_list=df1['station']
|
station_list = df1['station']
|
||||||
try:
|
if uniquedf['ssid']:
|
||||||
ssid=uniquedf['ssid']
|
ssid = uniquedf['ssid']
|
||||||
passwd=uniquedf['passwd']
|
passwd = uniquedf['passwd']
|
||||||
security=uniquedf['security']
|
security = uniquedf['security']
|
||||||
except:
|
else:
|
||||||
ssid=args.ssid
|
ssid = args.ssid
|
||||||
passwd=args.passwd
|
passwd = args.passwd
|
||||||
security=args.security
|
security = args.security
|
||||||
create_station = CreateStation(_host=args.mgr,
|
create_station = CreateStation(_host=args.mgr,
|
||||||
_port=args.mgr_port,
|
_port=args.mgr_port,
|
||||||
_ssid=ssid,
|
_ssid=ssid,
|
||||||
@@ -135,5 +131,6 @@ def main():
|
|||||||
create_station.build()
|
create_station.build()
|
||||||
print('Created %s stations' % len(unique.index))
|
print('Created %s stations' % len(unique.index))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
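One caution on the new required-arguments block above: argparse rejects required= on a positional argument such as 'df' and raises a TypeError when the parser is built. A hedged sketch of two accepted forms (argument name kept from the hunk; which form is intended is an assumption, not part of this commit):

import argparse

parser = argparse.ArgumentParser()
required = parser.add_argument_group('required arguments')
# A positional is already mandatory, so it takes no required= keyword:
#   required.add_argument('df', help='Which file do you want to build stations off of?')
# Or keep it as a flag, where required=True is valid:
required.add_argument('--df', help='Which file do you want to build stations off of?',
                      required=True)
print(parser.parse_args(['--df', 'stations.csv']).df)   # stations.csv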
|
|||||||
@@ -12,7 +12,7 @@ if sys.version_info[0] != 3:
|
|||||||
print("This script requires Python 3")
|
print("This script requires Python 3")
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
|
|
||||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||||
|
|
||||||
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
||||||
@@ -55,7 +55,10 @@ class CreateVAP(Realm):
|
|||||||
self.vap_list = _vap_list
|
self.vap_list = _vap_list
|
||||||
self.resource = _resource
|
self.resource = _resource
|
||||||
if _vap_flags is None:
|
if _vap_flags is None:
|
||||||
self.vap_flags = ["wpa2_enable", "80211u_enable", "create_admin_down"]
|
self.vap_flags = [
|
||||||
|
"wpa2_enable",
|
||||||
|
"80211u_enable",
|
||||||
|
"create_admin_down"]
|
||||||
else:
|
else:
|
||||||
self.vap_flags = _vap_flags
|
self.vap_flags = _vap_flags
|
||||||
self.mode = _mode
|
self.mode = _mode
|
||||||
@@ -74,8 +77,10 @@ class CreateVAP(Realm):
|
|||||||
self.vap_profile.ssid_pass = self.password
|
self.vap_profile.ssid_pass = self.password
|
||||||
self.vap_profile.dhcp = self.dhcp
|
self.vap_profile.dhcp = self.dhcp
|
||||||
self.vap_profile.mode = self.mode
|
self.vap_profile.mode = self.mode
|
||||||
self.vap_profile.desired_add_vap_flags = self.vap_flags + ["wpa2_enable", "80211u_enable", "create_admin_down"]
|
self.vap_profile.desired_add_vap_flags = self.vap_flags + \
|
||||||
self.vap_profile.desired_add_vap_flags_mask = self.vap_flags + ["wpa2_enable", "80211u_enable", "create_admin_down"]
|
["wpa2_enable", "80211u_enable", "create_admin_down"]
|
||||||
|
self.vap_profile.desired_add_vap_flags_mask = self.vap_flags + \
|
||||||
|
["wpa2_enable", "80211u_enable", "create_admin_down"]
|
||||||
if self.debug:
|
if self.debug:
|
||||||
print("----- VAP List ----- ----- ----- ----- ----- ----- \n")
|
print("----- VAP List ----- ----- ----- ----- ----- ----- \n")
|
||||||
pprint.pprint(self.vap_list)
|
pprint.pprint(self.vap_list)
|
||||||
@@ -83,19 +88,19 @@ class CreateVAP(Realm):
|
|||||||
|
|
||||||
def build(self):
|
def build(self):
|
||||||
# Build VAPs
|
# Build VAPs
|
||||||
self.vap_profile.use_security(self.security, self.ssid, passwd=self.password)
|
self.vap_profile.use_security(
|
||||||
|
self.security, self.ssid, passwd=self.password)
|
||||||
|
|
||||||
print("Creating VAPs")
|
print("Creating VAPs")
|
||||||
self.vap_profile.create(resource = self.resource,
|
self.vap_profile.create(resource=self.resource,
|
||||||
radio = self.radio,
|
radio=self.radio,
|
||||||
channel = self.channel,
|
channel=self.channel,
|
||||||
country=self.country_code,
|
up_=True,
|
||||||
up_ = True,
|
debug=False,
|
||||||
debug = False,
|
|
||||||
use_ht40=True,
|
use_ht40=True,
|
||||||
use_ht80=True,
|
use_ht80=True,
|
||||||
use_ht160=False,
|
use_ht160=False,
|
||||||
suppress_related_commands_ = True,
|
suppress_related_commands_=True,
|
||||||
use_radius=False,
|
use_radius=False,
|
||||||
hs20_enable=False,
|
hs20_enable=False,
|
||||||
bridge=self.bridge)
|
bridge=self.bridge)
|
||||||
@@ -125,57 +130,73 @@ Command example:
|
|||||||
''')
|
''')
|
||||||
|
|
||||||
optional = parser.add_argument_group('optional arguments')
|
optional = parser.add_argument_group('optional arguments')
|
||||||
optional.add_argument('--num_vaps', help='Number of VAPs to Create', required=False, default=1)
|
optional.add_argument(
|
||||||
optional.add_argument('--vap_flag', help='VAP flags to add', required=False, default=None, action='append')
|
'--num_vaps',
|
||||||
optional.add_argument('--bridge', help='Create a bridge connecting the VAP to a port', required=False, default=False)
|
help='Number of VAPs to Create',
|
||||||
optional.add_argument('--mac', help='Custom mac address', default="xx:xx:xx:xx:*:xx")
|
required=False,
|
||||||
|
default=1)
|
||||||
|
optional.add_argument(
|
||||||
|
'--vap_flag',
|
||||||
|
help='VAP flags to add',
|
||||||
|
required=False,
|
||||||
|
default=None,
|
||||||
|
action='append')
|
||||||
|
optional.add_argument(
|
||||||
|
'--bridge',
|
||||||
|
help='Create a bridge connecting the VAP to a port',
|
||||||
|
required=False,
|
||||||
|
default=False)
|
||||||
|
optional.add_argument(
|
||||||
|
'--mac',
|
||||||
|
help='Custom mac address',
|
||||||
|
default="xx:xx:xx:xx:*:xx")
|
||||||
optional.add_argument('--mode', default='AUTO')
|
optional.add_argument('--mode', default='AUTO')
|
||||||
optional.add_argument('--channel', default=36)
|
optional.add_argument('--channel', default=36)
|
||||||
optional.add_argument('--country_code', default=0)
|
optional.add_argument('--country_code', default=0)
|
||||||
optional.add_argument('--nss', default=False)
|
optional.add_argument('--nss', default=False)
|
||||||
optional.add_argument('--resource', default=1)
|
optional.add_argument('--resource', default=1)
|
||||||
optional.add_argument('--start_id', default=0)
|
optional.add_argument('--start_id', default=0)
|
||||||
optional.add_argument('--vap_name',default=None)
|
optional.add_argument('--vap_name', default=None)
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
#if args.debug:
|
# if args.debug:
|
||||||
# pprint.pprint(args)
|
# pprint.pprint(args)
|
||||||
# time.sleep(5)
|
# time.sleep(5)
|
||||||
if (args.radio is None):
|
if args.radio is None:
|
||||||
raise ValueError("--radio required")
|
raise ValueError("--radio required")
|
||||||
|
|
||||||
num_vap = int(args.num_vaps)
|
num_vap = int(args.num_vaps)
|
||||||
|
|
||||||
vap_list = LFUtils.port_name_series(prefix="vap",
|
vap_list = LFUtils.port_name_series(prefix="vap",
|
||||||
start_id=int(args.start_id),
|
start_id=int(args.start_id),
|
||||||
end_id=num_vap-1,
|
end_id=num_vap - 1,
|
||||||
padding_number=10000,
|
padding_number=10000,
|
||||||
radio=args.radio)
|
radio=args.radio)
|
||||||
#print(args.passwd)
|
# print(args.passwd)
|
||||||
#print(args.ssid)
|
# print(args.ssid)
|
||||||
|
|
||||||
if args.vap_name is None:
|
if args.vap_name is None:
|
||||||
for vap in vap_list:
|
for vap in vap_list:
|
||||||
create_vap = CreateVAP(_host=args.mgr,
|
create_vap = CreateVAP(_host=args.mgr,
|
||||||
_port=args.mgr_port,
|
_port=args.mgr_port,
|
||||||
_ssid=args.ssid,
|
_ssid=args.ssid,
|
||||||
_password=args.passwd,
|
_password=args.passwd,
|
||||||
_security=args.security,
|
_security=args.security,
|
||||||
_mode=args.mode,
|
_mode=args.mode,
|
||||||
_vap_list=vap,
|
_vap_list=vap,
|
||||||
_resource=args.resource,
|
_resource=args.resource,
|
||||||
_vap_flags=args.vap_flag,
|
_vap_flags=args.vap_flag,
|
||||||
_radio=args.radio,
|
_radio=args.radio,
|
||||||
_channel=args.channel,
|
_channel=args.channel,
|
||||||
_country_code=args.country_code,
|
_country_code=args.country_code,
|
||||||
_nss=args.nss,
|
_nss=args.nss,
|
||||||
_proxy_str=args.proxy,
|
_proxy_str=args.proxy,
|
||||||
_bridge=args.bridge,
|
_bridge=args.bridge,
|
||||||
_debug_on=args.debug)
|
_debug_on=args.debug)
|
||||||
print('Creating VAP')
|
print('Creating VAP')
|
||||||
|
|
||||||
create_vap.build()
|
create_vap.build()
|
||||||
else:
|
else:
|
||||||
vap_name = "vap"+args.vap_name
|
vap_name = "vap" + args.vap_name
|
||||||
create_vap = CreateVAP(_host=args.mgr,
|
create_vap = CreateVAP(_host=args.mgr,
|
||||||
_port=args.mgr_port,
|
_port=args.mgr_port,
|
||||||
_ssid=args.ssid,
|
_ssid=args.ssid,
|
||||||
@@ -196,5 +217,6 @@ Command example:
|
|||||||
|
|
||||||
create_vap.build()
|
create_vap.build()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
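Because _vap_flags already defaults to the same three flags, the concatenation in the hunk above repeats wpa2_enable / 80211u_enable / create_admin_down when the default is used. If duplicate flags ever matter, an order-preserving merge looks like this (a sketch only, not a change this commit makes):

base_flags = ["wpa2_enable", "80211u_enable", "create_admin_down"]
vap_flags = ["wpa2_enable", "80211u_enable", "create_admin_down"]   # the default case

merged = list(dict.fromkeys(vap_flags + base_flags))   # dicts keep insertion order in Python 3.7+
print(merged)   # ['wpa2_enable', '80211u_enable', 'create_admin_down']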
|
|||||||
@@ -12,7 +12,7 @@ if sys.version_info[0] != 3:
|
|||||||
print("This script requires Python 3")
|
print("This script requires Python 3")
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
|
|
||||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||||
|
|
||||||
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
|
||||||
@@ -50,7 +50,8 @@ class CreateVR(Realm):
|
|||||||
self.vr_profile = self.new_vr_profile()
|
self.vr_profile = self.new_vr_profile()
|
||||||
|
|
||||||
def clean(self):
|
def clean(self):
|
||||||
if (self.vr_name is None) or (self.vr_profile.vr_eid is None) and (self.vr_profile.vr_eid) == "":
|
if (self.vr_name is None) or (self.vr_profile.vr_eid is None) and (
|
||||||
|
self.vr_profile.vr_eid) == "":
|
||||||
print("No vr_eid to clean")
|
print("No vr_eid to clean")
|
||||||
return
|
return
|
||||||
self.rm_port("1.1.rd90a", debug_=self.debug)
|
self.rm_port("1.1.rd90a", debug_=self.debug)
|
||||||
@@ -59,13 +60,13 @@ class CreateVR(Realm):
|
|||||||
debug_=self.debug)
|
debug_=self.debug)
|
||||||
|
|
||||||
if (self.vr_profile.vr_eid is not None) \
|
if (self.vr_profile.vr_eid is not None) \
|
||||||
and (self.vr_profile.vr_eid[1] is not None) \
|
and (self.vr_profile.vr_eid[1] is not None) \
|
||||||
and (self.vr_profile.vr_eid[2] is not None):
|
and (self.vr_profile.vr_eid[2] is not None):
|
||||||
self.vr_profile.cleanup(debug=self.debug)
|
self.vr_profile.cleanup(debug=self.debug)
|
||||||
|
|
||||||
if (self.vr_name is not None) \
|
if (self.vr_name is not None) \
|
||||||
and (self.vr_name[1] is not None) \
|
and (self.vr_name[1] is not None) \
|
||||||
and (self.vr_name[2] is not None):
|
and (self.vr_name[2] is not None):
|
||||||
data = {
|
data = {
|
||||||
"shelf": 1,
|
"shelf": 1,
|
||||||
"resource": self.vr_name[1],
|
"resource": self.vr_name[1],
|
||||||
@@ -84,9 +85,9 @@ class CreateVR(Realm):
|
|||||||
"cx_name": "all"
|
"cx_name": "all"
|
||||||
}, debug_=self.debug)
|
}, debug_=self.debug)
|
||||||
|
|
||||||
|
|
||||||
def build(self):
|
def build(self):
|
||||||
self.vr_profile.apply_netsmith(self.vr_name[1], delay=5, debug=self.debug)
|
self.vr_profile.apply_netsmith(
|
||||||
|
self.vr_name[1], delay=5, debug=self.debug)
|
||||||
self.json_post("/cli-json/add_rdd", {
|
self.json_post("/cli-json/add_rdd", {
|
||||||
"shelf": 1,
|
"shelf": 1,
|
||||||
"resource": self.vr_name[1],
|
"resource": self.vr_name[1],
|
||||||
@@ -101,10 +102,17 @@ class CreateVR(Realm):
|
|||||||
"peer_ifname": "rd90a",
|
"peer_ifname": "rd90a",
|
||||||
"report_timer": "3000"
|
"report_timer": "3000"
|
||||||
})
|
})
|
||||||
self.wait_until_ports_appear(sta_list=["1.1.rd90a", "1.1.rd90b"], debug_=self.debug)
|
self.wait_until_ports_appear(
|
||||||
self.vr_profile.vrcx_list(resource=self.vr_name[1], do_sync=True) # do_sync
|
sta_list=[
|
||||||
|
"1.1.rd90a",
|
||||||
|
"1.1.rd90b"],
|
||||||
|
debug_=self.debug)
|
||||||
|
self.vr_profile.vrcx_list(
|
||||||
|
resource=self.vr_name[1],
|
||||||
|
do_sync=True) # do_sync
|
||||||
self.vr_profile.create(vr_name=self.vr_name, debug=self.debug)
|
self.vr_profile.create(vr_name=self.vr_name, debug=self.debug)
|
||||||
self.vr_profile.sync_netsmith(resource=self.vr_name[1], debug=self.debug)
|
self.vr_profile.sync_netsmith(
|
||||||
|
resource=self.vr_name[1], debug=self.debug)
|
||||||
self._pass("created router")
|
self._pass("created router")
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
@@ -113,25 +121,39 @@ class CreateVR(Realm):
|
|||||||
:return: void
|
:return: void
|
||||||
"""
|
"""
|
||||||
# move rd90a into router
|
# move rd90a into router
|
||||||
self.vr_profile.refresh_netsmith(resource=self.vr_name[1], debug=self.debug)
|
self.vr_profile.refresh_netsmith(
|
||||||
|
resource=self.vr_name[1], debug=self.debug)
|
||||||
if self.debug:
|
if self.debug:
|
||||||
pprint(("vr_eid", self.vr_name))
|
pprint(("vr_eid", self.vr_name))
|
||||||
self.vr_profile.wait_until_vrcx_appear(resource=self.vr_name[1], name_list=["rd90a", "rd90b"])
|
self.vr_profile.wait_until_vrcx_appear(
|
||||||
self.vr_profile.add_vrcx(vr_eid=self.vr_name, connection_name_list="rd90a", debug=True)
|
resource=self.vr_name[1], name_list=[
|
||||||
|
"rd90a", "rd90b"])
|
||||||
|
self.vr_profile.add_vrcx(
|
||||||
|
vr_eid=self.vr_name,
|
||||||
|
connection_name_list="rd90a",
|
||||||
|
debug=True)
|
||||||
|
|
||||||
self.vr_profile.refresh_netsmith(resource=self.vr_name[1], debug=self.debug)
|
self.vr_profile.refresh_netsmith(
|
||||||
|
resource=self.vr_name[1], debug=self.debug)
|
||||||
# test to make sure that vrcx is inside vr we expect
|
# test to make sure that vrcx is inside vr we expect
|
||||||
self.vr_profile.vrcx_list(resource=self.vr_name[1], do_sync=True)
|
self.vr_profile.vrcx_list(resource=self.vr_name[1], do_sync=True)
|
||||||
vr_list = self.vr_profile.router_list(resource=self.vr_name[1], do_refresh=True)
|
vr_list = self.vr_profile.router_list(
|
||||||
router = self.vr_profile.find_cached_router(resource=self.vr_name[1], router_name=self.vr_name[2])
|
resource=self.vr_name[1], do_refresh=True)
|
||||||
|
router = self.vr_profile.find_cached_router(
|
||||||
|
resource=self.vr_name[1], router_name=self.vr_name[2])
|
||||||
pprint(("cached router 120: ", router))
|
pprint(("cached router 120: ", router))
|
||||||
router_eid = LFUtils.name_to_eid(router["eid"])
|
router_eid = LFUtils.name_to_eid(router["eid"])
|
||||||
pprint(("router eid 122: ", router_eid))
|
pprint(("router eid 122: ", router_eid))
|
||||||
full_router = self.json_get("/vr/1/%s/%s/%s" %(router_eid[0], router_eid[1], self.vr_name[2]), debug_=True)
|
full_router = self.json_get(
|
||||||
|
"/vr/1/%s/%s/%s" %
|
||||||
|
(router_eid[0],
|
||||||
|
router_eid[1],
|
||||||
|
self.vr_name[2]),
|
||||||
|
debug_=True)
|
||||||
pprint(("full router: ", full_router))
|
pprint(("full router: ", full_router))
|
||||||
time.sleep(5)
|
time.sleep(5)
|
||||||
if router is None:
|
if router is None:
|
||||||
self._fail("Unable to find router after vrcx move "+self.vr_name)
|
self._fail("Unable to find router after vrcx move " + self.vr_name)
|
||||||
self.exit_fail()
|
self.exit_fail()
|
||||||
|
|
||||||
def stop(self):
|
def stop(self):
|
||||||
@@ -146,8 +168,8 @@ def main():
|
|||||||
--------------------
|
--------------------
|
||||||
Command example:
|
Command example:
|
||||||
{f} --vr_name 1.vr0 --ports 1.br0,1.rdd0a --services 1.br0=dhcp,nat --services 1.vr0=radvd
|
{f} --vr_name 1.vr0 --ports 1.br0,1.rdd0a --services 1.br0=dhcp,nat --services 1.vr0=radvd
|
||||||
{f} --vr_name 2.vr0 --ports 2.br0,2.vap2 --services
|
{f} --vr_name 2.vr0 --ports 2.br0,2.vap2 --services
|
||||||
|
|
||||||
--debug
|
--debug
|
||||||
""".format(f=__file__))
|
""".format(f=__file__))
|
||||||
required = parser.add_argument_group('required arguments')
|
required = parser.add_argument_group('required arguments')
|
||||||
@@ -156,8 +178,11 @@ Command example:
|
|||||||
|
|
||||||
optional = parser.add_argument_group('optional arguments')
|
optional = parser.add_argument_group('optional arguments')
|
||||||
|
|
||||||
optional.add_argument('--ports', default=None, required=False,
|
optional.add_argument(
|
||||||
help='Comma separated list of ports to add to virtual router')
|
'--ports',
|
||||||
|
default=None,
|
||||||
|
required=False,
|
||||||
|
help='Comma separated list of ports to add to virtual router')
|
||||||
optional.add_argument('--services', default=None, required=False,
|
optional.add_argument('--services', default=None, required=False,
|
||||||
help='Add router services to a port, "br0=nat,dhcp"')
|
help='Add router services to a port, "br0=nat,dhcp"')
|
||||||
|
|
||||||
@@ -175,10 +200,11 @@ Command example:
|
|||||||
create_vr.build()
|
create_vr.build()
|
||||||
create_vr.start()
|
create_vr.start()
|
||||||
create_vr.monitor()
|
create_vr.monitor()
|
||||||
#create_vr.stop()
|
# create_vr.stop()
|
||||||
#create_vr.clean()
|
# create_vr.clean()
|
||||||
print('Created Virtual Router')
|
print('Created Virtual Router')
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|
||||||
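The reflowed clean() guard above keeps Python's original grouping, since and binds tighter than or: the test reads A or (B and C). A small demonstration, with the or-of-all-three form shown only as an assumed reading of the intent:

vr_name = "1.1.vr0"   # a router name is set
vr_eid = ""           # but the cached eid is empty

# note: (vr_eid is None) and (vr_eid == "") can never both be true, so the and-clause is always False
as_written = (vr_name is None) or (vr_eid is None) and (vr_eid == "")
assumed_intent = (vr_name is None) or (vr_eid is None) or (vr_eid == "")
print(as_written, assumed_intent)   # False True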
|
|||||||
@@ -1,314 +0,0 @@
|
|||||||
#!/usr/bin/python3
|
|
||||||
"""
|
|
||||||
|
|
||||||
Create and modify WAN Links Using LANforge JSON AP : http://www.candelatech.com/cookbook.php?vol=cli&book=JSON:+Managing+WANlinks+using+JSON+and+Python
|
|
||||||
Written by Candela Technologies Inc.
|
|
||||||
Updated by: Erin Grimes
|
|
||||||
|
|
||||||
"""
|
|
||||||
import sys
|
|
||||||
|
|
||||||
if sys.version_info[0] != 3:
|
|
||||||
print("This script requires Python 3")
|
|
||||||
exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
from time import sleep
|
|
||||||
import urllib
|
|
||||||
import pprint
|
|
||||||
|
|
||||||
sys.path.append("../py-json")
|
|
||||||
from LANforge import LFRequest
|
|
||||||
from LANforge import LFUtils
|
|
||||||
from LANforge.lfcli_base import LFCliBase
|
|
||||||
|
|
||||||
j_printer = pprint.PrettyPrinter(indent=2)
|
|
||||||
# todo: this needs to change
|
|
||||||
resource_id = 1
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = LFCliBase.create_basic_argparse()
|
|
||||||
args = parser.parse_args()
|
|
||||||
base_url = 'http://%s:%s' % (args.mgr, args.mgr_port)
|
|
||||||
print(base_url)
|
|
||||||
json_post = ""
|
|
||||||
json_response = ""
|
|
||||||
num_wanlinks = -1
|
|
||||||
|
|
||||||
# force a refresh on the ports and wanlinks
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/nc_show_ports", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
"shelf": 1,
|
|
||||||
"resource": 1,
|
|
||||||
"port": "all",
|
|
||||||
})
|
|
||||||
json_response = lf_r.jsonPost(debug=True)
|
|
||||||
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/nc_show_endpoints", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
"endpoint": "all"
|
|
||||||
})
|
|
||||||
json_response = lf_r.jsonPost(debug=True)
|
|
||||||
|
|
||||||
sleep(1)
|
|
||||||
|
|
||||||
# see if there are old wanlinks to remove
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/wl_ep/list", debug_=True)
|
|
||||||
json_reponse = lf_r.get_as_json()
|
|
||||||
|
|
||||||
endpA = args['name']+"-A"
|
|
||||||
endpB = args['name']+"-B"
|
|
||||||
|
|
||||||
# count the number of wanlink endpoints
|
|
||||||
if "endpoint" in json_response:
|
|
||||||
endpoint_map = LFUtils.list_to_alias_map(json_list=json_reponse, from_element="endpoint")
|
|
||||||
if endpA in endpoint_map:
|
|
||||||
num_wanlinks += 1
|
|
||||||
if endpB in endpoint_map:
|
|
||||||
num_wanlinks += 1
|
|
||||||
|
|
||||||
# remove old wanlinks
|
|
||||||
if (num_wanlinks > 0):
|
|
||||||
print("Removing old wanlinks...")
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_cx", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
'test_mgr': 'all',
|
|
||||||
'cx_name': args['name']
|
|
||||||
})
|
|
||||||
lf_r.jsonPost()
|
|
||||||
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
'endp_name': endpA
|
|
||||||
})
|
|
||||||
lf_r.jsonPost()
|
|
||||||
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/rm_endp", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
'endp_name': endpB
|
|
||||||
})
|
|
||||||
lf_r.jsonPost()
|
|
||||||
sleep(1)
|
|
||||||
|
|
||||||
# check to see if we have ports
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/ports/1/1/list", debug_=True)
|
|
||||||
port_response = lf_r.getAsJson()
|
|
||||||
|
|
||||||
if "interfaces" not in port_response:
|
|
||||||
print("No interfaces in port_response!")
|
|
||||||
pprint.pprint(port_response)
|
|
||||||
exit(1)
|
|
||||||
|
|
||||||
if "interfaces" in port_response:
|
|
||||||
port_map = LFUtils.list_to_alias_map(json_list=port_response, from_element="interfaces")
|
|
||||||
ports_created = 0
|
|
||||||
if args["port_A"] not in port_map:
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_rdd", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
"shelf": 1,
|
|
||||||
"resource": 1,
|
|
||||||
"port": args["port_A"],
|
|
||||||
"peer_ifname": args["port_A"]+"b",
|
|
||||||
})
|
|
||||||
json_reponse = lf_r.jsonPost(debug=True)
|
|
||||||
if not json_response:
|
|
||||||
print("could not create port "+args["port_A"])
|
|
||||||
exit(1)
|
|
||||||
sleep(0.1)
|
|
||||||
ports_created += 1
|
|
||||||
if args["port_B"] not in port_map:
|
|
||||||
lf_r.addPostData({
|
|
||||||
"shelf": 1,
|
|
||||||
"resource": 1,
|
|
||||||
"port": args["port_B"],
|
|
||||||
"peer_ifname": args["port_B"]+"b",
|
|
||||||
})
|
|
||||||
json_reponse = lf_r.jsonPost(debug=True)
|
|
||||||
if not json_response:
|
|
||||||
print("could not create port " + args["port_B"])
|
|
||||||
exit(1)
|
|
||||||
ports_created += 1
|
|
||||||
sleep(0.1)
|
|
||||||
if ports_created > 0:
|
|
||||||
LFUtils.wait_until_ports_appear(base_url=base_url,
|
|
||||||
port_list=(args["port_A"], args["port_B"]),
|
|
||||||
debug=True)
|
|
||||||
print("Created {} ports".format(ports_created))
|
|
||||||
|
|
||||||
# create wanlink endpoint A
|
|
||||||
print("Adding WL Endpoints...", end='')
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
'alias': endpA,
|
|
||||||
'shelf': 1,
|
|
||||||
'resource': '1',
|
|
||||||
'port': args['port_A'],
|
|
||||||
'latency': args['latency_A'],
|
|
||||||
'max_rate': args['rate_A'],
|
|
||||||
})
|
|
||||||
json_response = lf_r.jsonPost(debug=True)
|
|
||||||
if not json_response:
|
|
||||||
print("Unable to create "+endpA)
|
|
||||||
else:
|
|
||||||
print("A, ", end='')
|
|
||||||
# create wanlink endpoint B
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_wl_endp", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
'alias': endpB,
|
|
||||||
'shelf': 1,
|
|
||||||
'resource': '1',
|
|
||||||
'port': args['port_B'],
|
|
||||||
'latency': args['latency_B'],
|
|
||||||
'max_rate': args['rate_B'],
|
|
||||||
})
|
|
||||||
json_response = lf_r.jsonPost()
|
|
||||||
if not json_response:
|
|
||||||
print("Unable to create "+endpB)
|
|
||||||
else:
|
|
||||||
print("B")
|
|
||||||
sleep(1)
|
|
||||||
|
|
||||||
# create cx
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/add_cx", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
'alias': args['name'],
|
|
||||||
'test_mgr': 'default_tm',
|
|
||||||
'tx_endp': endpA,
|
|
||||||
'rx_endp': endpB
|
|
||||||
})
|
|
||||||
lf_r.jsonPost(debug=True)
|
|
||||||
sleep(0.5)
|
|
||||||
|
|
||||||
# modify wanlink endpoint A
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
'name': endpA,
|
|
||||||
'max_jitter': args['jitter_A'],
|
|
||||||
'jitter_freq': args['jitter_freq_A'],
|
|
||||||
'drop_freq': args['drop_A']
|
|
||||||
})
|
|
||||||
lf_r.jsonPost(debug=True)
|
|
||||||
|
|
||||||
# modify wanlink endpoint B
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_wanlink_info", debug_=True)
|
|
||||||
lf_r.addPostData({
|
|
||||||
'name': endpB,
|
|
||||||
'max_jitter': args['jitter_B'],
|
|
||||||
'jitter_freq': args['jitter_freq_B'],
|
|
||||||
'drop_freq': args['drop_B']
|
|
||||||
})
|
|
||||||
lf_r.jsonPost()
|
|
||||||
|
|
||||||
# start wanlink once we see it
|
|
||||||
seen = 0
|
|
||||||
print("Looking for {} and {}: ".format(endpA, endpB), end='')
|
|
||||||
while (seen < 2):
|
|
||||||
sleep(1)
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/wl_ep/list?fields=name,eid")
|
|
||||||
try:
|
|
||||||
json_response = lf_r.getAsJson()
|
|
||||||
if json_response is None:
|
|
||||||
print(".", end="")
|
|
||||||
continue
|
|
||||||
LFUtils.debug_printer.pprint(json_response)
|
|
||||||
if "endpoint" not in json_response:
|
|
||||||
print("-", end="")
|
|
||||||
continue
|
|
||||||
|
|
||||||
endpoint_map = LFUtils.list_to_alias_map(json_list=json_response["endpoint"],
|
|
||||||
from_element="endpoint")
|
|
||||||
if endpA in endpoint_map:
|
|
||||||
seen += 1
|
|
||||||
print("+", end="")
|
|
||||||
if endpB in endpoint_map:
|
|
||||||
seen += 1
|
|
||||||
print("+", end="")
|
|
||||||
|
|
||||||
except urllib.error.HTTPError as error:
|
|
||||||
print("Error code {}".format(error.code))
|
|
||||||
continue
|
|
||||||
print("")
|
|
||||||
print("Starting wanlink:")
|
|
||||||
# print("the latency is {laten}".format(laten=latency))
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
|
|
||||||
lf_r.addPostData({
|
|
||||||
'test_mgr': 'all',
|
|
||||||
'cx_name': args['name'],
|
|
||||||
'cx_state': 'RUNNING'
|
|
||||||
})
|
|
||||||
lf_r.jsonPost()
|
|
||||||
|
|
||||||
running = 0
|
|
||||||
while (running < 1):
|
|
||||||
sleep(1)
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,state,_links")
|
|
||||||
try:
|
|
||||||
json_response = lf_r.getAsJson()
|
|
||||||
if (json_response is None):
|
|
||||||
continue
|
|
||||||
for key, value in json_response.items():
|
|
||||||
if (isinstance(value, dict)):
|
|
||||||
if ("_links" in value):
|
|
||||||
if (value["name"] == args['name']):
|
|
||||||
if (value["state"].startswith("Run")):
|
|
||||||
LFUtils.debug_printer.pprint(json_response)
|
|
||||||
running = 1
|
|
||||||
|
|
||||||
except urllib.error.HTTPError as error:
|
|
||||||
print("Error code {}".format(error.code))
|
|
||||||
continue
|
|
||||||
|
|
||||||
print("Wanlink is running")
|
|
||||||
|
|
||||||
# stop wanlink
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/cli-json/set_cx_state")
|
|
||||||
lf_r.addPostData({
|
|
||||||
'test_mgr': 'all',
|
|
||||||
'cx_name': args['name'],
|
|
||||||
'cx_state': 'STOPPED'
|
|
||||||
})
|
|
||||||
lf_r.jsonPost()
|
|
||||||
running = 1
|
|
||||||
while (running > 0):
|
|
||||||
sleep(1)
|
|
||||||
lf_r = LFRequest.LFRequest(base_url+"/wl/"+args['name']+"?fields=name,eid,state,_links")
|
|
||||||
LFUtils.debug_printer.pprint(json_response)
|
|
||||||
try:
|
|
||||||
json_response = lf_r.getAsJson()
|
|
||||||
if (json_response is None):
|
|
||||||
continue
|
|
||||||
for key, value in json_response.items():
|
|
||||||
if (isinstance(value, dict)):
|
|
||||||
if ("_links" in value):
|
|
||||||
if (value["name"] == args['name']):
|
|
||||||
if (value["state"].startswith("Stop")):
|
|
||||||
LFUtils.debug_printer.pprint(json_response)
|
|
||||||
running = 0
|
|
||||||
|
|
||||||
except urllib.error.HTTPError as error:
|
|
||||||
print("Error code {}".format(error.code))
|
|
||||||
continue
|
|
||||||
|
|
||||||
print("Wanlink is stopped.")
|
|
||||||
|
|
||||||
# print("Wanlink info:")
|
|
||||||
# lf_r = LFRequest.LFRequest(base_url+"/wl/wl_eg1")
|
|
||||||
# json_response = lf_r.getAsJson()
|
|
||||||
# LFUtils.debug_printer.pprint(json_response)
|
|
||||||
|
|
||||||
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-A")
|
|
||||||
# json_response = lf_r.getAsJson()
|
|
||||||
# LFUtils.debug_printer.pprint(json_response)
|
|
||||||
|
|
||||||
# lf_r = LFRequest.LFRequest(base_url+"/wl_ep/wl_eg1-B")
|
|
||||||
# json_response = lf_r.getAsJson()
|
|
||||||
# LFUtils.debug_printer.pprint(json_response)
|
|
||||||
|
|
||||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
||||||
@@ -23,14 +23,12 @@ if sys.version_info[0] != 3:
|
|||||||
print("This script requires Python 3")
|
print("This script requires Python 3")
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
|
|
||||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||||
|
|
||||||
|
|
||||||
class CSVParcer():
|
class CSVParcer:
|
||||||
def __init__(self,csv_infile=None,csv_outfile=None):
|
def __init__(self, csv_infile=None, csv_outfile=None):
|
||||||
|
|
||||||
idx = 0
|
|
||||||
i_atten = -1
|
i_atten = -1
|
||||||
i_rotation = -1
|
i_rotation = -1
|
||||||
i_rxbps = -1
|
i_rxbps = -1
|
||||||
@@ -46,34 +44,33 @@ class CSVParcer():
|
|||||||
x = line.split(",")
|
x = line.split(",")
|
||||||
cni = 0
|
cni = 0
|
||||||
for cn in x:
|
for cn in x:
|
||||||
if (cn == "Attenuation [dB]"):
|
if cn == "Attenuation [dB]":
|
||||||
i_atten = cni
|
i_atten = cni
|
||||||
if (cn == "Position [Deg]"):
|
if cn == "Position [Deg]":
|
||||||
i_rotation = cni
|
i_rotation = cni
|
||||||
if (cn == "Throughput [Mbps]"):
|
if cn == "Throughput [Mbps]":
|
||||||
i_rxbps = cni
|
i_rxbps = cni
|
||||||
if (cn == "Beacon RSSI [dBm]"):
|
if cn == "Beacon RSSI [dBm]":
|
||||||
i_beacon_rssi = cni
|
i_beacon_rssi = cni
|
||||||
if (cn == "Data RSSI [dBm]"):
|
if cn == "Data RSSI [dBm]":
|
||||||
i_data_rssi = cni
|
i_data_rssi = cni
|
||||||
cni += 1
|
cni += 1
|
||||||
|
|
||||||
# Write out the header for the new file.
|
# Write out the header for the new file.
|
||||||
fpo.write("Test Run,Position [Deg],Attenuation 1 [dB],Pal Stats Endpoint 1 Control Rssi [dBm],Pal Stats Endpoint 1 Data Rssi [dBm]\n")
|
fpo.write(
|
||||||
|
"Test Run,Position [Deg],Attenuation 1 [dB],Pal Stats Endpoint 1 Control Rssi [dBm],Pal Stats Endpoint 1 Data Rssi [dBm]\n")
|
||||||
|
|
||||||
# Read the rest of the input lines, processing one at a time. Convert the columns as
|
# Read the rest of the input lines, processing one at a time. Convert the columns as
|
||||||
# needed, and write out new data to the output file.
|
# needed, and write out new data to the output file.
|
||||||
line = fp.readline()
|
line = fp.readline()
|
||||||
|
|
||||||
bottom_half="Step Index,Position [Deg],Attenuation [dB],Traffic Pair 1 Throughput [Mbps]\n"
|
bottom_half = "Step Index,Position [Deg],Attenuation [dB],Traffic Pair 1 Throughput [Mbps]\n"
|
||||||
|
|
||||||
test_run="1"
|
test_run = "1"
|
||||||
|
|
||||||
step_i = 0
|
step_i = 0
|
||||||
while line:
|
while line:
|
||||||
x = line.split(",")
|
x = line.split(",")
|
||||||
#print(x)
|
|
||||||
#print([test_run, x[i_rotation], x[i_atten], x[i_beacon_rssi], x[i_data_rssi]])
|
|
||||||
fpo.write("%s,%s,%s,%s,%s" % (test_run, x[i_rotation], x[i_atten], x[i_beacon_rssi], x[i_data_rssi]))
|
fpo.write("%s,%s,%s,%s,%s" % (test_run, x[i_rotation], x[i_atten], x[i_beacon_rssi], x[i_data_rssi]))
|
||||||
bottom_half += ("%s,%s,%s,%s\n" % (step_i, x[i_rotation], x[i_atten], x[i_rxbps]))
|
bottom_half += ("%s,%s,%s,%s\n" % (step_i, x[i_rotation], x[i_atten], x[i_rxbps]))
|
||||||
line = fp.readline()
|
line = fp.readline()
|
||||||
@@ -83,37 +80,36 @@ class CSVParcer():
|
|||||||
fpo.write("\n\n# RvRvO Data\n\n")
|
fpo.write("\n\n# RvRvO Data\n\n")
|
||||||
fpo.write(bottom_half)
|
fpo.write(bottom_half)
|
||||||
|
|
||||||
def main():
|
|
||||||
|
|
||||||
#debug_on = False
|
def main():
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
prog='csv_convert.py',
|
prog='csv_convert.py',
|
||||||
formatter_class=argparse.RawTextHelpFormatter,
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
epilog='''\
|
epilog='''\
|
||||||
Useful Information:
|
Useful Information:
|
||||||
''',
|
''',
|
||||||
|
|
||||||
description='''
|
description='''
|
||||||
csv_convert.py:
|
csv_convert.py:
|
||||||
converts the Candela brief csv into the data format for a specific customer,
|
converts the Candela brief csv into the data format for a specific customer,
|
||||||
''')
|
''')
|
||||||
|
|
||||||
# for testing parser.add_argument('-i','--infile', help="input file of csv data", default='text-csv-0-candela.csv')
|
parser.add_argument('-i', '--infile', help="input file of csv data", required=True)
|
||||||
parser.add_argument('-i','--infile', help="input file of csv data", required=True)
|
parser.add_argument('-o', '--outfile', help="output file in .csv format", default='outfile.csv')
|
||||||
parser.add_argument('-o','--outfile', help="output file in .csv format", default='outfile.csv')
|
|
||||||
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
csv_outfile_name = None
|
csv_outfile_name = None
|
||||||
|
csv_infile_name = None
|
||||||
|
|
||||||
if args.infile:
|
if args.infile:
|
||||||
csv_infile_name = args.infile
|
csv_infile_name = args.infile
|
||||||
if args.outfile:
|
if args.outfile:
|
||||||
csv_outfile_name = args.outfile
|
csv_outfile_name = args.outfile
|
||||||
|
|
||||||
print("infile: %s outfile: %s"%(csv_infile_name, csv_outfile_name))
|
print("infile: %s outfile: %s" % (csv_infile_name, csv_outfile_name))
|
||||||
|
|
||||||
CSVParcer(csv_infile_name, csv_outfile_name)
|
CSVParcer(csv_infile_name, csv_outfile_name)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
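For context, the conversion above is essentially a header-remapping pass over the brief CSV: locate the source column indexes once, then rewrite each data row under new headers. A minimal sketch of that pattern, using the csv module and only the column names that appear in the header string above (the function name and layout are illustrative, not the script's API):

import csv

# Sketch of the CSVParcer remapping pass (hypothetical helper, not the real class).
def convert(infile, outfile):
    with open(infile, newline='') as fin, open(outfile, 'w', newline='') as fout:
        reader = csv.reader(fin)
        writer = csv.writer(fout)
        header = next(reader)
        # The real script resolves these indexes from the first line of the input CSV.
        i_rotation = header.index("Position [Deg]")
        i_atten = header.index("Attenuation 1 [dB]")
        writer.writerow(["Test Run", "Position [Deg]", "Attenuation 1 [dB]"])
        for row in reader:
            writer.writerow(["1", row[i_rotation], row[i_atten]])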
0
py-scripts/cv_examples/ap_auto_example.bash
Normal file
@@ -8,7 +8,6 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)

-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 cv_test_manager = importlib.import_module("py-json.cv_test_manager")
@@ -29,6 +28,7 @@ class CVManager(cv_test):
         self.apply_cv_scenario(self.scenario)
         self.build_cv_scenario()


+
 def main():
     parser = argparse.ArgumentParser(
         prog='cv_manager.py',
@@ -45,5 +45,6 @@ def main():
                         lfclient_host=args.mgr)
     manager.apply_and_build_scenario()

-if __name__ =="__main__":
+
+if __name__ == "__main__":
     main()
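The CVManager hunks above are mostly whitespace, but the two calls they wrap are the whole point of the script: stage a saved Chamber View scenario, then build it. A rough sketch of that flow, assuming a cv_test instance from py-json.cv_test_manager as used elsewhere in this commit (the helper function itself is illustrative):

def apply_and_build(cv_manager, scenario_name):
    """Sketch of CVManager.apply_and_build_scenario(); cv_manager is a
    py-json.cv_test_manager.cv_test instance pointed at the LANforge GUI."""
    cv_manager.apply_cv_scenario(scenario_name)  # stage the saved Chamber View scenario
    cv_manager.build_cv_scenario()               # ask the GUI to build it so ports exist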
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-'''
+"""
 This script loads and builds a Chamber View Scenario, runs WiFi Capacity Test, runs Dataplane Test,
 and posts the results to Influx.
 There are optional arguments which will create a Grafana dashboard which will import the data posted to
@@ -59,7 +59,7 @@ AP Auto test has the following argument:
   DUT syntax is somewhat tricky: DUT-name SSID BSID (bssid-idx), example: linksys-8450 Default-SSID-5gl c4:41:1e:f5:3f:25 (2)
 * radio2: Specify 2.4Ghz radio. May be specified multiple times.
 * radio5: Specify 5Ghz radio. May be specified multiple times.
-'''
+"""
 import sys
 import os
 import importlib
@@ -70,20 +70,24 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)

 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 lf_wifi_capacity_test = importlib.import_module("py-scripts.lf_wifi_capacity_test")
 WiFiCapacityTest = lf_wifi_capacity_test.WiFiCapacityTest
-cv_test_manager = importlib.import_module("py-scripts.cv_test_manager")
+cv_test_manager = importlib.import_module("py-json.cv_test_manager")
 create_chamberview = importlib.import_module("py-scripts.create_chamberview")
 CreateChamberview = create_chamberview.CreateChamberview
-DUT = create_chamberview.DUT
+create_chamberview_dut = importlib.import_module("py-scripts.create_chamberview_dut")
+DUT = create_chamberview_dut.DUT
 lf_dataplane_test = importlib.import_module("py-scripts.lf_dataplane_test")
 DataplaneTest = lf_dataplane_test.DataplaneTest
 grafana_profile = importlib.import_module("py-scripts.grafana_profile")
 UseGrafana = grafana_profile.UseGrafana
 lf_ap_auto_test = importlib.import_module("py-scripts.lf_ap_auto_test")
+ApAutoTest = lf_ap_auto_test.ApAutoTest
+
+cv_add_base_parser = cv_test_manager.cv_add_base_parser
+cv_base_adjust_parser = cv_add_base_parser.cv_base_adjust_parser


 def main():
@@ -169,7 +173,7 @@ def main():
     parser.add_argument('--grafana_port', help='Grafana port if different from 3000', default=3000)
     parser.add_argument('--grafana_host', help='Grafana host', default='localhost')

-    #Flags for AP-Auto Test config
+    # Flags for AP-Auto Test config

     parser.add_argument("--max_stations_2", type=int, default=-1,
                         help="Specify maximum 2.4Ghz stations")
@@ -187,14 +191,15 @@ def main():
     parser.add_argument("--radio5", action='append', nargs=1, default=[],
                         help="Specify 5Ghz radio. May be specified multiple times.")

-    #Flags for Grafana
+    # Flags for Grafana

     parser.add_argument('--dashboard_title', help='Titles of dashboards', default=None, action='append')
     parser.add_argument('--scripts', help='Scripts to graph in Grafana', default=None, action='append')
     parser.add_argument('--title', help='title of your Grafana Dashboard', default=None)
     parser.add_argument('--testbed', help='Which testbed you want to query', default=None)
-    parser.add_argument('--graph_groups_file', help='File which determines how you want to filter your graphs on your dashboard',
-                        default=None)
+    parser.add_argument('--graph_groups_file',
+                        help='File which determines how you want to filter your graphs on your dashboard',
+                        default=None)
     parser.add_argument('--kpi', help='KPI file(s) which you want to graph form', action='append', default=None)
     parser.add_argument('--datasource', help='Name of Influx database if different from InfluxDB', default='InfluxDB')
     parser.add_argument('--from_date', help='Date you want to start your Grafana dashboard from', default='now-1y')
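Several hunks in this commit only repoint importlib paths (for example py-scripts.cv_test_manager versus py-json.cv_test_manager). The loading pattern itself is the same everywhere in these scripts; a small standalone sketch, assuming the script lives two directories below the repository root as the sys.path line implies:

import importlib
import os
import sys

# Make the repository root importable, then load a module by its dotted path.
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

cv_test_manager = importlib.import_module("py-json.cv_test_manager")
cv_test = cv_test_manager.cv_test                      # attribute lookup on the loaded module
cv_add_base_parser = cv_test_manager.cv_add_base_parser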
@@ -8,7 +8,7 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)

 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 lfcli_base = importlib.import_module("py-json.LANforge.lfcli_base")
@@ -19,7 +19,7 @@ Realm = realm.Realm


 class IPv4Test(LFCliBase):
-    def __init__(self, ssid, security, password, sta_list=None, ap=None, mode = 0, number_template="00000", host="localhost", port=8080,radio = "wiphy0",_debug_on=False,
+    def __init__(self, ssid, security, password, sta_list=None, ap=None, mode=0, number_template="00000", host="localhost", port=8080, radio="wiphy0", _debug_on=False,
                  _exit_on_error=False,
                  _exit_on_fail=False):
         super().__init__(host, port, _debug=_debug_on, _exit_on_fail=_exit_on_fail)
@@ -35,43 +35,49 @@ class IPv4Test(LFCliBase):
         self.timeout = 120
         self.number_template = number_template
         self.debug = _debug_on
-        self.local_realm = realm.Realm(lfclient_host=self.host, lfclient_port=self.port)
+        self.local_realm = realm.Realm(
+            lfclient_host=self.host, lfclient_port=self.port)
         self.station_profile = self.local_realm.new_station_profile()

         self.station_profile.lfclient_url = self.lfclient_url
         self.station_profile.ssid = self.ssid
         self.station_profile.ssid_pass = self.password
-        self.station_profile.mode =self.mode
+        self.station_profile.mode = self.mode
         self.station_profile.security = self.security
         self.station_profile.number_template_ = self.number_template
         self.station_profile.mode = mode
         if self.ap is not None:
-            self.station_profile.set_command_param("add_sta", "ap",self.ap)
+            self.station_profile.set_command_param("add_sta", "ap", self.ap)

     def build(self):
         # Build stations
         #print("We've gotten into the build stations function")
-        self.station_profile.use_security(self.security, self.ssid, self.password)
+        self.station_profile.use_security(
+            self.security, self.ssid, self.password)
         self.station_profile.set_number_template(self.number_template)
         print("Creating stations")
-        self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
-        self.station_profile.set_command_param("set_port", "report_timer", 1500)
+        self.station_profile.set_command_flag(
+            "add_sta", "create_admin_down", 1)
+        self.station_profile.set_command_param(
+            "set_port", "report_timer", 1500)
         self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
-        self.station_profile.create(radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
+        self.station_profile.create(
+            radio=self.radio, sta_names_=self.sta_list, debug=self.debug)
         self.station_profile.admin_up()
         if self.local_realm.wait_for_ip(station_list=self.sta_list, debug=self.debug, timeout_sec=30):
             self._pass("Station build finished")
             self.exit_success()
         else:
-            self._fail("Stations not able to acquire IP. Please check network input.")
+            self._fail(
+                "Stations not able to acquire IP. Please check network input.")
             self.exit_fail()

     def cleanup(self, sta_list):
         self.station_profile.cleanup(sta_list)
         LFUtils.wait_until_ports_disappear(base_url=self.lfclient_url, port_list=sta_list,
                                            debug=self.debug)


 def main():
     parser = LFCliBase.create_basic_argparse(
@@ -97,18 +103,19 @@ def main():
             --passwd admin123-wpa3
             --debug
             ''')
-    required=None
+    required = None
     for agroup in parser._action_groups:
         if agroup.title == "required arguments":
             required = agroup
-    #if required is not None:
+    # if required is not None:
     optional = None
     for agroup in parser._action_groups:
         if agroup.title == "optional arguments":
             optional = agroup
     if optional is not None:
-        optional.add_argument('--mode',help=LFCliBase.Help_Mode)
-        optional.add_argument('--ap',help='Add BSSID of access point to connect to')
+        optional.add_argument('--mode', help=LFCliBase.Help_Mode)
+        optional.add_argument(
+            '--ap', help='Add BSSID of access point to connect to')

     args = parser.parse_args()
     num_sta = 2
@@ -117,18 +124,19 @@ def main():
         num_sta = num_stations_converted

     station_list = LFUtils.portNameSeries(prefix_="sta",
                                           start_id_=0,
                                           end_id_=num_sta-1,
                                           padding_number_=10000,
                                           radio=args.radio)
     ip_test = IPv4Test(host=args.mgr, port=args.mgr_port,
                        ssid=args.ssid, password=args.passwd,
-                       radio=args.radio, mode= args.mode,
+                       radio=args.radio, mode=args.mode,
                        security=args.security, sta_list=station_list,
                        ap=args.ap)
     ip_test.cleanup(station_list)
     ip_test.timeout = 60
     ip_test.build()


 if __name__ == "__main__":
     main()
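The main() hunk above extends the parser returned by LFCliBase.create_basic_argparse() by fishing the existing argument groups out of parser._action_groups and appending to them. The same trick works with plain argparse; a stripped-down illustration (LFCliBase itself is not needed for the idea, and note the stock group title changed to "options" on Python 3.10+):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--ssid', required=True)

# Locate the built-in "optional arguments" group and append extra flags to it.
optional = None
for agroup in parser._action_groups:
    if agroup.title in ("optional arguments", "options"):  # "options" on Python 3.10+
        optional = agroup
if optional is not None:
    optional.add_argument('--mode', help='station mode')
    optional.add_argument('--ap', help='Add BSSID of access point to connect to')

args = parser.parse_args(['--ssid', 'test-ssid', '--ap', '00:11:22:33:44:55'])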
@@ -172,13 +172,13 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)

 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 cv_test_manager = importlib.import_module("py-json.cv_test_manager")
 cvtest = cv_test_manager.cv_test
 cv_add_base_parser = cv_test_manager.cv_add_base_parser
 cv_base_adjust_parser = cv_test_manager.cv_base_adjust_parser
+LFUtils = importlib.import_module("py-json.LANforge.LFUtils")


 class ApAutoTest(cvtest):
@@ -188,10 +188,11 @@ class ApAutoTest(cvtest):
                  lf_user="lanforge",
                  lf_password="lanforge",
                  ssh_port=22,
-                 local_lf_report_dir="",
+                 local_lf_report_dir=None,
+                 lf_report_dir=None,
                  instance_name="ap_auto_instance",
                  config_name="ap_auto_config",
-                 upstream="1.1.eth1",
+                 upstream=None,
                  pull_report=False,
                  dut5_0="NA",
                  dut2_0="NA",
@@ -199,21 +200,33 @@ class ApAutoTest(cvtest):
                  max_stations_2=100,
                  max_stations_5=100,
                  max_stations_dual=200,
-                 radio2=[],
-                 radio5=[],
-                 enables=[],
-                 disables=[],
-                 raw_lines=[],
+                 radio2=None,
+                 radio5=None,
+                 enables=None,
+                 disables=None,
+                 raw_lines=None,
                  raw_lines_file="",
-                 sets=[],
+                 sets=None,
                  graph_groups=None
                  ):
         super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)

+        if radio2 is None:
+            radio2 = []
+        if radio5 is None:
+            radio5 = []
+        if enables is None:
+            enables = []
+        if disables is None:
+            disables = []
+        if raw_lines is None:
+            raw_lines = []
+        if sets is None:
+            sets = []
         self.lf_host = lf_host
         self.lf_port = lf_port
         self.lf_user = lf_user
-        self.lf_password =lf_password
+        self.lf_password = lf_password
         self.instance_name = instance_name
         self.config_name = config_name
         self.upstream = upstream
@@ -234,19 +247,19 @@ class ApAutoTest(cvtest):
         self.sets = sets
         self.ssh_port = ssh_port
         self.graph_groups = graph_groups
+        self.lf_report_dir = lf_report_dir
         self.local_lf_report_dir = local_lf_report_dir

     def setup(self):
         # Nothing to do at this time.
         return

     def run(self):
         self.sync_cv()
         time.sleep(2)
         self.sync_cv()

-        blob_test = "%s-"%(self.test_name)
+        blob_test = "%s-" % self.test_name

         self.rm_text_blob(self.config_name, blob_test)  # To delete old config with same name
         self.show_text_blob(None, None, False)
@@ -256,19 +269,19 @@ class ApAutoTest(cvtest):

         ridx = 0
         for r in self.radio2:
-            cfg_options.append("radio2-%i: %s"%(ridx, r[0]))
+            cfg_options.append("radio2-%i: %s" % (ridx, r[0]))
             ridx += 1

         ridx = 0
         for r in self.radio5:
-            cfg_options.append("radio5-%i: %s"%(ridx, r[0]))
+            cfg_options.append("radio5-%i: %s" % (ridx, r[0]))
             ridx += 1

         self.apply_cfg_options(cfg_options, self.enables, self.disables, self.raw_lines, self.raw_lines_file)

         # Command line args take precedence.
-        if self.upstream != "":
-            cfg_options.append("upstream_port: " + self.upstream)
+        if self.upstream:
+            cfg_options.append("upstream-port: %s" % self.upstream)
         if self.dut5_0 != "":
             cfg_options.append("dut5-0: " + self.dut5_0)
         if self.dut2_0 != "":
@@ -294,7 +307,6 @@ class ApAutoTest(cvtest):


 def main():

     parser = argparse.ArgumentParser(
         prog="lf_ap_auto_test.py",
         formatter_class=argparse.RawTextHelpFormatter,
@@ -302,28 +314,28 @@ def main():
     Open this file in an editor and read the top notes for more details.

     Example:
-    ./lf_ap_auto_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \
-      --instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth2 \
-      --dut5_0 'linksys-8450 Default-SSID-5gl c4:41:1e:f5:3f:25 (2)' \
-      --dut2_0 'linksys-8450 Default-SSID-2g c4:41:1e:f5:3f:24 (1)' \
-      --max_stations_2 100 --max_stations_5 100 --max_stations_dual 200 \
-      --radio2 1.1.wiphy0 --radio2 1.1.wiphy2 \
-      --radio5 1.1.wiphy1 --radio5 1.1.wiphy3 --radio5 1.1.wiphy4 \
-      --radio5 1.1.wiphy5 --radio5 1.1.wiphy6 --radio5 1.1.wiphy7 \
-      --set 'Basic Client Connectivity' 1 --set 'Multi Band Performance' 1 \
-      --set 'Skip 2.4Ghz Tests' 1 --set 'Skip 5Ghz Tests' 1 \
-      --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Stability' 0 --set 'Band-Steering' 0 \
-      --set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Long-Term' 0 \
-      --test_rig Testbed-01 --test_tag ATH10K --pull_report \
-      --influx_host c7-graphana --influx_port 8086 --influx_org Candela \
-      --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \
-      --influx_bucket ben \
+    ./lf_ap_auto_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \\
+      --instance_name ap-auto-instance --config_name test_con --upstream 1.1.eth2 \\
+      --dut5_0 'linksys-8450 Default-SSID-5gl c4:41:1e:f5:3f:25 (2)' \\
+      --dut2_0 'linksys-8450 Default-SSID-2g c4:41:1e:f5:3f:24 (1)' \\
+      --max_stations_2 100 --max_stations_5 100 --max_stations_dual 200 \\
+      --radio2 1.1.wiphy0 --radio2 1.1.wiphy2 \\
+      --radio5 1.1.wiphy1 --radio5 1.1.wiphy3 --radio5 1.1.wiphy4 \\
+      --radio5 1.1.wiphy5 --radio5 1.1.wiphy6 --radio5 1.1.wiphy7 \\
+      --set 'Basic Client Connectivity' 1 --set 'Multi Band Performance' 1 \\
+      --set 'Skip 2.4Ghz Tests' 1 --set 'Skip 5Ghz Tests' 1 \\
+      --set 'Throughput vs Pkt Size' 0 --set 'Capacity' 0 --set 'Stability' 0 --set 'Band-Steering' 0 \\
+      --set 'Multi-Station Throughput vs Pkt Size' 0 --set 'Long-Term' 0 \\
+      --test_rig Testbed-01 --test_tag ATH10K --pull_report \\
+      --influx_host c7-graphana --influx_port 8086 --influx_org Candela \\
+      --influx_token=-u_Wd-L8o992701QF0c5UmqEp7w7Z7YOMaWLxOMgmHfATJGnQbbmYyNxHBR9PgD6taM_tcxqJl6U8DjU1xINFQ== \\
+      --influx_bucket ben \\
       --influx_tag testbed Ferndale-01
     """
          )
     cv_add_base_parser(parser)  # see cv_test_manager.py

-    parser.add_argument("-u", "--upstream", type=str, default="",
+    parser.add_argument("-u", "--upstream", type=str, default=None,
                         help="Upstream port for wifi capacity test ex. 1.1.eth1")

     parser.add_argument("--max_stations_2", type=int, default=-1,
@@ -341,39 +353,46 @@ def main():
                         help="Specify 2.4Ghz radio. May be specified multiple times.")
     parser.add_argument("--radio5", action='append', nargs=1, default=[],
                         help="Specify 5Ghz radio. May be specified multiple times.")
-    parser.add_argument("--local_lf_report_dir", help="--local_lf_report_dir <where to pull reports to> default '' put where dataplane script run from",default="")
+    parser.add_argument("--local_lf_report_dir",
+                        help="--local_lf_report_dir <where to pull reports to> default '' put where dataplane script run from",
+                        default="")
+    parser.add_argument("--lf_report_dir",
+                        help="--lf_report_dir <where to pull reports from> default '' put where dataplane script run from",
+                        default="")

     args = parser.parse_args()

     cv_base_adjust_parser(args)

-    CV_Test = ApAutoTest(lf_host = args.mgr,
-                         lf_port = args.port,
-                         lf_user = args.lf_user,
-                         lf_password = args.lf_password,
-                         instance_name = args.instance_name,
-                         config_name = args.config_name,
-                         upstream = args.upstream,
-                         pull_report = args.pull_report,
-                         local_lf_report_dir = args.local_lf_report_dir,
-                         dut5_0 = args.dut5_0,
-                         dut2_0 = args.dut2_0,
-                         load_old_cfg = args.load_old_cfg,
-                         max_stations_2 = args.max_stations_2,
-                         max_stations_5 = args.max_stations_5,
-                         max_stations_dual = args.max_stations_dual,
-                         radio2 = args.radio2,
-                         radio5 = args.radio5,
-                         enables = args.enable,
-                         disables = args.disable,
-                         raw_lines = args.raw_line,
-                         raw_lines_file = args.raw_lines_file,
-                         sets = args.set
+    CV_Test = ApAutoTest(lf_host=args.mgr,
+                         lf_port=args.port,
+                         lf_user=args.lf_user,
+                         lf_password=args.lf_password,
+                         instance_name=args.instance_name,
+                         config_name=args.config_name,
+                         upstream=args.upstream,
+                         pull_report=args.pull_report,
+                         local_lf_report_dir=args.local_lf_report_dir,
+                         lf_report_dir=args.lf_report_dir,
+                         dut5_0=args.dut5_0,
+                         dut2_0=args.dut2_0,
+                         load_old_cfg=args.load_old_cfg,
+                         max_stations_2=args.max_stations_2,
+                         max_stations_5=args.max_stations_5,
+                         max_stations_dual=args.max_stations_dual,
+                         radio2=args.radio2,
+                         radio5=args.radio5,
+                         enables=args.enable,
+                         disables=args.disable,
+                         raw_lines=args.raw_line,
+                         raw_lines_file=args.raw_lines_file,
+                         sets=args.set
                          )
     CV_Test.setup()
     CV_Test.run()

     CV_Test.check_influx_kpi(args)


 if __name__ == "__main__":
     main()
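A large share of the ApAutoTest hunk replaces mutable default arguments (radio2=[], sets=[], and so on) with None sentinels that are normalised inside __init__. The motivation is the usual Python pitfall: a default list is built once at function definition time and then shared across every call. A tiny illustration, independent of the LANforge classes:

def broken(item, bucket=[]):      # one shared list for every call
    bucket.append(item)
    return bucket

def fixed(item, bucket=None):     # fresh list per call unless the caller passes one
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(broken(1), broken(2))   # [1, 2] [1, 2]  -- state leaks between calls
print(fixed(1), fixed(2))     # [1] [2]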
38
py-scripts/lf_csv.py
Normal file → Executable file
@@ -42,39 +42,9 @@ class lf_csv:
         print(csv_df)
         csv_df.to_csv(self.filename, index=False, encoding='utf-8', na_rep='NA', float_format='%.2f')

-# this layout may need to change
-'''
-kpi.csv : specific file that is used for the database, dashboard and blog post
-A blank entry is a valid entry in some cases.
-
-Date: date of run
-test-rig : testbed that the tests are run on for example ct_us_001
-test-tag : test specific information to differenciate the test, LANforge radios used, security modes (wpa2 , open)
-dut-hw-version : hardware version of the device under test
-dut-sw-version : software version of the device under test
-dut-model-num : model number / name of the device under test
-test-priority : test-priority is arbitrary number, choosing under 95 means it goes down at bottom of blog report, and higher priority goes at top.
-test-id : script or test name , AP Auto, wifi capacity, data plane, dfs
-short-description : short description of the test
-pass/fail : set blank for performance tests
-numeric-score : this is the value for the y-axis (x-axis is a timestamp), numeric value of what was measured
-test-details : what was measured in the numeric-score, e.g. bits per second, bytes per second, upload speed, minimum cx time (ms)
-Units : units used for the numeric-scort
-Graph-Group - For the dashboard the graph / panel to put the resutls in . Currently the dashboard is Grafana
-
-'''
-class lf_kpi_csv:
-    def __init__(self,
-                 _kpi_headers = ['Date','test-rig','test-tag','dut-hw-version','dut-sw-version','dut-model-num',
-                                 'test-priority','test-id','short-description','pass/fail','numberic-score'
-                                 'test details','Units','Graph-Group','Subtest-Pass','Subtest-Fail'],
-                 _kpi_file='kpi.csv' #Currently this is the only file name accepted
-                 ):
-        self.kpi_headers = _kpi_headers
-        self.kpi_rows = ""
-        self.kpi_filename = _kpi_file
-
-
-if __name__ == "__main__":
+def main():
     test = lf_csv()
     test.generate_csv()
+
+
+if __name__ == "__main__":
+    main()
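The block removed above documented the kpi.csv column set before the lf_kpi_csv stub was dropped from this file. For reference, emitting a row with those headers is a single csv.DictWriter call; a minimal sketch that reuses the header names from the deleted docstring (the row values are placeholders, not real results):

import csv

KPI_HEADERS = ['Date', 'test-rig', 'test-tag', 'dut-hw-version', 'dut-sw-version',
               'dut-model-num', 'test-priority', 'test-id', 'short-description',
               'pass/fail', 'numeric-score', 'test details', 'Units', 'Graph-Group',
               'Subtest-Pass', 'Subtest-Fail']

with open('kpi.csv', 'w', newline='') as f:
    writer = csv.DictWriter(f, fieldnames=KPI_HEADERS)
    writer.writeheader()
    # Unset columns are written as blanks, which the docstring says is valid.
    writer.writerow({'Date': '2021-09-01', 'test-id': 'AP Auto',
                     'numeric-score': 42.0, 'Units': 'Mbps', 'Graph-Group': 'Throughput'})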
@@ -9,7 +9,7 @@ Note: To Run this script gui should be opened with
 This script is used to automate running Dataplane tests. You
 may need to view a Dataplane test configured through the GUI to understand
 the options and how best to input data.

     ./lf_dataplane_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \
       --instance_name dataplane-instance --config_name test_con --upstream 1.1.eth2 \
       --dut linksys-8450 --duration 15s --station 1.1.sta01500 \
@@ -39,7 +39,7 @@ port_sorting: 0
 kpi_id: Dataplane Pkt-Size
 notes0: ec5211 in bridge mode, wpa2 auth.
 bg: 0xE0ECF8
 test_rig:
 show_scan: 1
 auto_helper: 0
 skip_2: 0
@@ -87,7 +87,7 @@ show_1m: 1
 pause_iter: 0
 outer_loop_atten: 0
 show_realtime: 1
 operator:
 mconn: 1
 mpkt: 1000
 tos: 0
@@ -105,7 +105,6 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)

-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 cv_test_manager = importlib.import_module("py-json.cv_test_manager")
@@ -132,17 +131,24 @@ class DataplaneTest(cv_test):
                  duration="15s",
                  station="1.1.sta01500",
                  dut="NA",
-                 enables=[],
-                 disables=[],
-                 raw_lines=[],
+                 enables=None,
+                 disables=None,
+                 raw_lines=None,
                  raw_lines_file="",
-                 sets=[],
+                 sets=None,
                  graph_groups=None,
-                 report_dir="",
                  test_rig=""
                  ):
         super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)

+        if enables is None:
+            enables = []
+        if disables is None:
+            disables = []
+        if raw_lines is None:
+            raw_lines = []
+        if sets is None:
+            sets = []
         self.lf_host = lf_host
         self.lf_port = lf_port
         self.lf_user = lf_user
@@ -164,7 +170,6 @@ class DataplaneTest(cv_test):
         self.raw_lines_file = raw_lines_file
         self.sets = sets
         self.graph_groups = graph_groups
-        self.report_dir = report_dir
         self.ssh_port = ssh_port
         self.local_lf_report_dir = local_lf_report_dir
         self.test_rig = test_rig
@@ -180,14 +185,16 @@ class DataplaneTest(cv_test):

         blob_test = "dataplane-test-latest-"

-        self.rm_text_blob(self.config_name, blob_test)  # To delete old config with same name
+        # To delete old config with same name
+        self.rm_text_blob(self.config_name, blob_test)
         self.show_text_blob(None, None, False)

         # Test related settings
         cfg_options = []

         ### HERE###
-        self.apply_cfg_options(cfg_options, self.enables, self.disables, self.raw_lines, self.raw_lines_file)
+        self.apply_cfg_options(cfg_options, self.enables,
+                               self.disables, self.raw_lines, self.raw_lines_file)

         # cmd line args take precedence and so come last in the cfg array.
         if self.upstream != "":
@@ -215,7 +222,8 @@ class DataplaneTest(cv_test):
                               self.pull_report, self.lf_host, self.lf_user, self.lf_password,
                               cv_cmds, ssh_port=self.ssh_port, local_lf_report_dir=self.local_lf_report_dir,
                               graph_groups_file=self.graph_groups)
-        self.rm_text_blob(self.config_name, blob_test)  # To delete old config with same name
+        # To delete old config with same name
+        self.rm_text_blob(self.config_name, blob_test)


 def main():
@@ -226,7 +234,7 @@ def main():

   IMPORTANT: Start lanforge with socket 3990 : ./lfclient.bash -cli-socket 3990
   lfclient.bash is located in the LANforgeGUI_X.X.X directory

   On local or remote system: ./lfclient.bash -cli-socket 3990 -s LF_MGR
   On local system the -s LF_MGR will be local_host if not provided
@@ -246,7 +254,7 @@ def main():
   --influx_bucket ben \
   --influx_tag testbed Ferndale-01

   Example 2:
   ./lf_dataplane_test.py --json <name>.json
@@ -254,46 +262,47 @@ def main():

   Sample <name>.json between using eth1 and eth2
   {
   "mgr":"192.168.0.101",
   "port":"8080",
   "lf_user":"lanforge",
   "lf_password":"lanforge",
   "instance_name":"dataplane-instance",
   "config_name":"test_con",
   "upstream":"1.1.eth1",
   "dut":"asus_5g",
   "duration":"15s",
   "station":"1.1.eth2",
   "download_speed":"85%",
   "upload_speed":"0",
   "raw_line": ["pkts: Custom;60;MTU", "cust_pkt_sz: 88 1200", "directions: DUT Transmit", "traffic_types: UDP", "bandw_options: 20", "spatial_streams: 1"]
   }

   Sample <name>.json between using eth1 and station 1.1.sta0002
   {
   "mgr":"192.168.0.101",
   "port":"8080",
   "lf_user":"lanforge",
   "lf_password":"lanforge",
   "instance_name":"dataplane-instance",
   "config_name":"test_con",
   "upstream":"1.1.eth1",
   "dut":"asus_5g",
   "duration":"15s",
   "station":"1.1.sta0002",
   "download_speed":"85%",
   "upload_speed":"0",
   "raw_line": ["pkts: Custom;60;MTU", "cust_pkt_sz: 88 1200", "directions: DUT Transmit", "traffic_types: UDP", "bandw_options: 20", "spatial_streams: 1"]
   }

   """
          )
     cv_add_base_parser(parser)  # see cv_test_manager.py

-    parser.add_argument('--json', help="--json <config.json> json input file", default="")
-    parser.add_argument('--influx_json', help="--influx_json <influx_config.json> influx config json input file",
-                        default="")
+    parser.add_argument(
+        '--json', help="--json <config.json> json input file", default="")
+    parser.add_argument(
+        '--influx_json', help="--influx_json <influx_config.json> influx config json input file", default="")
     parser.add_argument("-u", "--upstream", type=str, default="",
                         help="Upstream port for wifi capacity test ex. 1.1.eth2")
     parser.add_argument("--station", type=str, default="",
@@ -307,8 +316,8 @@ def main():
                         help="Specify requested upload speed. Percentage of theoretical is also supported. Default: 0")
     parser.add_argument("--duration", default="",
                         help="Specify duration of each traffic run")
-    parser.add_argument("--graph_groups", help="File to save graph_groups to", default=None)
-    parser.add_argument("--report_dir", default="")
+    parser.add_argument(
+        "--graph_groups", help="File to save graph_groups to", default=None)
     parser.add_argument("--local_lf_report_dir",
                         help="--local_lf_report_dir <where to pull reports to> default '' put where dataplane script run from",
                         default="")
@@ -316,12 +325,12 @@ def main():
     args = parser.parse_args()

     # use json config file
-    if args.json != "":
-        try:
+    if args.json:
+        if os.path.exists(args.json):
             with open(args.json, 'r') as json_config:
                 json_data = json.load(json_config)
-        except:
-            print("Error reading {}".format(args.json))
+        else:
+            return FileNotFoundError("Error reading {}".format(args.json))
         # json configuation takes presidence to command line
         if "mgr" in json_data:
             args.mgr = json_data["mgr"]
@@ -356,12 +365,12 @@ def main():
             args.raw_line = json_data_tmp

     # use influx json config file
-    if args.influx_json != "":
-        try:
-            with open(args.influx_json, 'r') as influx_json_config:
-                influx_json_data = json.load(influx_json_config)
-        except:
-            print("Error reading {}".format(args.influx_json))
+    if args.influx_json:
+        if os.path.exists(args.influx_json):
+            with open(args.influx_json, 'r') as json_config:
+                influx_json_data = json.load(json_config)
+        else:
+            return FileNotFoundError("Error reading {}".format(args.influx_json))
         # json configuation takes presidence to command line
         # influx DB configuration
         if "influx_host" in influx_json_data:
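The last two hunks swap a bare try/except around the JSON config file for an explicit os.path.exists() check. The overall pattern (parse the CLI first, let a JSON file override it when given) looks roughly like the sketch below; the file keys are the ones from the sample config in the help text, and the sketch raises the FileNotFoundError rather than returning it as the hunk does:

import argparse
import json
import os

parser = argparse.ArgumentParser()
parser.add_argument('--json', default="")
parser.add_argument('--mgr', default="localhost")
args = parser.parse_args()

if args.json:
    if os.path.exists(args.json):
        with open(args.json, 'r') as json_config:
            json_data = json.load(json_config)
    else:
        raise FileNotFoundError("Error reading {}".format(args.json))
    # json configuration takes precedence over the command line
    if "mgr" in json_data:
        args.mgr = json_data["mgr"]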
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-'''
+"""
 This Script has two classes :
 1. LoadScenario : It will load the existing saved scenario to the Lanforge (Here used for Loading Bridged VAP)
 2. CreateSTA_CX : It will create stations and L3 Cross connects and start them
@@ -7,11 +7,11 @@
     In this example, Another Lanforge is used as DUT
 It also have a function : GenerateReport that generates the report in xlsx format as well as it plots the Graph of throughput over time with temperature
 It also have Plot function that generates a html page that contains the plot

 Prerequisite
 Start the Lanforge Manager both Sides

 Installation
 pip install paramiko
 pip install bokeh
@@ -21,13 +21,12 @@
     .\Lexus_Final.py --lf_host 192.168.200.15 --dut_host 192.168.200.18 --dut_radio wiphy1 --lf_radio wiphy1 --num_sta 1 --sta_id 1 --lf_ssid lanforge_ap --dut_ssid lexusap --security open --dut_upstream eth2 --lf_upstream eth1 --protocol lf_udp --min_bps 1000 --max_bps 10000 --time 1
 This Script is intended to automate the testing of DUT that has stations as well as AP.
     To automate the simultaenous testing and check the DUT Temperature
-'''
+"""
 import sys
 import os
 import importlib
 import argparse
 import time
-import logging
 import paramiko as pm
 from paramiko.ssh_exception import NoValidConnectionsError as exception
 import xlsxwriter
@@ -39,7 +38,6 @@ if sys.version_info[0] != 3:
     print("This script requires Python 3")
     exit(1)

-
 sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

 LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
@@ -53,58 +51,60 @@ Realm = realm.Realm
 class Login_DUT:

     def __init__(self, threadID, name, HOST):
         self.threadID = threadID
         self.name = name
-        self.host=HOST
+        self.host = HOST
         self.USERNAME = "lanforge"
         self.PASSWORD = "lanforge"
-        self.CLIENT= pm.SSHClient()
-        self.LF1= self.Connect()
-        self.data_core1=[]
-        self.data_core2=[]
+        self.CLIENT = pm.SSHClient()
+        self.LF1 = self.Connect()
+        self.data_core1 = []
+        self.data_core2 = []
         if self.CLIENT == 0:
             exit()
-        print("Connected to " +HOST+" DUT to Measure the Core Temperature")
+        print("Connected to " + HOST + " DUT to Measure the Core Temperature")

     def run(self):
-        stdin, stdout, stderr= self.CLIENT.exec_command("sensors")
+        stdin, stdout, stderr = self.CLIENT.exec_command("sensors")
         out_lines = stdout.readlines()
         err_lines = stderr.readlines()
-        print(out_lines[len(out_lines)-3], out_lines[len(out_lines)-2])
-        self.data_core1.append(out_lines[len(out_lines)-3])
-        self.data_core2.append(out_lines[len(out_lines)-2])
+        print(out_lines[len(out_lines) - 3], out_lines[len(out_lines) - 2])
+        self.data_core1.append(out_lines[len(out_lines) - 3])
+        self.data_core2.append(out_lines[len(out_lines) - 2])

     def Connect(self):
         self.CLIENT.load_system_host_keys()
         self.CLIENT.set_missing_host_key_policy(pm.AutoAddPolicy())
         try:
-            self.CLIENT.connect(self.host, username=self.USERNAME, password=self.PASSWORD,timeout=10)
+            self.CLIENT.connect(self.host, username=self.USERNAME, password=self.PASSWORD, timeout=10)
             return None
         except exception as error:
-            self.CLIENT = 0;
+            self.CLIENT = 0
             return None

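Login_DUT above drives the whole temperature measurement through a single paramiko exec_command("sensors") call over SSH. A condensed, standalone version of that flow for reference (host and credentials are the defaults already shown in the script, not new values):

import paramiko as pm

client = pm.SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(pm.AutoAddPolicy())
client.connect("192.168.200.18", username="lanforge", password="lanforge", timeout=10)

# "sensors" prints per-core temperatures; the script reads Core 0 / Core 1 from the last lines.
stdin, stdout, stderr = client.exec_command("sensors")
out_lines = stdout.readlines()
print(out_lines[-3], out_lines[-2])
client.close()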
# Class to Load a Scenario that has been Created in Chamber View saved under DB/[Database_Name]
|
# Class to Load a Scenario that has been Created in Chamber View saved under DB/[Database_Name]
|
||||||
class LoadScenario(LFCliBase):
|
class LoadScenario(LFCliBase):
|
||||||
def __init__(self, host, port, db_name, security_debug_on=False, _exit_on_error=False,_exit_on_fail=False):
|
def __init__(self, host, port, db_name, security_debug_on=False, _exit_on_error=False, _exit_on_fail=False):
|
||||||
super().__init__(host, port, _debug=security_debug_on, _exit_on_fail=_exit_on_fail)
|
super().__init__(host, port, _debug=security_debug_on, _exit_on_fail=_exit_on_fail)
|
||||||
self.host = host
|
self.host = host
|
||||||
self.port = port
|
self.port = port
|
||||||
self.json_post("/cli-json/load", { "name": db_name, "action": 'overwrite' })
|
self.json_post("/cli-json/load", {"name": db_name, "action": 'overwrite'})
|
||||||
print(host+ " : Scenario Loaded...")
|
print(host + " : Scenario Loaded...")
|
||||||
time.sleep(2)
|
time.sleep(2)
|
||||||
|
|
||||||
|
|
||||||
# Class to create stations and run L3 Cross connects and run them for given time. It also stores the endpoint names for measuring throughput
|
# Class to create stations and run L3 Cross connects and run them for given time. It also stores the endpoint names for measuring throughput
|
||||||
class CreateSTA_CX(LFCliBase):
|
class CreateSTA_CX(LFCliBase):
|
||||||
|
|
||||||
def __init__(self, host, port, radio, num_sta, sta_id, ssid, security, password, upstream, protocol, min_bps, max_bps, security_debug_on=True, _exit_on_error=True, _exit_on_fail=True):
|
def __init__(self, host, port, radio, num_sta, sta_id, ssid, security, password, upstream, protocol, min_bps,
|
||||||
|
max_bps, security_debug_on=True, _exit_on_error=True, _exit_on_fail=True):
|
||||||
super().__init__(host, port, _debug=security_debug_on, _exit_on_fail=_exit_on_fail)
|
super().__init__(host, port, _debug=security_debug_on, _exit_on_fail=_exit_on_fail)
|
||||||
|
|
||||||
self.host = host
|
self.host = host
|
||||||
self.port = port
|
self.port = port
|
||||||
self.radio = radio
|
self.radio = radio
|
||||||
|
|
||||||
self.num_sta = num_sta
|
self.num_sta = num_sta
|
||||||
self.sta_id = sta_id
|
self.sta_id = sta_id
|
||||||
|
|
||||||
@@ -116,25 +116,25 @@ class CreateSTA_CX(LFCliBase):
|
|||||||
self.upstream = upstream
|
self.upstream = upstream
|
||||||
self.protocol = protocol
|
self.protocol = protocol
|
||||||
|
|
||||||
self.min_bps =min_bps
|
self.min_bps = min_bps
|
||||||
self.max_bps =max_bps
|
self.max_bps = max_bps
|
||||||
|
|
||||||
#Creating a Realm Object
|
# Creating a Realm Object
|
||||||
self.local_realm = Realm(lfclient_host=host, lfclient_port=port)
|
self.local_realm = Realm(lfclient_host=host, lfclient_port=port)
|
||||||
|
|
||||||
#Creating Profile Objects
|
# Creating Profile Objects
|
||||||
self.station_profile = self.local_realm.new_station_profile()
|
self.station_profile = self.local_realm.new_station_profile()
|
||||||
self.cx_profile = self.local_realm.new_l3_cx_profile()
|
self.cx_profile = self.local_realm.new_l3_cx_profile()
|
||||||
|
|
||||||
#Setting CX Name
|
# Setting CX Name
|
||||||
self.cx_profile.name_prefix_="Connection"
|
self.cx_profile.name_prefix_ = "Connection"
|
||||||
self.cx_names = []
|
self.cx_names = []
|
||||||
self.sta_list = []
|
self.sta_list = []
|
||||||
self.endp=[]
|
self.endp = []
|
||||||
for i in range(sta_id,sta_id+num_sta):
|
for i in range(sta_id, sta_id + num_sta):
|
||||||
self.sta_list.append("sta00")
|
self.sta_list.append("sta00")
|
||||||
|
|
||||||
#portDhcpUpRequest
|
# portDhcpUpRequest
|
||||||
'''
|
'''
|
||||||
upstream_dhcp = LFRequest.LFRequest("http://"+str(host)+":"+str(port)+"/"+"/cli-form/set_port")
|
upstream_dhcp = LFRequest.LFRequest("http://"+str(host)+":"+str(port)+"/"+"/cli-form/set_port")
|
||||||
upstream_dhcp.addPostData( LFUtils.portSetDhcpDownRequest(1, upstream))
|
upstream_dhcp.addPostData( LFUtils.portSetDhcpDownRequest(1, upstream))
|
||||||
@@ -147,42 +147,42 @@ class CreateSTA_CX(LFCliBase):
|
|||||||
|
|
||||||
def build(self):
|
def build(self):
|
||||||
|
|
||||||
#Creating Stations of Given Profile Settings
|
# Creating Stations of Given Profile Settings
|
||||||
self.station_profile.use_security(self.security, self.ssid, passwd=self.password)
|
self.station_profile.use_security(self.security, self.ssid, passwd=self.password)
|
||||||
self.station_profile.create(self.radio, num_stations=self.num_sta, sta_names_=self.sta_list)
|
self.station_profile.create(self.radio, num_stations=self.num_sta, sta_names_=self.sta_list)
|
||||||
self.station_profile.admin_up()
|
self.station_profile.admin_up()
|
||||||
#Wait for a while
|
# Wait for a while
|
||||||
time.sleep(15)
|
time.sleep(15)
|
||||||
|
|
||||||
#Setting up the Parameters for CX
|
# Setting up the Parameters for CX
|
||||||
self.cx_profile.side_a_min_bps = self.min_bps
|
self.cx_profile.side_a_min_bps = self.min_bps
|
||||||
self.cx_profile.side_b_min_bps = self.min_bps
|
self.cx_profile.side_b_min_bps = self.min_bps
|
||||||
self.cx_profile.side_a_max_bps = self.max_bps
|
self.cx_profile.side_a_max_bps = self.max_bps
|
||||||
self.cx_profile.side_b_max_bps = self.max_bps
|
self.cx_profile.side_b_max_bps = self.max_bps
|
||||||
|
|
||||||
self.cx_profile.side_a_min_pdu = 'Auto'
|
self.cx_profile.side_a_min_pdu = 'Auto'
|
||||||
self.cx_profile.side_b_min_pdu = 'Auto'
|
self.cx_profile.side_b_min_pdu = 'Auto'
|
||||||
self.cx_profile.report_timer = 1000
|
self.cx_profile.report_timer = 1000
|
||||||
self.cx_profile.side_a_min_pkt='Same'
|
self.cx_profile.side_a_min_pkt = 'Same'
|
||||||
self.cx_profile.side_a_max_pkt='Same'
|
self.cx_profile.side_a_max_pkt = 'Same'
|
||||||
|
|
||||||
#Create Connections of Given Parameters
|
# Create Connections of Given Parameters
|
||||||
self.cx_profile.create(self.protocol, side_a="1.1."+self.upstream, side_b=list(self.local_realm.find_ports_like("sta0+")))
|
self.cx_profile.create(self.protocol, side_a="1.1." + self.upstream,
|
||||||
|
side_b=list(self.local_realm.find_ports_like("sta0+")))
|
||||||
time.sleep(15)
|
time.sleep(15)
|
||||||
|
|
||||||
# Getting all the Endpoint Names for measuring Throughput Later
|
# Getting all the Endpoint Names for measuring Throughput Later
|
||||||
for i in self.cx_profile.get_cx_names():
|
for i in self.cx_profile.get_cx_names():
|
||||||
self.cx_names.append(i)
|
self.cx_names.append(i)
|
||||||
for j in self.cx_names:
|
for j in self.cx_names:
|
||||||
x=self.local_realm.json_get("/cx/"+j)
|
x = self.local_realm.json_get("/cx/" + j)
|
||||||
self.endp.append(x.get(j).get('endpoints')[1])
|
self.endp.append(x.get(j).get('endpoints')[1])
|
||||||
#print(self.endp)
|
# print(self.endp)
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
#self.station_profile.admin_up()
|
# self.station_profile.admin_up()
|
||||||
|
|
||||||
self.cx_profile.start_cx()
|
self.cx_profile.start_cx()
|
||||||
time.sleep(5)
|
time.sleep(5)
|
||||||
return 0
|
return 0
|
||||||
@@ -197,75 +197,71 @@ class CreateSTA_CX(LFCliBase):
|
|||||||
def cleanup(self):
|
def cleanup(self):
|
||||||
# Removing Connections
|
# Removing Connections
|
||||||
self.local_realm.cleanup_cxe_prefix(self.cx_profile.name_prefix)
|
self.local_realm.cleanup_cxe_prefix(self.cx_profile.name_prefix)
|
||||||
|
|
||||||
vap = self.local_realm.find_ports_like("vap+")
|
vap = self.local_realm.find_ports_like("vap+")
|
||||||
bridges = self.local_realm.find_ports_like("br+")
|
bridges = self.local_realm.find_ports_like("br+")
|
||||||
station_map = self.local_realm.find_ports_like("sta+")
|
station_map = self.local_realm.find_ports_like("sta+")
|
||||||
#Removing Bridges
|
# Removing Bridges
|
||||||
for eid,record in bridges.items():
|
for eid, record in bridges.items():
|
||||||
self.local_realm.remove_vlan_by_eid(eid)
|
self.local_realm.remove_vlan_by_eid(eid)
|
||||||
time.sleep(0.03)
|
time.sleep(0.03)
|
||||||
#Removing VAP
|
# Removing VAP
|
||||||
for eid,record in vap.items():
|
for eid, record in vap.items():
|
||||||
self.local_realm.remove_vlan_by_eid(eid)
|
self.local_realm.remove_vlan_by_eid(eid)
|
||||||
time.sleep(0.03)
|
time.sleep(0.03)
|
||||||
|
|
||||||
#Removing stations
|
# Removing stations
|
||||||
for eid,record in station_map.items():
|
for eid, record in station_map.items():
|
||||||
self.local_realm.remove_vlan_by_eid(eid)
|
self.local_realm.remove_vlan_by_eid(eid)
|
||||||
time.sleep(0.03)
|
time.sleep(0.03)
|
||||||
del_sta_names = []
|
del_sta_names = []
|
||||||
try:
|
try:
|
||||||
for eid,value in station_map.items():
|
for eid, value in station_map.items():
|
||||||
tname = eid[eid.rfind('.'):]
|
tname = eid[eid.rfind('.'):]
|
||||||
del_sta_names.append(tname)
|
del_sta_names.append(tname)
|
||||||
except Exception as x:
|
except Exception as x:
|
||||||
self.local_realm.error(x)
|
self.local_realm.error(x)
|
||||||
-try:
-LFUtils.waitUntilPortsDisappear(base_url=self.local_realm.lfclient_url, port_list=del_sta_names, debug=True)
-print("Ports Successfully Cleaned up")
-return 0
-except:
-print("Ports Successfully Cleaned up")
+LFUtils.waitUntilPortsDisappear(base_url=self.local_realm.lfclient_url, port_list=del_sta_names, debug=True)
+print("Ports Successfully Cleaned up")
time.sleep(5)
|
time.sleep(5)
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
# Generates XLSX Report
|
# Generates XLSX Report
|
||||||
def GenerateReport(throughput_sta, throughput_vap, core1_temp, core2_temp, duration,name):
|
def GenerateReport(throughput_sta, throughput_vap, core1_temp, core2_temp, duration, name):
|
||||||
workbook = xlsxwriter.Workbook(name)
|
workbook = xlsxwriter.Workbook(name)
|
||||||
worksheet = workbook.add_worksheet()
|
worksheet = workbook.add_worksheet()
|
||||||
worksheet.write('A1', 'THROUGHPUT OVER TIME STA CX ')
|
worksheet.write('A1', 'THROUGHPUT OVER TIME STA CX ')
|
||||||
worksheet.write('B1', 'THROUGHPUT OVER TIME VAP ')
|
worksheet.write('B1', 'THROUGHPUT OVER TIME VAP ')
|
||||||
worksheet.write('C1', 'CORE 0 TEMP')
|
worksheet.write('C1', 'CORE 0 TEMP')
|
||||||
worksheet.write('D1', 'CORE 1 TEMP')
|
worksheet.write('D1', 'CORE 1 TEMP')
|
||||||
core1=[]
|
core1 = []
|
||||||
core2=[]
|
core2 = []
|
||||||
sta_throu=[]
|
sta_throu = []
|
||||||
vap_throu=[]
|
vap_throu = []
|
||||||
j=2
|
j = 2
|
||||||
for i in throughput_sta:
|
for i in throughput_sta:
|
||||||
sta_throu.append(i/1000000)
|
sta_throu.append(i / 1000000)
|
||||||
worksheet.write('A'+str(j), str(i/1000000)+" Mbps")
|
worksheet.write('A' + str(j), str(i / 1000000) + " Mbps")
|
||||||
j=j+1
|
j = j + 1
|
||||||
j=2
|
j = 2
|
||||||
for i in throughput_vap:
|
for i in throughput_vap:
|
||||||
vap_throu.append(i/1000000)
|
vap_throu.append(i / 1000000)
|
||||||
worksheet.write('B'+str(j), str(i/1000000)+" Mbps")
|
worksheet.write('B' + str(j), str(i / 1000000) + " Mbps")
|
||||||
j=j+1
|
j = j + 1
|
||||||
j=2
|
j = 2
|
||||||
for i in core1_temp:
|
for i in core1_temp:
|
||||||
core1.append(int(str(i).split(':')[1].split('(')[0].split('.')[0].split('+')[1]))
|
core1.append(int(str(i).split(':')[1].split('(')[0].split('.')[0].split('+')[1]))
|
||||||
worksheet.write('C'+str(j),str(i).split(':')[1].split('(')[0] )
|
worksheet.write('C' + str(j), str(i).split(':')[1].split('(')[0])
|
||||||
j=j+1
|
j = j + 1
|
||||||
j=2
|
j = 2
|
||||||
for i in core2_temp:
|
for i in core2_temp:
|
||||||
core2.append(int(str(i).split(':')[1].split('(')[0].split('.')[0].split('+')[1]))
|
core2.append(int(str(i).split(':')[1].split('(')[0].split('.')[0].split('+')[1]))
|
||||||
worksheet.write('D'+str(j), str(i).split(':')[1].split('(')[0])
|
worksheet.write('D' + str(j), str(i).split(':')[1].split('(')[0])
|
||||||
j=j+1
|
j = j + 1
|
||||||
|
|
||||||
Time =[]
|
Time = []
|
||||||
for i in range(0,int(duration)*5):
|
for i in range(0, int(duration) * 5):
|
||||||
Time.append(i)
|
Time.append(i)
|
||||||
plot(sta_throu, vap_throu, core1, core2, Time)
|
plot(sta_throu, vap_throu, core1, core2, Time)
|
||||||
workbook.close()
|
workbook.close()
|
||||||
@@ -273,169 +269,94 @@ def GenerateReport(throughput_sta, throughput_vap, core1_temp, core2_temp, durat
|
|||||||
|
|
||||||
# Plotting Function for Parameters
|
# Plotting Function for Parameters
|
||||||
def plot(throughput_sta, throughput_vap, core1_temp, core2_temp, Time):
|
def plot(throughput_sta, throughput_vap, core1_temp, core2_temp, Time):
|
||||||
|
|
||||||
|
|
||||||
s1 = figure()
|
s1 = figure()
|
||||||
s1.title.text = "WIFI Throughput vs Temperature Plot"
|
s1.title.text = "WIFI Throughput vs Temperature Plot"
|
||||||
s1.xaxis.axis_label = "Time in Seconds"
|
s1.xaxis.axis_label = "Time in Seconds"
|
||||||
s1.yaxis.axis_label = "Throughput in Mbps"
|
s1.yaxis.axis_label = "Throughput in Mbps"
|
||||||
|
|
||||||
s1.line( Time, throughput_sta, color='black')
|
s1.line(Time, throughput_sta, color='black')
|
||||||
#s1.circle(Time, throughput_sta, color='red')
|
# s1.circle(Time, throughput_sta, color='red')
|
||||||
|
|
||||||
|
-s1.line( Time, throughput_vap, color='blue')
-#s1.circle(Time, throughput_vap, color='blue')
+s1.line(Time, throughput_vap, color='blue')
+# s1.circle(Time, throughput_vap, color='blue')
|
|
||||||
s1.extra_y_ranges = {"Temperature": Range1d(start=0, end=150)}
|
s1.extra_y_ranges = {"Temperature": Range1d(start=0, end=150)}
|
||||||
s1.add_layout(LinearAxis(y_range_name="Temperature", axis_label="Temperature in Degree Celsius"), 'right')
|
s1.add_layout(LinearAxis(y_range_name="Temperature", axis_label="Temperature in Degree Celsius"), 'right')
|
||||||
|
|
||||||
s1.line(Time, core1_temp, y_range_name='Temperature', color='red')
|
s1.line(Time, core1_temp, y_range_name='Temperature', color='red')
|
||||||
#s1.circle(Time, core1_temp, y_range_name='Temperature', color='red')
|
# s1.circle(Time, core1_temp, y_range_name='Temperature', color='red')
|
||||||
|
|
||||||
s1.line(Time, core2_temp, y_range_name='Temperature', color='green')
|
s1.line(Time, core2_temp, y_range_name='Temperature', color='green')
|
||||||
#s1.circle(Time, core2_temp, y_range_name='Temperature', color='blue')
|
# s1.circle(Time, core2_temp, y_range_name='Temperature', color='blue')
|
||||||
|
|
||||||
show(s1)
|
show(s1)
|
||||||
|
|
||||||
|
|
||||||
# Creates the Instance for LFCliBase
|
# Creates the Instance for LFCliBase
|
||||||
class VAP_Measure(LFCliBase):
|
class VAP_Measure(LFCliBase):
|
||||||
def __init__(self, lfclient_host, lfclient_port):
|
def __init__(self, lfclient_host, lfclient_port):
|
||||||
super().__init__(lfclient_host, lfclient_port)
|
super().__init__(lfclient_host, lfclient_port)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# main method
|
# main method
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
prog='lf_dut_sta_vap_test.py',
|
prog='lf_dut_sta_vap_test.py',
|
||||||
formatter_class=argparse.RawTextHelpFormatter,
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
description="Test Scenario of DUT Temperature measurement along with simultaneous throughput on VAP as well as stations")
|
description="Test Scenario of DUT Temperature measurement along with simultaneous throughput on VAP as well as stations")
|
||||||
|
|
||||||
parser.add_argument("-m", "--manager", type=str, help="Enter the address of Lanforge Manager (By default localhost)")
|
parser.add_argument("-m", "--manager", type=str,
|
||||||
parser.add_argument("-sc", "--scenario", type=str, help="Enter the Name of the Scenario you want to load (by Default DFLT)")
|
help="Enter the address of Lanforge Manager (By default localhost)", default="localhost")
|
||||||
|
parser.add_argument("-sc", "--scenario", type=str,
|
||||||
|
help="Enter the Name of the Scenario you want to load (by Default DFLT)")
|
||||||
parser.add_argument("-r", "--radio", type=str, help="Enter the radio on which you want to create a station/s on ")
|
parser.add_argument("-r", "--radio", type=str, help="Enter the radio on which you want to create a station/s on ")
|
||||||
parser.add_argument("-n", "--num_sta", type=int, help="Enter the Number of Stations You want to create")
|
parser.add_argument("-n", "--num_sta", type=int, help="Enter the Number of Stations You want to create", default=0)
|
||||||
parser.add_argument("-i", "--sta_id", type=int, help="Enter Station id [for sta001, enter 1]")
|
parser.add_argument("-i", "--sta_id", type=int, help="Enter Station id [for sta001, enter 1]", default=0)
|
||||||
parser.add_argument("-ss", "--ssid", type=str, help="Enter the ssid, with which you want to associate your stations (Enter the SSID of DUT AP)")
|
parser.add_argument("-ss", "--ssid", type=str,
|
||||||
parser.add_argument("-up", "--upstream", type=str, help="Enter the upstream ethernet port")
|
help="Enter the ssid, with which you want to associate your stations (Enter the SSID of DUT AP)")
|
||||||
parser.add_argument("-sec", "--security", type=str, help="Enter the security type [open, wep, wpa, wpa2]")
|
parser.add_argument("-up", "--upstream", type=str, help="Enter the upstream ethernet port", default='br0000')
|
||||||
parser.add_argument("-p", "--password", type=str, help="Enter the password if security is not open")
|
parser.add_argument("-sec", "--security", type=str, help="Enter the security type [open, wep, wpa, wpa2]",
|
||||||
parser.add_argument("-pr", "--protocol", type=str, help="Enter the protocol on which you want to run your connections [lf_udp, lf_tcp]")
|
default='open')
|
||||||
parser.add_argument("-mn", "--min_mbps", type=str, help="Enter the Minimum Rate")
|
parser.add_argument("-p", "--password", type=str, help="Enter the password if security is not open",
|
||||||
parser.add_argument("-mx", "--max_mbps", type=str, help="Enter the Maximum Rate")
|
default='[Blank]')
|
||||||
parser.add_argument("-t", "--duration", type=int, help="Enter the Time for which you want to run test (In Minutes)")
|
parser.add_argument("-pr", "--protocol", type=str,
|
||||||
parser.add_argument("-o", "--report_name", type=str, help="Enter the Name of the Output file ('Report.xlsx')")
|
help="Enter the protocol on which you want to run your connections [lf_udp, lf_tcp]",
|
||||||
args = None
|
default='lf_udp')
|
||||||
|
parser.add_argument("-mn", "--min_mbps", type=int, help="Enter the Minimum Rate", default=1000)
|
||||||
try:
|
parser.add_argument("-mx", "--max_mbps", type=int, help="Enter the Maximum Rate")
|
||||||
args = parser.parse_args()
|
parser.add_argument("-t", "--duration", type=int, help="Enter the Time for which you want to run test (In Minutes)",
|
||||||
# Lanforge Manager IP Address
|
default=15)
|
||||||
if (args.manager is None):
|
parser.add_argument("-o", "--report_name", type=str, help="Enter the Name of the Output file ('Report.xlsx')",
|
||||||
manager = "localhost"
|
default='report.xlsx')
|
||||||
if (args.manager is not None):
|
|
||||||
manager = args.manager
|
|
||||||
|
|
||||||
# Scenario Name
|
|
||||||
if (args.scenario is not None):
|
|
||||||
scenario = args.scenario
|
|
||||||
# Radio Name
|
|
||||||
if (args.radio is not None):
|
|
||||||
radio = args.radio
|
|
||||||
|
|
||||||
# Number of Stations
|
|
||||||
if (args.num_sta is None):
|
|
||||||
num_sta = 0
|
|
||||||
if (args.num_sta is not None):
|
|
||||||
num_sta = args.num_sta
|
|
||||||
|
|
||||||
# Station ID
|
|
||||||
if (args.sta_id is None):
|
|
||||||
sta_id = '0'
|
|
||||||
if (args.sta_id is not None):
|
|
||||||
sta_id = args.sta_id
|
|
||||||
|
|
||||||
# SSID
|
|
||||||
if (args.ssid is not None):
|
|
||||||
ssid = args.ssid
|
|
||||||
if (args.ssid is not None):
|
|
||||||
ssid = args.ssid
|
|
||||||
|
|
||||||
# Security (Open by Default)
|
args = parser.parse_args()
|
||||||
if (args.security is None):
|
|
||||||
security = 'open'
|
|
||||||
if (args.security is not None):
|
|
||||||
security = args.security
|
|
||||||
|
|
||||||
# Password (if Security is not Open)
|
min_bps = args.min_mbps * 1000000
|
||||||
if (args.password is not None):
|
|
||||||
password = args.password
|
|
||||||
if (args.password == 'open'):
|
|
||||||
password = "[Blank]"
|
|
||||||
if (args.password is None):
|
|
||||||
password = "[Blank]"
|
|
||||||
|
|
||||||
# Upstream Port (By default br0000)
|
|
||||||
if (args.upstream is None):
|
|
||||||
upstream = 'br0000'
|
|
||||||
if (args.upstream is not None):
|
|
||||||
upstream = args.upstream
|
|
||||||
|
|
||||||
# Protocol (By Default lf_udp)
|
|
||||||
if (args.protocol is not None):
|
|
||||||
protocol = args.protocol
|
|
||||||
if (args.protocol is None):
|
|
||||||
protocol = 'lf_udp'
|
|
||||||
|
|
||||||
#Min BPS
|
|
||||||
if (args.min_mbps is not None):
|
|
||||||
min_bps = int(args.min_mbps)*1000000
|
|
||||||
if (args.min_mbps is None):
|
|
||||||
min_bps = int(1000)*1000000
|
|
||||||
if (args.max_mbps is None ):
|
|
||||||
max_bps = int(1000)*1000000
|
|
||||||
|
|
||||||
if (args.min_mbps is not None):
|
if args.max_mbps and args.max_mbps != "same":
|
||||||
min_bps = int(args.min_mbps)*1000000
|
max_bps = int(args.max_mbps) * 1000000
|
||||||
if (args.max_mbps is not None and args.max_mbps != "same"):
|
if args.max_mbps and args.max_mbps == "same":
|
||||||
max_bps = int(args.max_mbps)*1000000
|
max_bps = args.min_mbps
|
||||||
if (args.max_mbps is not None and args.max_mbps == "same"):
|
|
||||||
max_bps = args.min_mbps
|
|
||||||
if (args.duration is not None):
|
|
||||||
duration = (args.duration * 60)/5
|
|
||||||
if (args.report_name is not None):
|
|
||||||
report_name = args.report_name
|
|
||||||
if (args.duration is None):
|
|
||||||
duration = (1 * 60)/5
|
|
||||||
if (args.report_name is None):
|
|
||||||
report_name = "report.xlsx"
|
|
||||||
except Exception as e:
|
|
||||||
logging.exception(e)
|
|
||||||
|
|
||||||
exit(2)
|
|
||||||
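The change above collapses several dozen lines of manual None-checking into argparse defaults: once default= is supplied, args.<name> is already populated when a flag is omitted, so the try/except wrapper and the per-option if-blocks become unnecessary. A minimal, hypothetical illustration of the idiom (not part of this script):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-m", "--manager", type=str, default="localhost",
                    help="address of the LANforge manager")
args = parser.parse_args([])   # simulate a run with no CLI arguments
print(args.manager)            # -> "localhost", with no manual 'is None' check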
|
|
||||||
|
|
||||||
|
|
||||||
# Start DUT
|
# Start DUT
|
||||||
|
|
||||||
|
|
||||||
#Loading the Scenario on Lanforge_1 (Here Considered as DUT) [Created VAP With SSID 'lexusap' on wiphy0 with eth1 as backhaul]
|
# Loading the Scenario on Lanforge_1 (Here Considered as DUT) [Created VAP With SSID 'lexusap' on wiphy0 with eth1 as backhaul]
|
||||||
Scenario_1 = LoadScenario("192.168.200.18", 8080, "Lexus_DUT")
|
Scenario_1 = LoadScenario("192.168.200.18", 8080, "Lexus_DUT")
|
||||||
|
|
||||||
dut_traffic_profile = CreateSTA_CX("192.168.200.18", 8080, "wiphy1", 1, 0, 'lanforge_ap', 'open', password, 'br0000', 'lf_udp', min_bps, max_bps)
|
dut_traffic_profile = CreateSTA_CX("192.168.200.18", 8080, "wiphy1", 1, 0, 'lanforge_ap', 'open', args.password,
|
||||||
|
'br0000', 'lf_udp', min_bps, max_bps)
|
||||||
dut_traffic_profile.build()
|
dut_traffic_profile.build()
|
||||||
|
|
||||||
print("DUT All Set... Lets setup Lanforge")
|
print("DUT All Set... Lets setup Lanforge")
|
||||||
|
|
||||||
|
|
||||||
#Loading the Scenario on Lanforge_2 (Here Considered as LANFORGE Test) [Created VAP With SSID 'lanforge_ap' on wiphy0 with eth2 as backhaul]
|
# Loading the Scenario on Lanforge_2 (Here Considered as LANFORGE Test) [Created VAP With SSID 'lanforge_ap' on wiphy0 with eth2 as backhaul]
|
||||||
|
|
||||||
DB_Lanforge_2 = "LANforge_TEST"
|
DB_Lanforge_2 = "LANforge_TEST"
|
||||||
Scenario_2 = LoadScenario(manager, 8080, scenario)
|
Scenario_2 = LoadScenario(args.manager, 8080, args.scenario)
|
||||||
|
|
||||||
|
|
||||||
lf_traffic_profile = CreateSTA_CX(manager, 8080, radio, num_sta, sta_id, ssid, security, password, upstream, protocol, min_bps, max_bps)
|
lf_traffic_profile = CreateSTA_CX(args.manager, 8080, args.radio, args.num_sta, args.sta_id, args.ssid,
|
||||||
|
args.security, args.password, args.upstream, args.protocol, min_bps,
|
||||||
|
max_bps)
|
||||||
lf_traffic_profile.build()
|
lf_traffic_profile.build()
|
||||||
|
|
||||||
print("Lanforge System is All set... Lets start and Measure")
|
print("Lanforge System is All set... Lets start and Measure")
|
||||||
@@ -448,21 +369,21 @@ def main():
|
|||||||
print("Collecting Throughput Values...")
|
print("Collecting Throughput Values...")
|
||||||
|
|
||||||
# Object to Measure Throughput at VAP Side
|
# Object to Measure Throughput at VAP Side
|
||||||
vap_measure_obj = VAP_Measure(manager, 8080)
|
vap_measure_obj = VAP_Measure(args.manager, 8080)
|
||||||
|
|
||||||
#
|
#
|
||||||
dut_temp_obj = Login_DUT(1, "Thread-1", "192.168.200.18")
|
dut_temp_obj = Login_DUT(1, "Thread-1", "192.168.200.18")
|
||||||
|
|
||||||
#List for Storing the Total Throughput
|
# List for Storing the Total Throughput
|
||||||
throughput_sta =[]
|
throughput_sta = []
|
||||||
throughput_vap =[]
|
throughput_vap = []
|
||||||
|
|
||||||
# This loop will get the Data from All the endpoints and sum up to give total Throughput over time
|
# This loop will get the Data from All the endpoints and sum up to give total Throughput over time
|
||||||
for i in range(0,int(duration)):
|
for i in range(0, int(args.duration)):
|
||||||
temp=0
|
temp = 0
|
||||||
for j in lf_traffic_profile.endp:
|
for j in lf_traffic_profile.endp:
|
||||||
y=lf_traffic_profile.local_realm.json_get("/endp/"+j).get('endpoint').get('rx rate')
|
y = lf_traffic_profile.local_realm.json_get("/endp/" + j).get('endpoint').get('rx rate')
|
||||||
temp=temp+y
|
temp = temp + y
|
||||||
throughput_sta.append(temp)
|
throughput_sta.append(temp)
|
||||||
throughput_vap.append(vap_measure_obj.json_get("/port/1/1/vap0000/").get('interface').get('bps rx'))
|
throughput_vap.append(vap_measure_obj.json_get("/port/1/1/vap0000/").get('interface').get('bps rx'))
|
||||||
dut_temp_obj.run()
|
dut_temp_obj.run()
|
||||||
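The sampling loop above simply totals the per-endpoint receive rate on each pass. Read as a helper it looks like the sketch below; this is only an illustration and assumes the same /endp/<name> JSON shape (an 'endpoint' object carrying an 'rx rate' field, in bits per second) that the surrounding code already relies on:

def total_station_rx_bps(realm, endpoint_names):
    # Sum the instantaneous 'rx rate' (bps) reported for each named endpoint.
    total = 0
    for name in endpoint_names:
        endpoint = realm.json_get("/endp/" + name).get('endpoint')
        total += endpoint.get('rx rate')
    return total

# one sample of the loop above would then be:
# throughput_sta.append(total_station_rx_bps(lf_traffic_profile.local_realm, lf_traffic_profile.endp))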
@@ -472,10 +393,9 @@ def main():
|
|||||||
print(throughput_sta)
|
print(throughput_sta)
|
||||||
dut_traffic_profile.cleanup()
|
dut_traffic_profile.cleanup()
|
||||||
lf_traffic_profile.cleanup()
|
lf_traffic_profile.cleanup()
|
||||||
GenerateReport(throughput_sta, throughput_vap, dut_temp_obj.data_core1, dut_temp_obj.data_core2, duration, report_name)
|
GenerateReport(throughput_sta, throughput_vap, dut_temp_obj.data_core1, dut_temp_obj.data_core2, args.duration,
|
||||||
|
args.report_name)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
""" lf_ftp.py will verify that N clients connected on specified band and can simultaneously download/upload some amount of file from FTP server and measuring the time taken by client to download/upload the file.
|
""" lf_ftp.py will verify that N clients connected on specified band and can simultaneously download/upload some amount of file from FTP server and measuring the time taken by client to download/upload the file.
|
||||||
cli- python3 lf_ftp.py --mgr localhost --mgr_port 8080 --upstream_port eth1 --ssid FTP --security open --passwd BLANK --ap_name WAC505 --ap_ip 192.168.213.90 --bands Both --directions Download --twog_radio wiphy1 --fiveg_radio wiphy0 --file_size 2MB --num_stations 40 --Both_duration 1 --traffic_duration 2 --ssh_port 22_
|
cli- ./lf_ftp.py --ssid <SSID> --passwd <PASSWORD> --file_sizes 2MB --fiveg_duration 4 --mgr 192.168.1.101 --traffic_duration 2 --security wpa2 --bands 5G --fiveg_radio wiphy1 --directions Download Upload
|
||||||
Copyright 2021 Candela Technologies Inc
|
Copyright 2021 Candela Technologies Inc
|
||||||
License: Free to distribute and modify. LANforge systems must be licensed.
|
License: Free to distribute and modify. LANforge systems must be licensed.
|
||||||
"""
|
"""
|
||||||
@@ -11,12 +12,13 @@ from datetime import datetime
|
|||||||
import time
|
import time
|
||||||
import os
|
import os
|
||||||
import matplotlib.patches as mpatches
|
import matplotlib.patches as mpatches
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
if sys.version_info[0] != 3:
|
if sys.version_info[0] != 3:
|
||||||
print("This script requires Python 3")
|
print("This script requires Python 3")
|
||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
|
|
||||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||||
|
|
||||||
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
|
LFUtils = importlib.import_module("py-json.LANforge.LFUtils")
|
||||||
@@ -26,13 +28,14 @@ realm = importlib.import_module("py-json.realm")
|
|||||||
Realm = realm.Realm
|
Realm = realm.Realm
|
||||||
lf_report = importlib.import_module("py-scripts.lf_report")
|
lf_report = importlib.import_module("py-scripts.lf_report")
|
||||||
lf_graph = importlib.import_module("py-scripts.lf_graph")
|
lf_graph = importlib.import_module("py-scripts.lf_graph")
|
||||||
|
lf_kpi_csv = importlib.import_module("py-scripts.lf_kpi_csv")
|
||||||
|
|
||||||
|
|
||||||
class FtpTest(LFCliBase):
|
class FtpTest(LFCliBase):
|
||||||
def __init__(self, lfclient_host="localhost", lfclient_port=8080, sta_prefix="sta", start_id=0, num_sta=None,
|
def __init__(self, lfclient_host="localhost", lfclient_port=8080, sta_prefix="sta", start_id=0, num_sta=None,
|
||||||
dut_ssid=None, dut_security=None, dut_passwd=None, file_size=None, band=None, twog_radio=None,
|
dut_ssid=None, dut_security=None, dut_passwd=None, file_size=None, band=None, twog_radio=None,
|
||||||
fiveg_radio=None, upstream="eth1", _debug_on=False, _exit_on_error=False, _exit_on_fail=False,
|
fiveg_radio=None, upstream="eth1", _debug_on=False, _exit_on_error=False, _exit_on_fail=False,
|
||||||
direction=None, duration=None, traffic_duration=None, ssh_port=None):
|
direction=None, duration=None, traffic_duration=None, ssh_port=None, kpi_csv=None):
|
||||||
super().__init__(lfclient_host, lfclient_port, _debug=_debug_on, _exit_on_fail=_exit_on_fail)
|
super().__init__(lfclient_host, lfclient_port, _debug=_debug_on, _exit_on_fail=_exit_on_fail)
|
||||||
print("Test is about to start")
|
print("Test is about to start")
|
||||||
self.host = lfclient_host
|
self.host = lfclient_host
|
||||||
@@ -47,6 +50,7 @@ class FtpTest(LFCliBase):
|
|||||||
self.password = dut_passwd
|
self.password = dut_passwd
|
||||||
self.requests_per_ten = 1
|
self.requests_per_ten = 1
|
||||||
self.band = band
|
self.band = band
|
||||||
|
self.kpi_csv = kpi_csv
|
||||||
self.file_size = file_size
|
self.file_size = file_size
|
||||||
self.direction = direction
|
self.direction = direction
|
||||||
self.twog_radio = twog_radio
|
self.twog_radio = twog_radio
|
||||||
@@ -301,7 +305,7 @@ class FtpTest(LFCliBase):
|
|||||||
for i in range(self.num_sta):
|
for i in range(self.num_sta):
|
||||||
list_of_time.append(0)
|
list_of_time.append(0)
|
||||||
#running layer 4 traffic upto user given time
|
#running layer 4 traffic upto user given time
|
||||||
while str(datetime.datetime.now() - time1) <= self.traffic_duration:
|
while str(datetime.now() - time1) <= self.traffic_duration:
|
||||||
if list_of_time.count(0) == 0:
|
if list_of_time.count(0) == 0:
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -309,11 +313,11 @@ class FtpTest(LFCliBase):
|
|||||||
|
|
||||||
# run script upto given time
|
# run script upto given time
|
||||||
if counter == 0:
|
if counter == 0:
|
||||||
if str(datetime.datetime.now() - time1) >= self.duration:
|
if str(datetime.now() - time1) >= self.duration:
|
||||||
counter = counter + 1
|
counter = counter + 1
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
if str(datetime.datetime.now() - time1) >= self.traffic_duration:
|
if str(datetime.now() - time1) >= self.traffic_duration:
|
||||||
break
|
break
|
||||||
|
|
||||||
for i in range(self.num_sta):
|
for i in range(self.num_sta):
|
||||||
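The datetime.datetime.now() to datetime.now() changes in this hunk follow from the import style shown in the hunk context earlier in this file (from datetime import datetime): the class itself, not the module, is in scope. A short sketch of an elapsed-time check written against that import style, independent of this test's string-based duration comparison:

from datetime import datetime, timedelta

start = datetime.now()
# ... run traffic for a while ...
if datetime.now() - start >= timedelta(minutes=2):
    print("traffic duration reached")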
@@ -321,9 +325,9 @@ class FtpTest(LFCliBase):
|
|||||||
|
|
||||||
# reading uc-avg data in json format
|
# reading uc-avg data in json format
|
||||||
uc_avg = self.json_get("layer4/list?fields=uc-avg")
|
uc_avg = self.json_get("layer4/list?fields=uc-avg")
|
||||||
if data['endpoint'][i][data2[i]]['bytes-rd'] <= self.file_size_bytes:
|
if int(data['endpoint'][i][data2[i]]['bytes-rd']) <= self.file_size_bytes:
|
||||||
data = self.json_get("layer4/list?fields=bytes-rd")
|
data = self.json_get("layer4/list?fields=bytes-rd")
|
||||||
if data['endpoint'][i][data2[i]]['bytes-rd'] >= self.file_size_bytes:
|
if int(data['endpoint'][i][data2[i]]['bytes-rd']) >= self.file_size_bytes:
|
||||||
list1.append(i)
|
list1.append(i)
|
||||||
if list1.count(i) == 1:
|
if list1.count(i) == 1:
|
||||||
list2.append(i)
|
list2.append(i)
|
||||||
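The int() casts added around 'bytes-rd' protect the comparison with self.file_size_bytes: if the JSON layer hands the counter back as a string, which the added casts suggest can happen, comparing it directly to an integer raises TypeError in Python 3. A one-line illustration with made-up values:

file_size_bytes = 2 * 1024 * 1024
bytes_rd = "1048576"                      # counter delivered as text
print(int(bytes_rd) >= file_size_bytes)   # False, compared numerically
# print(bytes_rd >= file_size_bytes)      # would raise TypeError: '>=' not supported between 'str' and 'int'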
@@ -614,7 +618,7 @@ class FtpTest(LFCliBase):
|
|||||||
def bar_graph(self, x_axis, image_name, dataset, color, labels, x_axis_name, y_axis_name,handles, ncol, box, fontsize):
|
def bar_graph(self, x_axis, image_name, dataset, color, labels, x_axis_name, y_axis_name,handles, ncol, box, fontsize):
|
||||||
'''This Method will plot bar graph'''
|
'''This Method will plot bar graph'''
|
||||||
|
|
||||||
graph = lf_bar_graph(_data_set=dataset,
|
graph = lf_graph.lf_bar_graph(_data_set=dataset,
|
||||||
_xaxis_name=x_axis_name,
|
_xaxis_name=x_axis_name,
|
||||||
_yaxis_name=y_axis_name,
|
_yaxis_name=y_axis_name,
|
||||||
_xaxis_categories=x_axis,
|
_xaxis_categories=x_axis,
|
||||||
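lf_bar_graph becomes lf_graph.lf_bar_graph here because this script loads the library with importlib (lf_graph = importlib.import_module("py-scripts.lf_graph"), as the import hunk above shows), so the classes have to be reached through the returned module object rather than as bare names. A minimal sketch of that access pattern as used in this repository:

import importlib

lf_graph = importlib.import_module("py-scripts.lf_graph")
graph = lf_graph.lf_bar_graph(_data_set=[[10.0, 20.0, 30.0, 40.0]],
                              _xaxis_name="stations",
                              _yaxis_name="Mbps")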
@@ -660,7 +664,7 @@ class FtpTest(LFCliBase):
|
|||||||
def generate_report(self, ftp_data, date,test_setup_info, input_setup_info):
|
def generate_report(self, ftp_data, date,test_setup_info, input_setup_info):
|
||||||
'''Method for generate the report'''
|
'''Method for generate the report'''
|
||||||
|
|
||||||
self.report = lf_report(_results_dir_name="ftp_test", _output_html="ftp_test.html", _output_pdf="ftp_test.pdf")
|
self.report = lf_report.lf_report(_results_dir_name="ftp_test", _output_html="ftp_test.html", _output_pdf="ftp_test.pdf")
|
||||||
self.report.set_title("FTP Test")
|
self.report.set_title("FTP Test")
|
||||||
self.report.set_date(date)
|
self.report.set_date(date)
|
||||||
self.report.build_banner()
|
self.report.build_banner()
|
||||||
@@ -703,7 +707,14 @@ def main():
|
|||||||
parser = argparse.ArgumentParser(
|
parser = argparse.ArgumentParser(
|
||||||
prog='lf_ftp.py',
|
prog='lf_ftp.py',
|
||||||
formatter_class=argparse.RawTextHelpFormatter,
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
description="FTP Test Script")
|
description='''\
|
||||||
|
---------------------------
|
||||||
|
FTP Test Script - lf_ftp.py
|
||||||
|
---------------------------
|
||||||
|
CLI Example:
|
||||||
|
./lf_ftp.py --ssid <SSID> --passwd <PASSWORD> --file_sizes 2MB --fiveg_duration 4 --mgr 192.168.1.101 --traffic_duration 2 --security wpa2 --bands 5G --fiveg_radio wiphy1 --directions Download Upload
|
||||||
|
---------------------------
|
||||||
|
''')
|
||||||
parser.add_argument('--mgr', help='hostname for where LANforge GUI is running', default='localhost')
|
parser.add_argument('--mgr', help='hostname for where LANforge GUI is running', default='localhost')
|
||||||
parser.add_argument('--mgr_port', help='port LANforge GUI HTTP service is running on', default=8080)
|
parser.add_argument('--mgr_port', help='port LANforge GUI HTTP service is running on', default=8080)
|
||||||
parser.add_argument('--upstream_port', help='non-station port that generates traffic: eg: eth1', default='eth1')
|
parser.add_argument('--upstream_port', help='non-station port that generates traffic: eg: eth1', default='eth1')
|
||||||
@@ -716,7 +727,7 @@ def main():
|
|||||||
parser.add_argument('--fiveg_radio', type=str, help='specify radio for 5G client', default='wiphy0')
|
parser.add_argument('--fiveg_radio', type=str, help='specify radio for 5G client', default='wiphy0')
|
||||||
parser.add_argument('--twog_duration', nargs="+", help='Pass and Fail duration for 2.4G band in minutes')
|
parser.add_argument('--twog_duration', nargs="+", help='Pass and Fail duration for 2.4G band in minutes')
|
||||||
parser.add_argument('--fiveg_duration', nargs="+", help='Pass and Fail duration for 5G band in minutes')
|
parser.add_argument('--fiveg_duration', nargs="+", help='Pass and Fail duration for 5G band in minutes')
|
||||||
parser.add_argument('--Both_duration', nargs="+", help='Pass and Fail duration for Both band in minutes')
|
parser.add_argument('--both_duration', nargs="+", help='Pass and Fail duration for Both band in minutes')
|
||||||
parser.add_argument('--traffic_duration', type=int, help='duration for layer 4 traffic running')
|
parser.add_argument('--traffic_duration', type=int, help='duration for layer 4 traffic running')
|
||||||
parser.add_argument('--ssh_port', type=int, help="specify the shh port eg 22", default=22)
|
parser.add_argument('--ssh_port', type=int, help="specify the shh port eg 22", default=22)
|
||||||
|
|
||||||
@@ -732,10 +743,10 @@ def main():
|
|||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
# 1st time stamp for test duration
|
# 1st time stamp for test duration
|
||||||
time_stamp1 = datetime.datetime.now()
|
time_stamp1 = datetime.now()
|
||||||
|
|
||||||
# use for creating ftp_test dictionary
|
# use for creating ftp_test dictionary
|
||||||
iteraration_num = 0
|
interation_num = 0
|
||||||
|
|
||||||
# empty dictionary for whole test data
|
# empty dictionary for whole test data
|
||||||
ftp_data = {}
|
ftp_data = {}
|
||||||
@@ -759,12 +770,12 @@ def main():
|
|||||||
index = list(args.file_sizes).index(size)
|
index = list(args.file_sizes).index(size)
|
||||||
duration = args.fiveg_duration[index]
|
duration = args.fiveg_duration[index]
|
||||||
else:
|
else:
|
||||||
if len(args.file_sizes) is not len(args.Both_duration):
|
if len(args.file_sizes) is not len(args.both_duration):
|
||||||
raise Exception("Give proper Pass or Fail duration for 5G band")
|
raise Exception("Give proper Pass or Fail duration for 5G band")
|
||||||
for size in args.file_sizes:
|
for size in args.file_sizes:
|
||||||
if size == file_size:
|
if size == file_size:
|
||||||
index = list(args.file_sizes).index(size)
|
index = list(args.file_sizes).index(size)
|
||||||
duration = args.Both_duration[index]
|
duration = args.both_duration[index]
|
||||||
if duration.isdigit():
|
if duration.isdigit():
|
||||||
duration = int(duration)
|
duration = int(duration)
|
||||||
else:
|
else:
|
||||||
@@ -794,7 +805,7 @@ def main():
|
|||||||
ssh_port=args.ssh_port
|
ssh_port=args.ssh_port
|
||||||
)
|
)
|
||||||
|
|
||||||
iteraration_num = iteraration_num + 1
|
interation_num = interation_num + 1
|
||||||
obj.file_create()
|
obj.file_create()
|
||||||
obj.set_values()
|
obj.set_values()
|
||||||
obj.precleanup()
|
obj.precleanup()
|
||||||
@@ -804,7 +815,7 @@ def main():
|
|||||||
exit(1)
|
exit(1)
|
||||||
|
|
||||||
# First time stamp
|
# First time stamp
|
||||||
time1 = datetime.datetime.now()
|
time1 = datetime.now()
|
||||||
|
|
||||||
obj.start(False, False)
|
obj.start(False, False)
|
||||||
|
|
||||||
@@ -815,19 +826,19 @@ def main():
|
|||||||
pass_fail = obj.pass_fail_check(time_list)
|
pass_fail = obj.pass_fail_check(time_list)
|
||||||
|
|
||||||
# dictionary of whole data
|
# dictionary of whole data
|
||||||
ftp_data[iteraration_num] = obj.ftp_test_data(time_list, pass_fail, args.bands, args.file_sizes,
|
ftp_data[interation_num] = obj.ftp_test_data(time_list, pass_fail, args.bands, args.file_sizes,
|
||||||
args.directions, args.num_stations)
|
args.directions, args.num_stations)
|
||||||
|
|
||||||
obj.stop()
|
obj.stop()
|
||||||
obj.postcleanup()
|
obj.postcleanup()
|
||||||
|
|
||||||
# 2nd time stamp for test duration
|
# 2nd time stamp for test duration
|
||||||
time_stamp2 = datetime.datetime.now()
|
time_stamp2 = datetime.now()
|
||||||
|
|
||||||
# total time for test duration
|
# total time for test duration
|
||||||
test_duration = str(time_stamp2 - time_stamp1)[:-7]
|
test_duration = str(time_stamp2 - time_stamp1)[:-7]
|
||||||
|
|
||||||
date = str(datetime.datetime.now()).split(",")[0].replace(" ", "-").split(".")[0]
|
date = str(datetime.now()).split(",")[0].replace(" ", "-").split(".")[0]
|
||||||
|
|
||||||
#print(ftp_data)
|
#print(ftp_data)
|
||||||
|
|
||||||
|
|||||||
@@ -24,39 +24,42 @@ import matplotlib.pyplot as plt
|
|||||||
import numpy as np
|
import numpy as np
|
||||||
import pdfkit
|
import pdfkit
|
||||||
from matplotlib.colors import ListedColormap
|
from matplotlib.colors import ListedColormap
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
|
||||||
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))
|
||||||
|
|
||||||
lf_csv = importlib.import_module("py-scripts.lf_csv")
|
lf_csv = importlib.import_module("py-scripts.lf_csv")
|
||||||
lf_csv = lf_csv.lf_csv
|
lf_csv = lf_csv.lf_csv
|
||||||
|
|
||||||
# internal candela references included during intial phases, to be deleted at future date
|
# internal candela references included during intial phases, to be deleted
|
||||||
|
# at future date
|
||||||
|
|
||||||
# graph reporting classes
|
# graph reporting classes
|
||||||
-class lf_bar_graph():
-def __init__(self, _data_set=[[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]],
+class lf_bar_graph:
+def __init__(self, _data_set=None,
_xaxis_name="x-axis",
|
_xaxis_name="x-axis",
|
||||||
_yaxis_name="y-axis",
|
_yaxis_name="y-axis",
|
||||||
_xaxis_categories=[1, 2, 3, 4],
|
_xaxis_categories=None,
|
||||||
_xaxis_label=["a", "b", "c", "d"],
|
_xaxis_label=None,
|
||||||
_graph_title="",
|
_graph_title="",
|
||||||
_title_size=16,
|
_title_size=16,
|
||||||
_graph_image_name="image_name",
|
_graph_image_name="image_name",
|
||||||
_label=["bi-downlink", "bi-uplink", 'uplink'],
|
_label=None,
|
||||||
_color=None,
|
_color=None,
|
||||||
_bar_width=0.25,
|
_bar_width=0.25,
|
||||||
_color_edge='grey',
|
_color_edge='grey',
|
||||||
_font_weight='bold',
|
_font_weight='bold',
|
||||||
_color_name=['lightcoral', 'darkgrey', 'r', 'g', 'b', 'y'],
|
_color_name=None,
|
||||||
_figsize=(10, 5),
|
_figsize=(10, 5),
|
||||||
_show_bar_value=False,
|
_show_bar_value=False,
|
||||||
_xaxis_step=1,
|
_xaxis_step=1,
|
||||||
_xticks_font = None,
|
_xticks_font=None,
|
||||||
_xaxis_value_location = 0,
|
_xaxis_value_location=0,
|
||||||
_text_font=None,
|
_text_font=None,
|
||||||
_text_rotation=None,
|
_text_rotation=None,
|
||||||
_grp_title = "",
|
_grp_title="",
|
||||||
_legend_handles=None,
|
_legend_handles=None,
|
||||||
_legend_loc="best",
|
_legend_loc="best",
|
||||||
_legend_box=None,
|
_legend_box=None,
|
||||||
@@ -65,6 +68,16 @@ class lf_bar_graph():
|
|||||||
_dpi=96,
|
_dpi=96,
|
||||||
_enable_csv=False):
|
_enable_csv=False):
|
||||||
|
|
||||||
|
if _data_set is None:
|
||||||
|
_data_set = [[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]]
|
||||||
|
if _xaxis_categories is None:
|
||||||
|
_xaxis_categories = [1, 2, 3, 4]
|
||||||
|
if _xaxis_label is None:
|
||||||
|
_xaxis_label = ["a", "b", "c", "d"]
|
||||||
|
if _label is None:
|
||||||
|
_label = ["bi-downlink", "bi-uplink", 'uplink']
|
||||||
|
if _color_name is None:
|
||||||
|
_color_name = ['lightcoral', 'darkgrey', 'r', 'g', 'b', 'y']
|
||||||
self.data_set = _data_set
|
self.data_set = _data_set
|
||||||
self.xaxis_name = _xaxis_name
|
self.xaxis_name = _xaxis_name
|
||||||
self.yaxis_name = _yaxis_name
|
self.yaxis_name = _yaxis_name
|
||||||
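The hunk above swaps list-valued parameter defaults for None plus an in-body check. That is the standard fix for Python's mutable-default-argument pitfall: a default list is created once, at function definition time, and is then shared by every call. A small illustration, independent of this library:

def append_item(item, bucket=None):
    # create a fresh list per call instead of sharing one default list
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(append_item(1))  # [1]
print(append_item(2))  # [2], not [1, 2]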
@@ -99,20 +112,20 @@ class lf_bar_graph():
|
|||||||
if self.color is None:
|
if self.color is None:
|
||||||
i = 0
|
i = 0
|
||||||
self.color = []
|
self.color = []
|
||||||
for col in self.data_set:
|
for _ in self.data_set:
|
||||||
self.color.append(self.color_name[i])
|
self.color.append(self.color_name[i])
|
||||||
i = i + 1
|
i = i + 1
|
||||||
|
|
||||||
fig = plt.subplots(figsize=self.figsize)
|
plt.subplots(figsize=self.figsize)
|
||||||
i = 0
|
i = 0
|
||||||
|
|
||||||
def show_value(rects):
|
def show_value(rectangles):
|
||||||
for rect in rects:
|
for rect in rectangles:
|
||||||
h = rect.get_height()
|
h = rect.get_height()
|
||||||
plt.text(rect.get_x() + rect.get_width() / 2., h, h,
|
plt.text(rect.get_x() + rect.get_width() / 2., h, h,
|
||||||
ha='center', va='bottom', rotation=self.text_rotation, fontsize=self.text_font)
|
ha='center', va='bottom', rotation=self.text_rotation, fontsize=self.text_font)
|
||||||
|
|
||||||
for data in self.data_set:
|
for _ in self.data_set:
|
||||||
if i > 0:
|
if i > 0:
|
||||||
br = br1
|
br = br1
|
||||||
br2 = [x + self.bar_width for x in br]
|
br2 = [x + self.bar_width for x in br]
|
||||||
@@ -132,14 +145,22 @@ class lf_bar_graph():
|
|||||||
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
|
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
|
||||||
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
|
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
|
||||||
if self.xaxis_categories[0] == 0:
|
if self.xaxis_categories[0] == 0:
|
||||||
plt.xticks(np.arange(0, len(self.xaxis_categories), step=self.xaxis_step),fontsize = self.xticks_font)
|
plt.xticks(np.arange(0,
|
||||||
|
len(self.xaxis_categories),
|
||||||
|
step=self.xaxis_step),
|
||||||
|
fontsize=self.xticks_font)
|
||||||
else:
|
else:
|
||||||
plt.xticks([i + self._xaxis_value_location for i in np.arange(0, len(self.data_set[0]), step=self.xaxis_step)],
|
plt.xticks([i + self._xaxis_value_location for i in np.arange(0, len(self.data_set[0]), step=self.xaxis_step)],
|
||||||
self.xaxis_categories, fontsize=self.xticks_font)
|
self.xaxis_categories, fontsize=self.xticks_font)
|
||||||
plt.legend(handles=self.legend_handles, loc=self.legend_loc, bbox_to_anchor=self.legend_box, ncol=self.legend_ncol, fontsize=self.legend_fontsize)
|
plt.legend(
|
||||||
|
handles=self.legend_handles,
|
||||||
|
loc=self.legend_loc,
|
||||||
|
bbox_to_anchor=self.legend_box,
|
||||||
|
ncol=self.legend_ncol,
|
||||||
|
fontsize=self.legend_fontsize)
|
||||||
plt.suptitle(self.title, fontsize=self.title_size)
|
plt.suptitle(self.title, fontsize=self.title_size)
|
||||||
plt.title(self.grp_title)
|
plt.title(self.grp_title)
|
||||||
fig = plt.gcf()
|
plt.gcf()
|
||||||
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
|
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
|
||||||
plt.close()
|
plt.close()
|
||||||
print("{}.png".format(self.graph_image_name))
|
print("{}.png".format(self.graph_image_name))
|
||||||
@@ -155,25 +176,32 @@ class lf_bar_graph():
|
|||||||
self.lf_csv.filename = f"{self.graph_image_name}.csv"
|
self.lf_csv.filename = f"{self.graph_image_name}.csv"
|
||||||
self.lf_csv.generate_csv()
|
self.lf_csv.generate_csv()
|
||||||
else:
|
else:
|
||||||
raise ValueError("Length and x-axis values and y-axis values should be same.")
|
raise ValueError(
|
||||||
|
"Length and x-axis values and y-axis values should be same.")
|
||||||
else:
|
else:
|
||||||
print("No Dataset Found")
|
print("No Dataset Found")
|
||||||
print("{}.csv".format(self.graph_image_name))
|
print("{}.csv".format(self.graph_image_name))
|
||||||
return "%s.png" % self.graph_image_name
|
return "%s.png" % self.graph_image_name
|
||||||
|
|
||||||
|
|
||||||
class lf_scatter_graph():
|
class lf_scatter_graph:
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
_x_data_set=["sta0 ", "sta1", "sta2", "sta3"],
|
_x_data_set=None,
|
||||||
_y_data_set=[[30, 55, 69, 37]],
|
_y_data_set=None,
|
||||||
_values=None,
|
_values=None,
|
||||||
_xaxis_name="x-axis",
|
_xaxis_name="x-axis",
|
||||||
_yaxis_name="y-axis",
|
_yaxis_name="y-axis",
|
||||||
_label=["num1", "num2"],
|
_label=None,
|
||||||
_graph_image_name="image_name1",
|
_graph_image_name="image_name1",
|
||||||
_color=["r", "y"],
|
_color=None,
|
||||||
_figsize=(9, 4),
|
_figsize=(9, 4),
|
||||||
_enable_csv=True):
|
_enable_csv=True):
|
||||||
|
if _x_data_set is None:
|
||||||
|
_x_data_set = ["sta0 ", "sta1", "sta2", "sta3"]
|
||||||
|
if _y_data_set is None:
|
||||||
|
_y_data_set = [[30, 55, 69, 37]]
|
||||||
|
if _label is None:
|
||||||
|
_label = ["num1", "num2"]
|
||||||
self.x_data_set = _x_data_set
|
self.x_data_set = _x_data_set
|
||||||
self.y_data_set = _y_data_set
|
self.y_data_set = _y_data_set
|
||||||
self.xaxis_name = _xaxis_name
|
self.xaxis_name = _xaxis_name
|
||||||
@@ -188,20 +216,38 @@ class lf_scatter_graph():
|
|||||||
|
|
||||||
def build_scatter_graph(self):
|
def build_scatter_graph(self):
|
||||||
if self.color is None:
|
if self.color is None:
|
||||||
self.color = ["orchid", "lime", "aquamarine", "royalblue", "darkgray", "maroon"]
|
self.color = [
|
||||||
fig = plt.subplots(figsize=self.figsize)
|
"orchid",
|
||||||
|
"lime",
|
||||||
|
"aquamarine",
|
||||||
|
"royalblue",
|
||||||
|
"darkgray",
|
||||||
|
"maroon"]
|
||||||
|
plt.subplots(figsize=self.figsize)
|
||||||
if self.values is None:
|
if self.values is None:
|
||||||
plt.scatter(self.x_data_set, self.y_data_set[0], color=self.color[0], label=self.label[0])
|
plt.scatter(
|
||||||
|
self.x_data_set,
|
||||||
|
self.y_data_set[0],
|
||||||
|
color=self.color[0],
|
||||||
|
label=self.label[0])
|
||||||
if len(self.y_data_set) > 1:
|
if len(self.y_data_set) > 1:
|
||||||
for i in range(1, len(self.y_data_set)):
|
for i in range(1, len(self.y_data_set)):
|
||||||
plt.scatter(self.x_data_set, self.y_data_set[i], color=self.color[i], label=self.label[i])
|
plt.scatter(
|
||||||
|
self.x_data_set,
|
||||||
|
self.y_data_set[i],
|
||||||
|
color=self.color[i],
|
||||||
|
label=self.label[i])
|
||||||
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
|
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
|
||||||
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
|
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
|
||||||
plt.gcf().autofmt_xdate()
|
plt.gcf().autofmt_xdate()
|
||||||
plt.legend()
|
plt.legend()
|
||||||
else:
|
else:
|
||||||
colours = ListedColormap(self.color)
|
colours = ListedColormap(self.color)
|
||||||
scatter = plt.scatter(self.x_data_set, self.y_data_set, c=self.values, cmap=colours)
|
scatter = plt.scatter(
|
||||||
|
self.x_data_set,
|
||||||
|
self.y_data_set,
|
||||||
|
c=self.values,
|
||||||
|
cmap=colours)
|
||||||
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
|
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
|
||||||
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
|
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
|
||||||
plt.gcf().autofmt_xdate()
|
plt.gcf().autofmt_xdate()
|
||||||
@@ -218,16 +264,20 @@ class lf_scatter_graph():
|
|||||||
return "%s.png" % self.graph_image_name
|
return "%s.png" % self.graph_image_name
|
||||||
|
|
||||||
|
|
||||||
class lf_stacked_graph():
|
class lf_stacked_graph:
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
_data_set=[[1, 2, 3, 4], [1, 1, 1, 1], [1, 1, 1, 1]],
|
_data_set=None,
|
||||||
_xaxis_name="Stations",
|
_xaxis_name="Stations",
|
||||||
_yaxis_name="Numbers",
|
_yaxis_name="Numbers",
|
||||||
_label=['Success', 'Fail'],
|
_label=None,
|
||||||
_graph_image_name="image_name2",
|
_graph_image_name="image_name2",
|
||||||
_color=["b", "g"],
|
_color=None,
|
||||||
_figsize=(9, 4),
|
_figsize=(9, 4),
|
||||||
_enable_csv=True):
|
_enable_csv=True):
|
||||||
|
if _data_set is None:
|
||||||
|
_data_set = [[1, 2, 3, 4], [1, 1, 1, 1], [1, 1, 1, 1]]
|
||||||
|
if _label is None:
|
||||||
|
_label = ['Success', 'Fail']
|
||||||
self.data_set = _data_set # [x_axis,y1_axis,y2_axis]
|
self.data_set = _data_set # [x_axis,y1_axis,y2_axis]
|
||||||
self.xaxis_name = _xaxis_name
|
self.xaxis_name = _xaxis_name
|
||||||
self.yaxis_name = _yaxis_name
|
self.yaxis_name = _yaxis_name
|
||||||
@@ -239,11 +289,21 @@ class lf_stacked_graph():
|
|||||||
self.lf_csv = lf_csv()
|
self.lf_csv = lf_csv()
|
||||||
|
|
||||||
def build_stacked_graph(self):
|
def build_stacked_graph(self):
|
||||||
fig = plt.subplots(figsize=self.figsize)
|
plt.subplots(figsize=self.figsize)
|
||||||
if self.color is None:
|
if self.color is None:
|
||||||
self.color = ["darkred", "tomato", "springgreen", "skyblue", "indigo", "plum"]
|
self.color = [
|
||||||
|
"darkred",
|
||||||
|
"tomato",
|
||||||
|
"springgreen",
|
||||||
|
"skyblue",
|
||||||
|
"indigo",
|
||||||
|
"plum"]
|
||||||
plt.bar(self.data_set[0], self.data_set[1], color=self.color[0])
|
plt.bar(self.data_set[0], self.data_set[1], color=self.color[0])
|
||||||
plt.bar(self.data_set[0], self.data_set[2], bottom=self.data_set[1], color=self.color[1])
|
plt.bar(
|
||||||
|
self.data_set[0],
|
||||||
|
self.data_set[2],
|
||||||
|
bottom=self.data_set[1],
|
||||||
|
color=self.color[1])
|
||||||
if len(self.data_set) > 3:
|
if len(self.data_set) > 3:
|
||||||
for i in range(3, len(self.data_set)):
|
for i in range(3, len(self.data_set)):
|
||||||
plt.bar(self.data_set[0], self.data_set[i],
|
plt.bar(self.data_set[0], self.data_set[i],
|
||||||
@@ -251,7 +311,7 @@ class lf_stacked_graph():
|
|||||||
plt.xlabel(self.xaxis_name)
|
plt.xlabel(self.xaxis_name)
|
||||||
plt.ylabel(self.yaxis_name)
|
plt.ylabel(self.yaxis_name)
|
||||||
plt.legend(self.label)
|
plt.legend(self.label)
|
||||||
plt.savefig("%s.png" % (self.graph_image_name), dpi=96)
|
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
|
||||||
plt.close()
|
plt.close()
|
||||||
print("{}.png".format(self.graph_image_name))
|
print("{}.png".format(self.graph_image_name))
|
||||||
if self.enable_csv:
|
if self.enable_csv:
|
||||||
@@ -259,23 +319,31 @@ class lf_stacked_graph():
|
|||||||
self.lf_csv.rows = self.data_set
|
self.lf_csv.rows = self.data_set
|
||||||
self.lf_csv.filename = f"{self.graph_image_name}.csv"
|
self.lf_csv.filename = f"{self.graph_image_name}.csv"
|
||||||
self.lf_csv.generate_csv()
|
self.lf_csv.generate_csv()
|
||||||
return "%s.png" % (self.graph_image_name)
|
return "%s.png" % self.graph_image_name
|
||||||
|
|
||||||
|
|
||||||
class lf_horizontal_stacked_graph():
|
class lf_horizontal_stacked_graph:
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
_seg=2,
|
_seg=2,
|
||||||
_yaxis_set=('A', 'B'),
|
_yaxis_set=('A', 'B'),
|
||||||
_xaxis_set1=[12, 0, 0, 16, 15],
|
_xaxis_set1=None,
|
||||||
_xaxis_set2=[23, 34, 23, 0],
|
_xaxis_set2=None,
|
||||||
_unit="%",
|
_unit="%",
|
||||||
_xaxis_name="Stations",
|
_xaxis_name="Stations",
|
||||||
_label=['Success', 'Fail'],
|
_label=None,
|
||||||
_graph_image_name="image_name3",
|
_graph_image_name="image_name3",
|
||||||
_color=["success", "Fail"],
|
_color=None,
|
||||||
_figsize=(9, 4),
|
_figsize=(9, 4),
|
||||||
_disable_xaxis=False,
|
_disable_xaxis=False,
|
||||||
_enable_csv=True):
|
_enable_csv=True):
|
||||||
|
if _xaxis_set1 is None:
|
||||||
|
_xaxis_set1 = [12, 0, 0, 16, 15]
|
||||||
|
if _xaxis_set2 is None:
|
||||||
|
_xaxis_set2 = [23, 34, 23, 0]
|
||||||
|
if _label is None:
|
||||||
|
_label = ['Success', 'Fail']
|
||||||
|
if _color is None:
|
||||||
|
_color = ["success", "Fail"]
|
||||||
self.unit = _unit
|
self.unit = _unit
|
||||||
self.seg = _seg
|
self.seg = _seg
|
||||||
self.xaxis_set1 = _xaxis_set1
|
self.xaxis_set1 = _xaxis_set1
|
||||||
@@ -303,8 +371,19 @@ class lf_horizontal_stacked_graph():
|
|||||||
ind = np.arange(n) + .15
|
ind = np.arange(n) + .15
|
||||||
width = 0.3
|
width = 0.3
|
||||||
|
|
||||||
rects1 = plt.barh(ind, values1, width, color=self.color[0], label=self.label[0])
|
plt.barh(
|
||||||
rects2 = plt.barh(ind, values2, width, left=sumzip(values1), color=self.color[1], label=self.label[1])
|
ind,
|
||||||
|
values1,
|
||||||
|
width,
|
||||||
|
color=self.color[0],
|
||||||
|
label=self.label[0])
|
||||||
|
plt.barh(
|
||||||
|
ind,
|
||||||
|
values2,
|
||||||
|
width,
|
||||||
|
left=sumzip(values1),
|
||||||
|
color=self.color[1],
|
||||||
|
label=self.label[1])
|
||||||
|
|
||||||
extra_space = 0.15
|
extra_space = 0.15
|
||||||
ax.set_yticks(ind + width - extra_space)
|
ax.set_yticks(ind + width - extra_space)
|
||||||
@@ -326,7 +405,12 @@ class lf_horizontal_stacked_graph():
|
|||||||
ax.spines['top'].set_visible(False)
|
ax.spines['top'].set_visible(False)
|
||||||
ax.legend(loc='upper right')
|
ax.legend(loc='upper right')
|
||||||
if self.disable_xaxis:
|
if self.disable_xaxis:
|
||||||
plt.tick_params(axis='x', which='both', bottom=False, top=False, labelbottom=False) # disable x-axis
|
plt.tick_params(
|
||||||
|
axis='x',
|
||||||
|
which='both',
|
||||||
|
bottom=False,
|
||||||
|
top=False,
|
||||||
|
labelbottom=False) # disable x-axis
|
||||||
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
|
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
|
||||||
plt.close()
|
plt.close()
|
||||||
print("{}.png".format(self.graph_image_name))
|
print("{}.png".format(self.graph_image_name))
|
||||||
@@ -338,21 +422,21 @@ class lf_horizontal_stacked_graph():
|
|||||||
return "%s.png" % self.graph_image_name
|
return "%s.png" % self.graph_image_name
|
||||||
|
|
||||||
|
|
||||||
class lf_line_graph():
|
class lf_line_graph:
|
||||||
def __init__(self,_data_set=[[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]],
|
def __init__(self, _data_set=None,
|
||||||
_xaxis_name="x-axis",
|
_xaxis_name="x-axis",
|
||||||
_yaxis_name="y-axis",
|
_yaxis_name="y-axis",
|
||||||
_xaxis_categories=[1, 2, 3, 4, 5],
|
_xaxis_categories=None,
|
||||||
_xaxis_label=["a", "b", "c", "d", "e"],
|
_xaxis_label=None,
|
||||||
_graph_title="",
|
_graph_title="",
|
||||||
_title_size=16,
|
_title_size=16,
|
||||||
_graph_image_name="image_name",
|
_graph_image_name="image_name",
|
||||||
_label=["bi-downlink", "bi-uplink", 'uplink'],
|
_label=None,
|
||||||
_font_weight='bold',
|
_font_weight='bold',
|
||||||
_color=['forestgreen', 'c', 'r', 'g', 'b', 'p'],
|
_color=None,
|
||||||
_figsize=(10, 5),
|
_figsize=(10, 5),
|
||||||
_xaxis_step = 5,
|
_xaxis_step=5,
|
||||||
_xticks_font = None,
|
_xticks_font=None,
|
||||||
_text_font=None,
|
_text_font=None,
|
||||||
_legend_handles=None,
|
_legend_handles=None,
|
||||||
_legend_loc="best",
|
_legend_loc="best",
|
||||||
@@ -362,6 +446,16 @@ class lf_line_graph():
|
|||||||
_marker=None,
|
_marker=None,
|
||||||
_dpi=96,
|
_dpi=96,
|
||||||
_enable_csv=False):
|
_enable_csv=False):
|
||||||
|
if _data_set is None:
|
||||||
|
_data_set = [[30.4, 55.3, 69.2, 37.1], [45.1, 67.2, 34.3, 22.4], [22.5, 45.6, 12.7, 34.8]]
|
||||||
|
if _xaxis_categories is None:
|
||||||
|
_xaxis_categories = [1, 2, 3, 4, 5]
|
||||||
|
if _xaxis_label is None:
|
||||||
|
_xaxis_label = ["a", "b", "c", "d", "e"]
|
||||||
|
if _label is None:
|
||||||
|
_label = ["bi-downlink", "bi-uplink", 'uplink']
|
||||||
|
if _color is None:
|
||||||
|
_color = ['forestgreen', 'c', 'r', 'g', 'b', 'p']
|
||||||
self.data_set = _data_set
|
self.data_set = _data_set
|
||||||
self.xaxis_name = _xaxis_name
|
self.xaxis_name = _xaxis_name
|
||||||
self.yaxis_name = _yaxis_name
|
self.yaxis_name = _yaxis_name
|
||||||
@@ -387,17 +481,27 @@ class lf_line_graph():
|
|||||||
self.legend_fontsize = _legend_fontsize
|
self.legend_fontsize = _legend_fontsize
|
||||||
|
|
||||||
def build_line_graph(self):
|
def build_line_graph(self):
|
||||||
fig = plt.subplots(figsize=self.figsize)
|
plt.subplots(figsize=self.figsize)
|
||||||
i = 0
|
i = 0
|
||||||
for data in self.data_set:
|
for data in self.data_set:
|
||||||
plt.plot(self.xaxis_categories, data, color=self.color[i], label=self.label[i], marker = self.marker)
|
plt.plot(
|
||||||
|
self.xaxis_categories,
|
||||||
|
data,
|
||||||
|
color=self.color[i],
|
||||||
|
label=self.label[i],
|
||||||
|
marker=self.marker)
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
|
plt.xlabel(self.xaxis_name, fontweight='bold', fontsize=15)
|
||||||
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
|
plt.ylabel(self.yaxis_name, fontweight='bold', fontsize=15)
|
||||||
plt.legend(handles=self.legend_handles, loc=self.legend_loc, bbox_to_anchor=self.legend_box, ncol=self.legend_ncol, fontsize=self.legend_fontsize)
|
plt.legend(
|
||||||
|
handles=self.legend_handles,
|
||||||
|
loc=self.legend_loc,
|
||||||
|
bbox_to_anchor=self.legend_box,
|
||||||
|
ncol=self.legend_ncol,
|
||||||
|
fontsize=self.legend_fontsize)
|
||||||
plt.suptitle(self.grp_title, fontsize=self.title_size)
|
plt.suptitle(self.grp_title, fontsize=self.title_size)
|
||||||
fig = plt.gcf()
|
plt.gcf()
|
||||||
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
|
plt.savefig("%s.png" % self.graph_image_name, dpi=96)
|
||||||
plt.close()
|
plt.close()
|
||||||
print("{}.png".format(self.graph_image_name))
|
print("{}.png".format(self.graph_image_name))
|
||||||
@@ -412,8 +516,46 @@ class lf_line_graph():
|
|||||||
print("{}.csv".format(self.graph_image_name))
|
print("{}.csv".format(self.graph_image_name))
|
||||||
return "%s.png" % self.graph_image_name
|
return "%s.png" % self.graph_image_name
|
||||||
|
|
||||||
# Unit Test
|
|
||||||
if __name__ == "__main__":
|
def main():
|
||||||
|
# arguments
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
prog='lf_graph.py',
|
||||||
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
|
epilog='''\
|
||||||
|
lf_graph.py : unit test in lf_graph.py for exercising the lf_graph.py library
|
||||||
|
''',
|
||||||
|
description='''\
|
||||||
|
-----------------
|
||||||
|
NAME: lf_graph.py
|
||||||
|
|
||||||
|
PURPOSE:
|
||||||
|
Common Library for generating graphs for LANforge output
|
||||||
|
|
||||||
|
SETUP:
|
||||||
|
/lanforge/html-reports directory needs to be present or output generated in local file
|
||||||
|
|
||||||
|
EXAMPLE:
|
||||||
|
see: /py-scripts/lf_report_test.py for example
|
||||||
|
|
||||||
|
COPYWRITE
|
||||||
|
Copyright 2021 Candela Technologies Inc
|
||||||
|
License: Free to distribute and modify. LANforge systems must be licensed.
|
||||||
|
|
||||||
|
INCLUDE_IN_README
|
||||||
|
---------------------
|
||||||
|
''')
|
||||||
|
parser.add_argument(
|
||||||
|
'--mgr',
|
||||||
|
'--lfmgr',
|
||||||
|
dest='lfmgr',
|
||||||
|
help='sample argument: where LANforge GUI is running',
|
||||||
|
default='localhost')
|
||||||
|
# the args parser is not really used , this is so the report is not generated when testing
|
||||||
|
# the imports with --help
|
||||||
|
args = parser.parse_args()
|
||||||
|
print("LANforge manager {lfmgr}".format(lfmgr=args.lfmgr))
|
||||||
|
|
||||||
output_html_1 = "graph_1.html"
|
output_html_1 = "graph_1.html"
|
||||||
output_pdf_1 = "graph_1.pdf"
|
output_pdf_1 = "graph_1.pdf"
|
||||||
|
|
||||||
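For readers trying out the refactored library, a minimal call might look like the sketch below. It sticks to constructor arguments visible in this diff and to the build_bar_graph() method exercised by the unit-test code that follows; the data values and image name are illustrative only:

import importlib

lf_graph = importlib.import_module("py-scripts.lf_graph")

graph = lf_graph.lf_bar_graph(_data_set=[[12.1, 18.4, 22.0, 25.6]],
                              _xaxis_name="Stations",
                              _yaxis_name="Throughput (Mbps)",
                              _xaxis_categories=[1, 2, 3, 4],
                              _label=["downlink"],
                              _graph_image_name="example_bar_graph",
                              _enable_csv=False)
print(graph.build_bar_graph())   # expected to write example_bar_graph.png and return the file name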
@@ -432,7 +574,8 @@ if __name__ == "__main__":
|
|||||||
# write logic to generate pdf here
|
# write logic to generate pdf here
|
||||||
# wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
|
# wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
|
||||||
# sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
|
# sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
|
||||||
options = {"enable-local-file-access": None} # prevent eerror Blocked access to file
|
# prevent eerror Blocked access to file
|
||||||
|
options = {"enable-local-file-access": None}
|
||||||
pdfkit.from_file(output_html_1, output_pdf_1, options=options)
|
pdfkit.from_file(output_html_1, output_pdf_1, options=options)
|
||||||
|
|
||||||
# test build_bar_graph setting values
|
# test build_bar_graph setting values
|
||||||
@@ -465,5 +608,11 @@ if __name__ == "__main__":
|
|||||||
# write logic to generate pdf here
|
# write logic to generate pdf here
|
||||||
# wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
|
# wget https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_amd64.deb
|
||||||
# sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
|
# sudo apt install ./wkhtmltox_0.12.6-1.focal_amd64.deb
|
||||||
options = {"enable-local-file-access": None} # prevent eerror Blocked access to file
|
# prevent eerror Blocked access to file
|
||||||
|
options = {"enable-local-file-access": None}
|
||||||
pdfkit.from_file(output_html_2, output_pdf_2, options=options)
|
pdfkit.from_file(output_html_2, output_pdf_2, options=options)
|
||||||
|
|
||||||
|
|
||||||
|
# Unit Test
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|||||||
8
py-scripts/lf_help_check.bash
Executable file
@@ -0,0 +1,8 @@
#!/bin/bash

FILES=`ls *.py`
for FILE in $FILES
do
    echo $FILE
    (timeout 10 python3 ./${FILE} --help > /dev/null && echo PASSED) || echo "FAILED ${FILE}"
done
239
py-scripts/lf_kpi_csv.py
Normal file
@@ -0,0 +1,239 @@
#!/usr/bin/env python3
"""
NAME: lf_kpi_csv.py

PURPOSE:
Common Library for generating kpi csv for LANforge output
KPI - Key Performance Indicators

SETUP:
None

EXAMPLE:


COPYWRITE
Copyright 2021 Candela Technologies Inc
License: Free to distribute and modify. LANforge systems must be licensed.

INCLUDE_IN_README
"""
# may need pandas if a data frame is passed in
# import pandas as pd
import csv
import time
import argparse

'''
Note: the delimiter for the kpi.csv is a tab.

kpi.csv : specific file that is used for the database, dashboard and blog post
A blank entry is a valid entry in some cases.

Date : date of run
test-rig : testbed that the tests are run on, for example ct_us_001
test-tag : test specific information to differentiate the test, LANforge radios used, security modes (wpa2, open)
dut-hw-version : hardware version of the device under test
dut-sw-version : software version of the device under test
dut-model-num : model number / name of the device under test
dut-serial-num : serial number of the device under test
test-priority : an arbitrary number; anything under 95 goes to the bottom of the blog report, higher priority goes to the top
test-id : script or test name, e.g. AP Auto, wifi capacity, data plane, dfs
short-description : short description of the test
pass/fail : set blank for performance tests
numeric-score : the value for the y-axis (the x-axis is a timestamp), the numeric value of what was measured
test-details : what was measured in the numeric-score, e.g. bits per second, bytes per second, upload speed, minimum cx time (ms)
Units : units used for the numeric-score
Graph-Group : items graphed together, used by the lf_qa.py dashboard
'''
|
|
||||||
|
|
||||||
|
class lf_kpi_csv:
|
||||||
|
def __init__(self,
|
||||||
|
_kpi_headers=None,
|
||||||
|
_kpi_filename='kpi.csv', # Currently this is the only file name accepted
|
||||||
|
_kpi_path="",
|
||||||
|
_kpi_test_rig="TEST_RIG",
|
||||||
|
_kpi_test_tag="TEST_TAG",
|
||||||
|
_kpi_dut_hw_version="HW_VERSION",
|
||||||
|
_kpi_dut_sw_version="SW_VERSION",
|
||||||
|
_kpi_dut_model_num="MODEL_NUM",
|
||||||
|
_kpi_dut_serial_num="SERIAL_NUM",
|
||||||
|
_kpi_test_id="TEST_ID"
|
||||||
|
):
|
||||||
|
if _kpi_headers is None:
|
||||||
|
_kpi_headers = ['Date', 'test-rig', 'test-tag', 'dut-hw-version', 'dut-sw-version', 'dut-model-num',
|
||||||
|
'dut-serial-num',
|
||||||
|
'test-priority', 'test-id', 'short-description', 'pass/fail', 'numeric-score',
|
||||||
|
'test details', 'Units', 'Graph-Group', 'Subtest-Pass', 'Subtest-Fail']
|
||||||
|
self.kpi_headers = _kpi_headers
|
||||||
|
self.kpi_filename = _kpi_filename
|
||||||
|
self.kpi_full_path = ''
|
||||||
|
self.kpi_file = ""
|
||||||
|
self.kpi_path = _kpi_path
|
||||||
|
self.kpi_test_rig = _kpi_test_rig
|
||||||
|
self.kpi_test_tag = _kpi_test_tag
|
||||||
|
self.kpi_dut_hw_version = _kpi_dut_hw_version
|
||||||
|
self.kpi_dut_sw_version = _kpi_dut_sw_version
|
||||||
|
self.kpi_dut_model_num = _kpi_dut_model_num
|
||||||
|
self.kpi_dut_serial_num = _kpi_dut_serial_num
|
||||||
|
self.kpi_test_id = _kpi_test_id
|
||||||
|
self.kpi_rows = ""
|
||||||
|
try:
|
||||||
|
print("self.kpi_path {kpi_path}".format(kpi_path=self.kpi_path))
|
||||||
|
print("self.kpi_filename {kpi_filename}".format(kpi_filename=self.kpi_filename))
|
||||||
|
if self.kpi_path == "":
|
||||||
|
kpifile = self.kpi_filename
|
||||||
|
else:
|
||||||
|
kpifile = self.kpi_path + '/' + self.kpi_filename
|
||||||
|
print("kpifile {kpifile}".format(kpifile=kpifile))
|
||||||
|
self.kpi_file = open(kpifile, 'w')
|
||||||
|
self.kpi_writer = csv.DictWriter(self.kpi_file, delimiter="\t", fieldnames=self.kpi_headers)
|
||||||
|
self.kpi_writer.writeheader()
|
||||||
|
except:
|
||||||
|
print("lf_kpi_csv.py: {} WARNING unable to open".format(self.kpi_file))
|
||||||
|
|
||||||
|
self.kpi_dict = dict()
|
||||||
|
self.kpi_dict['Date'] = '{date}'.format(date=int(time.time()))
|
||||||
|
self.kpi_dict['test-rig'] = '{test_rig}'.format(test_rig=self.kpi_test_rig)
|
||||||
|
self.kpi_dict['test-tag'] = '{test_tag}'.format(test_tag=self.kpi_test_tag)
|
||||||
|
self.kpi_dict['dut-hw-version'] = '{dut_hw_version}'.format(dut_hw_version=self.kpi_dut_hw_version)
|
||||||
|
self.kpi_dict['dut-sw-version'] = '{dut_sw_version}'.format(dut_sw_version=self.kpi_dut_sw_version)
|
||||||
|
self.kpi_dict['dut-model-num'] = '{dut_model_num}'.format(dut_model_num=self.kpi_dut_model_num)
|
||||||
|
self.kpi_dict['dut-serial-num'] = '{dut_serial_num}'.format(dut_serial_num=self.kpi_dut_serial_num)
|
||||||
|
self.kpi_dict['test-priority'] = ''
|
||||||
|
self.kpi_dict['test-id'] = '{test_id}'.format(test_id=self.kpi_test_id)
|
||||||
|
self.kpi_dict['short-description'] = ''
|
||||||
|
self.kpi_dict['pass/fail'] = ''
|
||||||
|
self.kpi_dict['numeric-score'] = ''
|
||||||
|
self.kpi_dict['test details'] = ''
|
||||||
|
self.kpi_dict['Units'] = ''
|
||||||
|
self.kpi_dict['Graph-Group'] = ''
|
||||||
|
self.kpi_dict['Subtest-Pass'] = ''
|
||||||
|
self.kpi_dict['Subtest-Fail'] = ''
|
||||||
|
|
||||||
|
def kpi_csv_get_dict_update_time(self):
|
||||||
|
self.kpi_dict['Date'] = '{date}'.format(date=round(time.time() * 1000))
|
||||||
|
return self.kpi_dict
|
||||||
|
|
||||||
|
def kpi_csv_write_dict(self, kpi_dict):
|
||||||
|
self.kpi_writer.writerow(kpi_dict)
|
||||||
|
self.kpi_file.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
# arguments
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
prog='lf_kpi_csv.py',
|
||||||
|
formatter_class=argparse.RawTextHelpFormatter,
|
||||||
|
epilog='''\
|
||||||
|
lf_kpi_csv.py : unit test in lf_kpi_csv.py for exersiging lf_kpi_csv.py library
|
||||||
|
''',
|
||||||
|
description='''\
|
||||||
|
lf_kpi_csv.py
|
||||||
|
-----------
|
||||||
|
|
||||||
|
Summary :
|
||||||
|
---------
|
||||||
|
lf_kpi_csv.py library :
|
||||||
|
|
||||||
|
Date: date of run
|
||||||
|
test-rig : testbed that the tests are run on for example ct_us_001
|
||||||
|
test-tag : test specific information to differenciate the test, LANforge radios used, security modes (wpa2 , open)
|
||||||
|
dut-hw-version : hardware version of the device under test
|
||||||
|
dut-sw-version : software version of the device under test
|
||||||
|
dut-model-num : model number / name of the device under test
|
||||||
|
dut-serial-num : serial number / serial number of the device under test
|
||||||
|
test-priority : test-priority is arbitrary number, choosing under 95 means it goes down at bottom of blog report, and higher priority goes at top.
|
||||||
|
test-id : script or test name , AP Auto, wifi capacity, data plane, dfs
|
||||||
|
short-description : short description of the test
|
||||||
|
pass/fail : set blank for performance tests
|
||||||
|
numeric-score : this is the value for the y-axis (x-axis is a timestamp), numeric value of what was measured
|
||||||
|
test-details : what was measured in the numeric-score, e.g. bits per second, bytes per second, upload speed, minimum cx time (ms)
|
||||||
|
Units : units used for the numeric-scort
|
||||||
|
Graph-Group - Items graphed together used by dashboard, For the lf_qa.py dashboard
|
||||||
|
|
||||||
|
Example :
|
||||||
|
|
||||||
|
This module is included to assist in filling out the kpi.csv correctly
|
||||||
|
The Unit test is used for helping to become familiar with the library
|
||||||
|
|
||||||
|
---------
|
||||||
|
''')
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
'--local_lf_report_dir',
|
||||||
|
help='--local_lf_report_dir override the report path, primary use when running test in test suite',
|
||||||
|
default="")
|
||||||
|
parser.add_argument("--test_rig", default="lanforge",
|
||||||
|
help="test rig for kpi.csv, testbed that the tests are run on")
|
||||||
|
parser.add_argument("--test_tag", default="kpi_generation",
|
||||||
|
help="test tag for kpi.csv, test specific information to differenciate the test")
|
||||||
|
parser.add_argument("--dut_hw_version", default="hw_01",
|
||||||
|
help="dut hw version for kpi.csv, hardware version of the device under test")
|
||||||
|
parser.add_argument("--dut_sw_version", default="sw_01",
|
||||||
|
help="dut sw version for kpi.csv, software version of the device under test")
|
||||||
|
parser.add_argument("--dut_model_num", default="can_ap",
|
||||||
|
help="dut model for kpi.csv, model number / name of the device under test")
|
||||||
|
parser.add_argument("--test_priority", default="95",
|
||||||
|
help="dut model for kpi.csv, test-priority is arbitrary number")
|
||||||
|
parser.add_argument("--test_id", default="kpi_unit_test", help="test-id for kpi.csv, script or test name")
|
||||||
|
'''
|
||||||
|
Other values that are included in the kpi.csv row.
|
||||||
|
short-description : short description of the test
|
||||||
|
pass/fail : set blank for performance tests
|
||||||
|
numeric-score : this is the value for the y-axis (x-axis is a timestamp), numeric value of what was measured
|
||||||
|
test details : what was measured in the numeric-score, e.g. bits per second, bytes per second, upload speed, minimum cx time (ms)
|
||||||
|
Units : units used for the numeric-scort
|
||||||
|
Graph-Group - For the lf_qa.py dashboard
|
||||||
|
'''
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
# Get the report path to create the kpi.csv path
|
||||||
|
# kpi_path = report.get_report_path() in normal use case would get from lf_report.py library
|
||||||
|
kpi_csv = lf_kpi_csv(
|
||||||
|
_kpi_path=args.local_lf_report_dir,
|
||||||
|
_kpi_test_rig=args.test_rig,
|
||||||
|
_kpi_test_tag=args.test_tag,
|
||||||
|
_kpi_dut_hw_version=args.dut_hw_version,
|
||||||
|
_kpi_dut_sw_version=args.dut_sw_version,
|
||||||
|
_kpi_dut_model_num=args.dut_model_num,
|
||||||
|
_kpi_test_id=args.test_id)
|
||||||
|
|
||||||
|
results_dict = kpi_csv.kpi_dict
|
||||||
|
|
||||||
|
results_dict['Graph-Group'] = "graph_group"
|
||||||
|
results_dict['short-description'] = "short_description"
|
||||||
|
results_dict['numeric-score'] = "100"
|
||||||
|
results_dict['Units'] = "Mbps"
|
||||||
|
|
||||||
|
print("results_dict {results_dict}".format(results_dict=results_dict))
|
||||||
|
print("date {date}".format(date=results_dict['Date']))
|
||||||
|
|
||||||
|
kpi_csv.kpi_csv_write_dict(results_dict)
|
||||||
|
|
||||||
|
# reuse the dictionary
|
||||||
|
results_dict['Graph-Group'] = "graph_group_1_5"
|
||||||
|
results_dict['short-description'] = "short_description_1_5"
|
||||||
|
results_dict['numeric-score'] = "99"
|
||||||
|
results_dict['Units'] = "Mbps"
|
||||||
|
|
||||||
|
kpi_csv.kpi_csv_write_dict(results_dict)
|
||||||
|
|
||||||
|
# append to a row to the existing dictionary
|
||||||
|
results_dict_2 = kpi_csv.kpi_dict
|
||||||
|
# modify an entry
|
||||||
|
results_dict_2['test-tag'] = 'kpi_generation_2'
|
||||||
|
results_dict_2['Graph-Group'] = "graph_group"
|
||||||
|
results_dict_2['short-description'] = "short_description"
|
||||||
|
results_dict_2['numeric-score'] = "100"
|
||||||
|
results_dict_2['Units'] = "Mbps"
|
||||||
|
print("results_dict_2 {results_dict_2}".format(results_dict_2=results_dict_2))
|
||||||
|
print("date 2 {date}".format(date=results_dict_2['Date']))
|
||||||
|
kpi_csv.kpi_csv_write_dict(results_dict_2)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
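For orientation, a minimal sketch of how a test script would drive this library, following the unit test above; the report directory path is a hypothetical value for illustration:

```python
# Sketch only: constructor parameters and methods mirror the lf_kpi_csv listing above.
kpi_csv = lf_kpi_csv(
    _kpi_path="/home/lanforge/html-reports/example_run",  # hypothetical report dir
    _kpi_test_rig="ct_us_001",
    _kpi_test_tag="wpa2_2g",
    _kpi_test_id="wifi capacity")

row = kpi_csv.kpi_csv_get_dict_update_time()  # refreshes the Date column (ms since epoch)
row['short-description'] = "udp download"
row['numeric-score'] = "512.3"
row['Units'] = "Mbps"
row['Graph-Group'] = "Per Station Throughput"
kpi_csv.kpi_csv_write_dict(row)               # appends one tab-delimited row to kpi.csv
```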
py-scripts/lf_mesh_test.py
@@ -146,14 +146,22 @@ class MeshTest(cvtest):
                 upload_speed="56Kbps",
                 download_speed="85%",
                 duration="60s",
-                enables=[],
+                enables=None,
-                disables=[],
+                disables=None,
-                raw_lines=[],
+                raw_lines=None,
                 raw_lines_file="",
-                sets=[],
+                sets=None,
                 ):
        super().__init__(lfclient_host=lf_host, lfclient_port=lf_port)
+
+       if enables is None:
+           enables = []
+       if disables is None:
+           disables = []
+       if raw_lines is None:
+           raw_lines = []
+       if sets is None:
+           sets = []
        self.lf_host = lf_host
        self.lf_port = lf_port
        self.lf_user = lf_user

@@ -225,14 +233,14 @@ def main():
    Open this file in an editor and read the top notes for more details.

    Example:
-   ./lf_mesh_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \
+   ./lf_mesh_test.py --mgr localhost --port 8080 --lf_user lanforge --lf_password lanforge \\
-      --instance_name mesh-instance --config_name test_con --upstream 1.1.eth1 \
+      --instance_name mesh-instance --config_name test_con --upstream 1.1.eth1 \\
-      --raw_line 'selected_dut2: RootAP wactest 08:36:c9:19:47:40 (1)' \
+      --raw_line 'selected_dut2: RootAP wactest 08:36:c9:19:47:40 (1)' \\
-      --raw_line 'selected_dut5: RootAP wactest 08:36:c9:19:47:50 (2)' \
+      --raw_line 'selected_dut5: RootAP wactest 08:36:c9:19:47:50 (2)' \\
-      --duration 15s \
+      --duration 15s \\
-      --download_speed 85% --upload_speed 56Kbps \
+      --download_speed 85% --upload_speed 56Kbps \\
-      --raw_line 'velocity: 100' \
+      --raw_line 'velocity: 100' \\
-      --raw_lines_file example-configs/mesh-ferndale-cfg.txt \
+      --raw_lines_file example-configs/mesh-ferndale-cfg.txt \\
       --test_rig Ferndale-Mesh-01 --pull_report

    NOTE: There is quite a lot of config needed, see example-configs/mesh-ferndale-cfg.txt

@@ -246,9 +254,9 @@ def main():

    parser.add_argument("-u", "--upstream", type=str, default="",
                        help="Upstream port for wifi capacity test ex. 1.1.eth2")
+   # argparse uses the % formatting so use %%
    parser.add_argument("--download_speed", default="",
-                       help="Specify requested download speed. Percentage of theoretical is also supported. Default: 85%")
+                       help="Specify requested download speed. Percentage of theoretical is also supported. Default: 85%%")
    parser.add_argument("--upload_speed", default="",
                        help="Specify requested upload speed. Percentage of theoretical is also supported. Default: 0")
    parser.add_argument("--duration", default="",
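The first MeshTest hunk replaces list defaults with None and rebuilds the lists inside __init__. This is the standard fix for Python's shared mutable default arguments; a small self-contained illustration (not from the commit) of why the change matters:

```python
# Default argument values are evaluated once, so a list default is shared
# across every call that omits the argument.
def collect_bad(item, bucket=[]):       # the pattern the hunk removes
    bucket.append(item)
    return bucket

def collect_good(item, bucket=None):    # the pattern MeshTest now uses
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(collect_bad(1), collect_bad(2))     # [1, 2] [1, 2] -- surprising shared state
print(collect_good(1), collect_good(2))   # [1] [2]
```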
py-scripts/lf_multipsk.py
@@ -45,7 +45,7 @@ Realm = realm.Realm
class MultiPsk(Realm):
    def __init__(self,
                 host=None,
-                port=None,
+                port=8080,
                 ssid=None,
                 input=None,
                 security=None,

@@ -57,8 +57,10 @@ class MultiPsk(Realm):
                 sta_prefix="sta",
                 debug_=False,
                 ):
-       self.host = host
+       super().__init__(lfclient_host=host,
-       self.port = port
+                        lfclient_port=port),
+       self.lfclient_host = host
+       self.lfclient_port = port
        self.ssid = ssid
        self.input = input
        self.security = security

@@ -69,8 +71,7 @@ class MultiPsk(Realm):
        self.resource = resource
        self.sta_prefix = sta_prefix
        self.debug = debug_
-       self.local_realm = realm.Realm(lfclient_host=self.host, lfclient_port=self.port)
-       self.station_profile = self.local_realm.new_station_profile()
+       self.station_profile = self.new_station_profile()

    def build(self):
        station_list = []

@@ -84,30 +85,30 @@ class MultiPsk(Realm):
        else:
            station_list = LFUtils.portNameSeries(prefix_="sta", start_id_=self.start_id,
                                                  end_id_=input['num_station'] - 1, padding_number_=100,
-                                                 radio=input['radio'])
+                                                 radio=self.radio)
        # implementation for non vlan pending ****
        print("creating stations")
-       self.station_profile.use_security(self.security, self.ssid, str(input['password']))
+       self.station_profile.use_security(self.security, self.ssid, self.passwd)
        self.station_profile.set_command_flag("add_sta", "create_admin_down", 1)
        self.station_profile.set_command_param("set_port", "report_timer", 1500)
        self.station_profile.set_command_flag("set_port", "rpt_timer", 1)
-       self.station_profile.create(radio=input['radio'], sta_names_=station_list, debug=self.local_realm.debug)
+       self.station_profile.create(radio=self.radio, sta_names_=station_list, debug=self.debug)
-       self.local_realm.wait_until_ports_appear(sta_list=station_list)
+       self.wait_until_ports_appear(sta_list=station_list)
        self.station_profile.admin_up()
-       if self.local_realm.wait_for_ip(station_list, timeout_sec=120):
+       if self.wait_for_ip(station_list, timeout_sec=120):
            print("All stations got IPs")
        else:
            print("Stations failed to get IPs")

        print("create udp endp")
-       self.cx_profile_udp = self.local_realm.new_l3_cx_profile()
+       self.cx_profile_udp = self.new_l3_cx_profile()
        self.cx_profile_udp.side_a_min_bps = 128000
        self.cx_profile_udp.side_b_min_bps = 128000
        self.cx_profile_udp.side_a_min_pdu = 1200
        self.cx_profile_udp.side_b_min_pdu = 1500
        self.cx_profile_udp.report_timer = 1000
        self.cx_profile_udp.name_prefix = "udp"
-       port_list = list(self.local_realm.find_ports_like("%s+" % self.sta_prefix))
+       port_list = list(self.find_ports_like("%s+" % self.sta_prefix))
        # print("port list", port_list)
        if (port_list is None) or (len(port_list) < 1):
            raise ValueError("Unable to find ports named '%s'+" % self.sta_prefix)

@@ -118,13 +119,13 @@ class MultiPsk(Realm):

        # Create TCP endpoints
        print("create tcp endp")
-       self.l3_tcp_profile = self.local_realm.new_l3_cx_profile()
+       self.l3_tcp_profile = self.new_l3_cx_profile()
        self.l3_tcp_profile.side_a_min_bps = 128000
        self.l3_tcp_profile.side_b_min_bps = 56000
        self.l3_tcp_profile.name_prefix = "tcp"
        self.l3_tcp_profile.report_timer = 1000
        self.l3_tcp_profile.create(endp_type="lf_tcp",
-                                  side_a=list(self.local_realm.find_ports_like("%s+" % self.sta_prefix)),
+                                  side_a=list(self.find_ports_like("%s+" % self.sta_prefix)),
                                   side_b="%d.%s" % (self.resource, input['upstream']),
                                   suppress_related_commands=True)

@@ -140,7 +141,7 @@ class MultiPsk(Realm):
            if "." in i['upstream']:
                # print(str(i['upstream']) + " is a vlan upstream port")
                print("checking its ip ..")
-               data = self.local_realm.json_get("ports/list?fields=IP")
+               data = self.json_get("ports/list?fields=IP")
                for val in data["interfaces"]:
                    for j in val:
                        if "1." + str(self.resource) + "." + str(i['upstream']) == j:

@@ -157,7 +158,7 @@ class MultiPsk(Realm):
            if "." not in i['upstream']:
                # print(str(i['upstream']) + " is not an vlan upstream port")
                print("checking its ip ..")
-               data = self.local_realm.json_get("ports/list?fields=IP")
+               data = self.json_get("ports/list?fields=IP")
                for val in data["interfaces"]:
                    for j in val:
                        if "1." + str(self.resource) + "." + str(i['upstream']) == j:

@@ -168,11 +169,8 @@ class MultiPsk(Realm):
        return non_vlan_ips

    def get_sta_ip(self):
-       # this function gives station ip dict eg{'eth2.100': '172.17.0.100'}
-       # self.input = [{'password': 'lanforge1', 'upstream': 'eth2.100', 'mac': '', 'num_station': 1, 'radio': 'wiphy4'}, {'password': 'lanforge2', 'upstream': 'eth2.200', 'mac': '', 'num_station': 1, 'radio': 'wiphy4'}, {'password': 'lanforge3', 'upstream': 'eth2', 'mac': '', 'num_station': 1, 'radio': 'wiphy0'}]
-       # port_list = ['1.1.sta200', '1.1.sta00', '1.1.sta100']
        station_ip = {}
-       port_list = list(self.local_realm.find_ports_like("%s+" % self.sta_prefix))
+       port_list = list(self.find_ports_like("%s+" % self.sta_prefix))
        # print("port list", port_list)
        # port list ['1.1.sta200', '1.1.sta00', '1.1.sta100']
        for name, id in zip(port_list, self.input):

@@ -182,7 +180,7 @@ class MultiPsk(Realm):
            # print(x)

            if name == "1." + str(self.resource) + ".sta" + str(x):
-               data = self.local_realm.json_get("ports/list?fields=IP")
+               data = self.json_get("ports/list?fields=IP")
                for i in data["interfaces"]:
                    # print(i)
                    for j in i:

@@ -227,7 +225,7 @@ class MultiPsk(Realm):
            # print(x)

            if name == "1." + str(self.resource) + ".sta" + str(x):
-               data = self.local_realm.json_get("ports/list?fields=IP")
+               data = self.json_get("ports/list?fields=IP")
                for i in data["interfaces"]:
                    # print(i)
                    for j in i:

@@ -241,7 +239,7 @@ class MultiPsk(Realm):
    def get_non_vlan_sta_ip(self):
        station_nonvlan_ip = {}
        x = ""
-       port_list = list(self.local_realm.find_ports_like("%s+" % self.sta_prefix))
+       port_list = list(self.find_ports_like("%s+" % self.sta_prefix))
        # print("port list", port_list)
        for id in self.input:
            if "." not in id['upstream']:

@@ -249,7 +247,7 @@ class MultiPsk(Realm):
            # print(x)
            for name in port_list:
                if name == "1.1.sta00":
-                   data = self.local_realm.json_get("ports/list?fields=IP")
+                   data = self.json_get("ports/list?fields=IP")
                    for i in data["interfaces"]:
                        # print(i)
                        for j in i:

@@ -270,11 +268,10 @@ class MultiPsk(Realm):
                y = station_ip[j].split('.')
                if x[0] == y[0] and x[1] == y[1]:
                    print("station got ip from vlan")
-                   x = "Pass"
+                   return "Pass"
                else:
                    print("station did not got ip from vlan")
-                   x = "Fail"
+                   return "Fail"
-       return x

    def compare_nonvlan_ip_nat(self):
        non_vlan_sta_ip = self.get_non_vlan_sta_ip()

@@ -312,27 +309,22 @@ class MultiPsk(Realm):
        self.cx_profile_udp.cleanup()
        self.l3_tcp_profile.cleanup()
        self.station_profile.cleanup()
-       LFUtils.wait_until_ports_disappear(base_url=self.local_realm.lfclient_url, port_list=self.station_profile.station_names,
+       LFUtils.wait_until_ports_disappear(base_url=self.lfclient_host, port_list=self.station_profile.station_names,
                                           debug=self.debug)
        print("Test Completed")


def main():
-   parser = argparse.ArgumentParser(
+   parser = Realm.create_basic_argparse(
        prog="lf_multipsk.py",
        formatter_class=argparse.RawTextHelpFormatter,
        description="lanforge webpage download Test Script")
-   parser.add_argument('--mgr', help='hostname for where LANforge GUI is running', default='localhost')
-   parser.add_argument('--mgr_port', help='port LANforge GUI HTTP service is running on', default=8080)
-   parser.add_argument('--ssid', help='WiFi SSID for client to associate to')
-   parser.add_argument('--security', help='WiFi Security protocol: {open|wep|wpa2|wpa3', default="wpa2")
-   parser.add_argument('--mode', help="specify mode of ap eg BRIDGE or NAT", default="BRIDGE")
    parser.add_argument('--n_vlan', help="type number of vlan using in test eg 1 or 2", default=1)
-   # parser.add_argument('--input', nargs="+", help="specify list of parameters like passwords,upstream,mac address, number of clients and radio as input, eg password@123,eth2.100,"",1,wiphy0 lanforge@123,eth2.100,"",1,wiphy1")
+   parser.add_argument('--mode', help="Mode for lf_multipsk", default=None)
    args = parser.parse_args()

    input_data = [{
-       "password": "lanforge1",
+       "password": args.passwd,
        "upstream": "eth2.100",
        "mac": "",
        "num_station": 1,

@@ -364,8 +356,11 @@ def main():
    multi_obj = MultiPsk(host=args.mgr,
                         port=args.mgr_port,
                         ssid=args.ssid,
+                        passwd=args.passwd,
                         input=input_data,
-                        security=args.security)
+                        security=args.security,
+                        debug_=args.debug,
+                        radio=args.radio)

    multi_obj.build()
    multi_obj.start()
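The MultiPsk hunks replace the embedded realm.Realm helper object with direct inheritance, so every self.local_realm.xxx() call becomes self.xxx(). A generic, runnable illustration of that refactor pattern (the Realm stub below is a stand-in, not the LANforge class):

```python
# Composition-to-inheritance refactor, in miniature.
class Realm:
    def new_station_profile(self):
        return "station-profile"

class Before:
    def __init__(self, host):
        self.local_realm = Realm()                                  # delegate object
        self.station_profile = self.local_realm.new_station_profile()

class After(Realm):                                                 # inherit instead
    def __init__(self, host):
        super().__init__()
        self.station_profile = self.new_station_profile()           # call directly

print(Before("localhost").station_profile, After("localhost").station_profile)
```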
76 py-scripts/lf_port_probe.py Executable file
@@ -0,0 +1,76 @@
#!/usr/bin/env python3
import json
import os
import pprint
import sys

if sys.version_info[0] != 3:
    print("This script requires Python 3")
    exit(1)

if 'py-json' not in sys.path:
    sys.path.append(os.path.join(os.path.abspath('..'), 'py-json'))

from time import sleep
from LANforge.lfcli_base import LFCliBase


# see https://stackoverflow.com/questions/9295439/python-json-loads-fails-with-valueerror-invalid-control-character-at-line-1-c/16544933#16544933
# re-load and reexport JSON with strict=False?

class ProbePort2(LFCliBase):
    def __init__(self,
                 lfhost=None,
                 lfport=None,
                 debug=False,
                 eid_str=None):
        super().__init__(_lfjson_host=lfhost,
                         _lfjson_port=lfport,
                         _debug=debug)
        hunks = eid_str.split(".")
        self.probepath = "/probe/1/%s/%s" % (hunks[-2], hunks[-1])
        # self.decoder = json.JSONDecoder()

    def run(self):
        self.json_post(self.probepath, {})
        sleep(0.2)
        response = self.json_get(self.probepath)
        if not response:
            print("problem probing port %s" % self.probepath)
            exit(1)
        # pprint.pprint(response)
        if "probe-results" not in response:
            print("problem probing port %s" % self.probepath)
            exit(1)

        probe_res = response["probe-results"][0]
        # pprint.pprint(probe_res)
        for (key, value) in probe_res.items():
            # probe_results = [key]
            print("port " + key)
            # pprint.pprint(value['probe results'])
            xlated_results = str(value['probe results']).replace("\\n", "\n").replace("\\r", "\r").replace("\\t", "\t")
            print(xlated_results)


def main():
    parser = LFCliBase.create_bare_argparse(
        prog=__name__,
        description='''\
Example:
./port_probe.py --port 1.1.eth0
''')

    parser.add_argument('--mode', help='Used to force mode of stations')
    parser.add_argument('--port_eid', help='EID of station to be used', default="1.1.eth0")

    args = parser.parse_args()
    probe = ProbePort2(lfhost=args.mgr,
                       lfport=args.mgr_port,
                       debug=args.debug,
                       eid_str=args.port_eid)
    probe.run()


if __name__ == "__main__":
    main()
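ProbePort2 posts to /probe/1/&lt;resource&gt;/&lt;port&gt; (built from the last two fields of the EID), waits briefly, then fetches and prints the decoded probe text. A hedged usage sketch with placeholder values, assuming the class is imported with the same py-json path setup as the script above:

```python
# Placeholder host and EID; shelf.resource.port is the expected EID form.
probe = ProbePort2(lfhost="192.168.100.209",   # hypothetical LANforge manager
                   lfport=8080,
                   debug=False,
                   eid_str="1.1.wlan0")
probe.run()                                    # prints "port ..." plus the probe output
```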
py-scripts/lf_report.py
@@ -1,20 +1,20 @@
#!/usr/bin/env python3

-'''
+"""
NAME: lf_report.py

PURPOSE:

This program is a helper class for reporting results for a lanforge python script.
The class will generate an output directory based on date and time in /home/lanforge/html-reports/ .
If the reports-data is not present then the date and time directory will be created in the current directory.
The banner and Candela Technology logo will be copied into the results directory.
The results directory and many of the other parameters may be overridden during construction.
Creating the date time directory on construction was a design choice.

EXAMPLE:

This is a helper class; a unit test is included at the bottom of the file.
To test lf_report.py and lf_graph.py together use the lf_report_test.py file

LICENSE:

@@ -23,7 +23,7 @@ LICENSE:

INCLUDE_IN_README
-'''
+"""
# CAUTION: adding imports to this file which are not in update_dependencies.py is not advised
import os
import shutil

@@ -31,25 +31,27 @@ import datetime

import pandas as pd
import pdfkit
+import argparse


# internal candela references included during initial phases, to be deleted at future date
# https://candelatech.atlassian.net/wiki/spaces/LANFORGE/pages/372703360/Scripting+Data+Collection+March+2021
# base report class
-class lf_report():
+
+class lf_report:
    def __init__(self,
                 # _path the report directory under which the report directories will be created.
                 _path="/home/lanforge/html-reports",
                 _alt_path="",
                 _date="",
-                _title="LANForge Test Run Heading",
+                _title="LANForge Unit Test Run Heading",
                 _table_title="LANForge Table Heading",
                 _graph_title="LANForge Graph Title",
                 _obj="",
                 _obj_title="",
                 _output_html="outfile.html",
                 _output_pdf="outfile.pdf",
-                _results_dir_name="LANforge_Test_Results",
+                _results_dir_name="LANforge_Test_Results_Unit_Test",
                 _output_format='html',  # pass in on the write functionality, current not used
                 _dataframe="",
                 _path_date_time="",

@@ -76,6 +78,7 @@ class lf_report():
        self.output_html = _output_html
        self.path_date_time = _path_date_time
        self.write_output_html = ""
+       self.write_output_index_html = ""
        self.output_pdf = _output_pdf
        self.write_output_pdf = ""
        self.banner_html = ""

@@ -271,6 +274,17 @@ class lf_report():
            print("write_html failed")
        return self.write_output_html

+   def write_index_html(self):
+       self.write_output_index_html = str(self.path_date_time) + '/' + str("index.html")
+       print("write_output_index_html: {}".format(self.write_output_index_html))
+       try:
+           test_file = open(self.write_output_index_html, "w")
+           test_file.write(self.html)
+           test_file.close()
+       except:
+           print("write_index_html failed")
+       return self.write_output_index_html
+
    def write_html_with_timestamp(self):
        self.write_output_html = "{}/{}-{}".format(self.path_date_time, self.date, self.output_html)
        print("write_output_html: {}".format(self.write_output_html))

@@ -456,7 +470,7 @@ class lf_report():
        setup_information = """
            <!-- Test Setup Information -->
            <table width='700px' border='1' cellpadding='2' cellspacing='0' style='border-top-color: gray; border-top-style: solid; border-top-width: 1px; border-right-color: gray; border-right-style: solid; border-right-width: 1px; border-bottom-color: gray; border-bottom-style: solid; border-bottom-width: 1px; border-left-color: gray; border-left-style: solid; border-left-width: 1px'>

                <tr>
                    <td>""" + str(value) + """</td>
                    <td>

@@ -496,7 +510,7 @@ class lf_report():
function fallbackCopyTextToClipboard(text) {
    var textArea = document.createElement("textarea");
    textArea.value = text;

    // Avoid scrolling to bottom
    textArea.style.top = "0";
    textArea.style.left = "0";

@@ -561,6 +575,16 @@ function copyTextToClipboard(ele) {

# Unit Test
if __name__ == "__main__":
+   parser = argparse.ArgumentParser(
+       prog="lf_report.py",
+       formatter_class=argparse.RawTextHelpFormatter,
+       description="Reporting library Unit Test")
+   parser.add_argument('--lfmgr', help='sample argument: where LANforge GUI is running', default='localhost')
+   # the args parser is not really used; it is here so the report is not generated
+   # when testing the imports with --help
+   args = parser.parse_args()
+   print("LANforge manager {lfmgr}".format(lfmgr=args.lfmgr))
+
    # Testing: generate data frame
    dataframe = pd.DataFrame({
        'product': ['CT521a-264-1ac-1n', 'CT521a-1ac-1ax', 'CT522-264-1ac2-1n', 'CT523c-2ac2-db-10g-cu',

@@ -573,7 +597,7 @@ if __name__ == "__main__":

    print(dataframe)

    # Testing: generate data frame
    dataframe2 = pd.DataFrame({
        'station': [1, 2, 3, 4, 5, 6, 7],
        'time_seconds': [23, 78, 22, 19, 45, 22, 25]

@@ -605,4 +629,3 @@ if __name__ == "__main__":
    report.write_pdf()

    print("report path {}".format(report.get_path()))
-
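A rough sketch (not from the commit) of where the new write_index_html() hook fits in a reporting flow; the constructor arguments are the defaults shown in the diff and the report-building steps are elided:

```python
report = lf_report(_results_dir_name="LANforge_Test_Results_Unit_Test",
                   _output_html="outfile.html",
                   _output_pdf="outfile.pdf")
# ... assemble banner, title, tables and graphs with the class's build_* helpers ...
report.write_html()          # writes outfile.html inside the dated results directory
report.write_index_html()    # writes the same content as index.html for web browsing
report.write_pdf()
print("report path {}".format(report.get_path()))
```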
py-scripts/lf_report_test.py
@@ -2,13 +2,13 @@
'''
NAME: lf_report_test.py

PURPOSE:
Common file for testing the lf_report and lf_graph libraries; generates html and pdf output

SETUP:
/lanforge/html-reports directory needs to be present or output generated in local file

EXAMPLE:
./lf_report_test.py : currently script does not accept input

COPYRIGHT

@@ -26,8 +26,9 @@ import numpy as np
import pandas as pd
import pdfkit
import random
+import argparse


sys.path.append(os.path.join(os.path.abspath(__file__ + "../../../")))

lf_report = importlib.import_module("py-scripts.lf_report")

@@ -38,10 +39,45 @@ lf_scatter_graph = lf_graph.lf_scatter_graph
lf_stacked_graph = lf_graph.lf_stacked_graph
lf_horizontal_stacked_graph = lf_graph.lf_horizontal_stacked_graph


# Unit Test
-if __name__ == "__main__":
+
+def main():
    # Testing: generate data frame
+   parser = argparse.ArgumentParser(
+       prog="lf_report_test.py",
+       formatter_class=argparse.RawTextHelpFormatter,
+       description='''\
+-----------------
+NAME: lf_report_test.py
+
+PURPOSE:
+Common file for testing the lf_report and lf_graph libraries; generates html and pdf output
+
+SETUP:
+/lanforge/html-reports directory needs to be present or output generated in local file
+
+EXAMPLE:
+./lf_report_test.py : currently script does not accept input
+
+COPYRIGHT
+Copyright 2021 Candela Technologies Inc
+License: Free to distribute and modify. LANforge systems must be licensed.
+
+INCLUDE_IN_README
+''')
+
+   parser.add_argument(
+       '--mgr',
+       '--lfmgr',
+       dest='lfmgr',
+       help='sample argument: where LANforge GUI is running',
+       default='localhost')
+   # the args parser is not really used; it is here so the report is not generated
+   # when testing the imports with --help
+   args = parser.parse_args()
+   print("LANforge manager {lfmgr}".format(lfmgr=args.lfmgr))
+
    dataframe = pd.DataFrame({
        'product': ['CT521a-264-1ac-1n', 'CT521a-1ac-1ax', 'CT522-264-1ac2-1n', 'CT523c-2ac2-db-10g-cu',
                    'CT523c-3ac2-db-10g-cu', 'CT523c-8ax-ac10g-cu', 'CT523c-192-2ac2-1ac-10g'],

@@ -53,7 +89,7 @@ if __name__ == "__main__":

    print(dataframe)

    # Testing: generate data frame
    dataframe2 = pd.DataFrame({
        'station': [1, 2, 3, 4, 5, 6, 7],
        'time_seconds': [23, 78, 22, 19, 45, 22, 25]

@@ -108,7 +144,7 @@ if __name__ == "__main__":
        _xaxis_categories=x_axis_values,
        _graph_image_name="Bi-single_radio_2.4GHz",
        _label=["bi-downlink", "bi-uplink", 'uplink'],
-       _color=['darkorange', 'forestgreen','blueviolet'],
+       _color=['darkorange', 'forestgreen', 'blueviolet'],
        _color_edge='red',
        _grp_title="Throughput for each clients",
        _xaxis_step=5,

@@ -117,7 +153,7 @@ if __name__ == "__main__":
        _text_rotation=45,
        _xticks_font=7,
        _legend_loc="best",
-       _legend_box=(1,1),
+       _legend_box=(1, 1),
        _legend_ncol=1,
        _legend_fontsize=None,
        _enable_csv=True)

@@ -127,7 +163,7 @@ if __name__ == "__main__":
    print("graph name {}".format(graph_png))

    report.set_graph_image(graph_png)
    # need to move the graph image to the results
    report.move_graph_image()
    if graph.enable_csv:
        report.set_csv_filename(graph_png)

@@ -140,7 +176,7 @@ if __name__ == "__main__":
        _graph_image_name="image_name1",
        _color=None,
        _label=["s1", "s2", "s3"],
-       _enable_csv = False)
+       _enable_csv=False)
    graph_png = graph2.build_scatter_graph()

    print("graph name {}".format(graph_png))

@@ -149,14 +185,15 @@ if __name__ == "__main__":
    report.move_graph_image()

    report.build_graph()
-   # this will generate graph which is independent,we can customize the value with different colors
+   # this will generate graph which is independent,we can customize the value
+   # with different colors
    graph2 = lf_scatter_graph(_x_data_set=set1, _y_data_set=[45, 67, 45, 34], _values=[0, 0, 0, 1],
                              _xaxis_name="x-axis",
                              _yaxis_name="y-axis",
                              _graph_image_name="image_name_map",
                              _color=None,
                              _label=["s1", "s2"],
-                             _enable_csv = False)
+                             _enable_csv=False)
    graph_png = graph2.build_scatter_graph()

    print("graph name {}".format(graph_png))

@@ -165,14 +202,15 @@ if __name__ == "__main__":
    report.move_graph_image()

    report.build_graph()
-   dataset = [["1", "2", "3", "4"], [12, 45, 67, 34], [23, 67, 23, 12], [25, 45, 34, 23]]
+   dataset = [["1", "2", "3", "4"], [12, 45, 67, 34],
+              [23, 67, 23, 12], [25, 45, 34, 23]]
    graph = lf_stacked_graph(_data_set=dataset,
                             _xaxis_name="Stations",
                             _yaxis_name="Login PASS/FAIL",
                             _label=['Success', 'Fail', 'both'],
                             _graph_image_name="login_pass_fail1",
                             _color=None,
-                            _enable_csv = False)
+                            _enable_csv=False)

    graph_png = graph.build_stacked_graph()

@@ -192,7 +230,7 @@ if __name__ == "__main__":
        _graph_image_name="image_name_pass_fail",
        _color=["r", "g"],
        _figsize=(9, 4),
-       _enable_csv = False)
+       _enable_csv=False)

    graph_png = graph.build_horizontal_stacked_graph()

@@ -215,3 +253,5 @@ if __name__ == "__main__":
    # report.write_pdf(_page_size = 'Legal', _orientation='Portrait')

    # report.generate_report()
+
+if __name__ == "__main__":
+    main()
Some files were not shown because too many files have changed in this diff.