Mirror of https://github.com/Telecominfraproject/wlan-testing.git
Commit: Merge branch 'master' into staging-wifi-3200
.github/workflows/uc-sanity.yml (vendored, 296 lines changed; file deleted)
@@ -1,296 +0,0 @@
name: uCentral sanity testing

env:
  # thirdparties
  DOCKER_SERVER: tip-tip-wlan-cloud-docker-repo.jfrog.io
  DOCKER_USER_NAME: wlan-testing-cicd
  DOCKER_USER_PASSWORD: ${{ secrets.DOCKER_USER_PASSWORD }}
  # AWS credentials
  AWS_EKS_NAME: tip-wlan-main
  AWS_DEFAULT_OUTPUT: json
  AWS_DEFAULT_REGION: us-east-2
  AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_CLIENT_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_CLIENT_KEY }}
  # Cloud SDK certs
  CACERT: ${{ secrets.CACERT }}
  CAKEY: ${{ secrets.CAKEY }}
  ALLURE_CLI_VERSION: 2.14.0

on:
  workflow_dispatch:
    inputs:
      testbeds:
        default: 'basic-03'
        description: 'Testbed(s) to test'
        required: false
      marker_expression:
        default: 'uc_sanity'
        description: 'Markers expression that will be passed to the pytest command.'
        required: false
  schedule:
    - cron: '15 0 * * *'

defaults:
  run:
    shell: bash

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      # checkout needed repositories
      - name: Checkout Testing repo
        uses: actions/checkout@v2
        with:
          path: wlan-testing
          ref: feature-ucentral-wifi-2526

      - name: Checkout LANforge scripts
        uses: actions/checkout@v2
        with:
          path: wlan-lanforge-scripts
          repository: Telecominfraproject/wlan-lanforge-scripts

      - name: import LANforge scripts
        working-directory: wlan-testing
        run: ./sync_repos.bash

      # build and push docker image
      - name: docker login
        run: docker login ${{ env.DOCKER_SERVER }} -u ${{ env.DOCKER_USER_NAME }} -p ${{ env.DOCKER_USER_PASSWORD }}
      - name: build docker image
        working-directory: wlan-testing
        run: docker build -t ${{ env.DOCKER_SERVER }}/cloud-sdk-nightly:${{ github.run_id }} -f docker/Dockerfile .
      - name: push docker image
        run: docker push ${{ env.DOCKER_SERVER }}/cloud-sdk-nightly:${{ github.run_id }}

  generate-matrix:
    name: generate testbed matrix
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - name: generate-matrix
        id: set-matrix
        run: |
          TESTBEDS="${{ github.event.inputs.testbeds || 'basic-03' }}"
          TESTBEDS=$(echo $TESTBEDS | sed "s/,/\",\"/g" | sed 's/^/[\"/g' | sed 's/$/\"]/g')
          TESTBEDS=$(echo "$TESTBEDS" | jq -c 'map({"testbed":.})')
          echo "::set-output name=matrix::{\"include\":${TESTBEDS}}"

  test:
    runs-on: ubuntu-latest
    needs: [ build, generate-matrix ]
    strategy:
      max-parallel: 1
      fail-fast: false
      matrix: ${{ fromJson( needs.generate-matrix.outputs.matrix ) }}
    steps:
      - name: get EKS access credentials
        run: aws eks update-kubeconfig --name ${{ env.AWS_EKS_NAME }}

      - name: install Allure CLI tool
        run: |
          wget https://repo.maven.apache.org/maven2/io/qameta/allure/allure-commandline/${{ env.ALLURE_CLI_VERSION }}/allure-commandline-${{ env.ALLURE_CLI_VERSION }}.tgz
          tar -xzf allure-commandline-${{ env.ALLURE_CLI_VERSION }}.tgz

      - name: set job name
        id: job
        run: echo "::set-output name=name::testing-${{ github.run_number }}"

      - name: prepare namespace
        id: namespace
        run: |
          NAMESPACE="testing-${{ github.run_id }}-${{ matrix.testbed }}"
          kubectl create ns $NAMESPACE
          kubectl config set-context --current --namespace=$NAMESPACE
          echo "::set-output name=name::${NAMESPACE}"

      - name: create configuration.py secret
        run: |
          cat << EOF > configuration.py
          ${{ secrets.LAB_CONFIGURATION }}
          EOF

          kubectl create secret generic configuration --from-file=configuration=./configuration.py

      - name: run sanity tests
        run: |
          cat <<EOF | kubectl apply -f -
          apiVersion: batch/v1
          kind: Job
          metadata:
            name: "${{ steps.job.outputs.name }}"
          spec:
            template:
              spec:
                containers:
                  - name: tests
                    image: ${{ env.DOCKER_SERVER }}/cloud-sdk-nightly:${{ github.run_id }}
                    command:
                      - /bin/bash
                      - -x
                      - -c
                      - |
                        cd tests
                        pytest -m "${{ github.event.inputs.marker_expression || 'uc_sanity' }}" -s -vvv --testbed="${{ matrix.testbed }}" --skip-testrail --alluredir=/tmp/allure-results
                        ret=\$?
                        # sleep some time to be able to download the Allure results
                        sleep 60
                        exit \$ret
                    volumeMounts:
                      - name: configuration
                        mountPath: "/wlan-testing/tests/configuration.py"
                        subPath: configuration
                        readOnly: true
                imagePullSecrets:
                  - name: tip-docker-registry-key
                restartPolicy: Never
                volumes:
                  - name: configuration
                    secret:
                      secretName: configuration
            backoffLimit: 0
          EOF

          # wait for pod to spawn
          sleep 1

          podname=$(kubectl get pods --no-headers -o custom-columns=":metadata.name" -l job-name="${{ steps.job.outputs.name }}" | sed "s/pod\///")

          kubectl wait "pod/$podname" --for condition=ready

          #sleep 30 # wait for the pod to come up

          until [ -s test_everything.xml ]
          do
            sleep 10
            echo "waiting for tests to complete..."
            kubectl cp $podname:/wlan-testing/tests/test_everything.xml test_everything.xml >/dev/null 2>&1
          done
          echo "tests completed"

          echo "downloading allure results..."
          kubectl cp $podname:/tmp/allure-results allure-results >/dev/null 2>&1

          echo "waiting for pod to exit"
          kubectl logs -f $podname >/dev/null 2>&1

          exit $(kubectl get pod $podname --output="jsonpath={.status.containerStatuses[].state.terminated.exitCode}")

      - name: print logs
        if: always()
        run: |
          podname=$(kubectl get pods --no-headers -o custom-columns=":metadata.name" -l job-name="${{ steps.job.outputs.name }}" | sed "s/pod\///")
          kubectl logs $podname

      - name: upload Allure results as artifact
        if: always()
        uses: actions/upload-artifact@v2
        with:
          name: allure-results-${{ matrix.testbed }}
          path: allure-results

      - name: cleanup
        if: always()
        run: |
          kubectl delete ns "${{ steps.namespace.outputs.name }}" --wait=true

  report:
    runs-on: ubuntu-latest
    needs: [ test, generate-matrix ]
    if: always()
    strategy:
      max-parallel: 1
      fail-fast: false
      matrix: ${{ fromJson( needs.generate-matrix.outputs.matrix ) }}
    steps:
      - name: install Allure CLI tool
        run: |
          wget https://repo.maven.apache.org/maven2/io/qameta/allure/allure-commandline/${{ env.ALLURE_CLI_VERSION }}/allure-commandline-${{ env.ALLURE_CLI_VERSION }}.tgz
          tar -xzf allure-commandline-${{ env.ALLURE_CLI_VERSION }}.tgz

      - uses: actions/download-artifact@v2
        with:
          name: allure-results-${{ matrix.testbed }}
          path: allure-results

      - name: checkout testing repo
        uses: actions/checkout@v2
        with:
          path: wlan-testing

      - name: get reports branch
        uses: actions/checkout@v2
        continue-on-error: true
        with:
          ref: gh-pages
          path: reports

      - name: copy history into results
        run: |
          if [ -e "reports/sanity/${{ matrix.testbed }}/latest" ] ; then
            cp -r reports/sanity/${{ matrix.testbed }}/latest/history/ allure-results/history
          fi

      - name: add report metadata
        run: |
          cat << EOF >> allure-results/environment.properties
          Testbed=${{ matrix.testbed }}
          Tests.CommitId=$(cd wlan-testing && git rev-parse --short HEAD)
          CiRun.Id=${{ github.run_id }}
          CiRun.Number=${{ github.run_number }}
          CiRun.Url=https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}
          EOF

      - name: generate Allure report
        run: allure-${{ env.ALLURE_CLI_VERSION }}/bin/allure generate

      - name: upload Allure report as artifact
        uses: actions/upload-artifact@v2
        with:
          name: allure-report-${{ matrix.testbed }}
          path: allure-report

      # doing this to be able to aggregate multiple reports together later on
      - name: copy results into report
        run: cp -r allure-results allure-report/results

      - name: copy new report
        if: ${{ (github.event.inputs.marker_expression || 'uc_sanity') == 'uc_sanity' }}
        run: |
          mkdir -p reports/sanity/${{ matrix.testbed }}
          cp -Tr allure-report reports/sanity/${{ matrix.testbed }}/${{ github.run_number }}

      - name: update latest symlink
        if: ${{ (github.event.inputs.marker_expression || 'uc_sanity') == 'uc_sanity' }}
        working-directory: reports/sanity/${{ matrix.testbed }}
        run: ln -fns ${{ github.run_number }} latest

      - name: generate new index.html
        run: python wlan-testing/.github/tools/generate_directory_index.py -r reports

      - name: commit reports update
        working-directory: reports
        run: |
          git config --global user.name "github-actions[bot]"
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"

          git add .
          git commit -m "Automated deployment: $(date -u)"

      - name: push
        #if: github.ref == 'refs/heads/master'
        uses: ad-m/github-push-action@v0.6.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          branch: gh-pages
          directory: reports

  cleanup:
    needs: [ test ]
    runs-on: ubuntu-latest
    if: always()
    steps:
      - name: cleanup Docker image
        run: curl -u${{ env.DOCKER_USER_NAME }}:${{ env.DOCKER_USER_PASSWORD }} -X DELETE "https://tip.jfrog.io/artifactory/tip-wlan-cloud-docker-repo/cloud-sdk-nightly/${{ github.run_id }}"
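For reference, a minimal sketch of what the generate-matrix step in the removed workflow produced, runnable locally with plain bash, sed, and jq (the comma-separated testbed list is an illustrative value, not from the commit):

#!/usr/bin/env bash
# Reproduce the testbed-to-matrix transformation from the generate-matrix job.
TESTBEDS="basic-03,basic-04"   # hypothetical workflow_dispatch input

# Wrap the comma-separated list into a JSON array of strings:
#   basic-03,basic-04 -> ["basic-03","basic-04"]
TESTBEDS=$(echo $TESTBEDS | sed "s/,/\",\"/g" | sed 's/^/[\"/g' | sed 's/$/\"]/g')

# Turn each entry into an include object:
#   [{"testbed":"basic-03"},{"testbed":"basic-04"}]
TESTBEDS=$(echo "$TESTBEDS" | jq -c 'map({"testbed":.})')

# The test job consumed this via fromJson() as its strategy.matrix.
echo "{\"include\":${TESTBEDS}}"

Each matrix entry became one run of the test job with matrix.testbed set accordingly; max-parallel: 1 kept the testbeds from being exercised concurrently.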
.github/workflows/ucentralgw-deployment.yaml (vendored, 20 lines changed)
@@ -20,15 +20,9 @@ env:
  testbeds: '[
    {
      "namespace": "qa01",
      "ucentralgw_version": "master",
      "ucentralsec_version": "main",
      "ucentralgwui_version": "main"
    },
    {
      "namespace": "dev01",
      "ucentralgw_version": "master",
      "ucentralsec_version": "main",
      "ucentralgwui_version": "main"
      "ucentralgw_version": "v2.0.0",
      "ucentralsec_version": "v2.0.0",
      "ucentralgwui_version": "v2.0.0"
    }
  ]'

@@ -61,12 +55,10 @@ jobs:
        with:
          path: wlan-cloud-ucentral-deploy
          repository: Telecominfraproject/wlan-cloud-ucentral-deploy
      - name: Checkout Helm values repo
      - name: Checkout repo with Helm values
        uses: actions/checkout@v2
        with:
          path: toolsmith
          repository: Telecominfraproject/Toolsmith
          token: ${{ secrets.PAT_TOKEN }}
          path: wlan-testing

      - name: Prepare certificates from secrets
        run: |
@@ -92,7 +84,7 @@ jobs:
          helm dependency update
          helm upgrade --install --create-namespace \
            --namespace ucentral-${{ matrix.namespace }} --wait --timeout 20m \
            -f ../../toolsmith/helm-values/assembly-ucentral/values.ucentral-qa.yaml \
            -f ../../wlan-testing/helm/ucentral/values.ucentral-qa.yaml \
            --set ucentralgw.configProperties."rtty\.token"=${{ secrets.RTTY_TOKEN }} \
            --set ucentralsec.configProperties."authentication\.default\.username"=${{ secrets.UCENTRALGW_AUTH_USERNAME }} \
            --set ucentralsec.configProperties."authentication\.default\.password"=${{ secrets.UCENTRALGW_AUTH_PASSWORD }} \

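A side note on the --set keys above: the backslash-escaped dots (e.g. rtty\.token) keep Helm from interpreting the property name as a nested path. A minimal sketch of the same pattern, with a placeholder release name, chart path, and value:

# Without the escape, Helm would parse rtty.token as {rtty: {token: ...}};
# escaping the dot makes it one literal key under configProperties.
helm upgrade --install ucentralgw ./ucentralgw \
  --set ucentralgw.configProperties."rtty\.token"="example-token"   # placeholder value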
helm/ucentral/values.ucentral-qa.yaml (new file, 121 lines)
@@ -0,0 +1,121 @@
ucentralgw:
  fullnameOverride: ucentralgw

  services:
    ucentralgw:
      type: LoadBalancer
      annotations:
        service.beta.kubernetes.io/aws-load-balancer-type: "nlb-ip"
        service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing
        service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: "16102"
        service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl
        service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285"
        service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16002,16003,17002"

  configProperties:
    # -> Public part
    # File uploader
    # rtty
    rtty.enabled: "true"

  certs:
    restapi-ca.pem: |
      -----BEGIN CERTIFICATE-----
      MIIDojCCAoqgAwIBAgIUPVYBpqNbcLYygF6Mx+qxSWwQyFowDQYJKoZIhvcNAQEL
      BQAwaTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG1RlbGVjb20gSW5mcmEgUHJvamVj
      dCwgSW5jLjEMMAoGA1UECxMDVElQMSYwJAYDVQQDEx1UZWxlY29tIEluZnJhIFBy
      b2plY3QgUm9vdCBDQTAeFw0yMTA0MTMyMjQyNDRaFw0zMTA0MTMyMjM4NDZaMGkx
      CzAJBgNVBAYTAlVTMSQwIgYDVQQKExtUZWxlY29tIEluZnJhIFByb2plY3QsIElu
      Yy4xDDAKBgNVBAsTA1RJUDEmMCQGA1UEAxMdVGVsZWNvbSBJbmZyYSBQcm9qZWN0
      IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIGCibwf5u
      AAwZ+1H8U0e3u2V+0d2gSctucoK86XwUmfe1V2a/qlCYZd29r80IuN1IIeB0naIm
      KnK/MzXW87clF6tFd1+HzEvmlY/W4KyIXalVCTEzirFSvBEG2oZpM0yC3AefytAO
      aOpA00LaM3xTfTqMKIRhJBuLy0I4ANUVG6ixVebbGuc78IodleqiLoWy2Q9QHyEO
      t/7hZndJhiVogh0PveRhho45EbsACu7ymDY+JhlIleevqwlE3iQoq0YcmYADHno6
      Eq8vcwLpZFxihupUafkd1T3WJYQAJf9coCjBu2qIhNgrcrGD8R9fGswwNRzMRMpX
      720+GjcDW3bJAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFAJG
      lmB5sVP2qfL3xZ8hQOTpkQH6MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF
      AAOCAQEAVjl9dm4epG9NUYnagT9sg7scVQEPfz3Lt6w1NXJXgD8mAUlK0jXmEyvM
      dCPD4514n+8+lM7US8fh+nxc7jO//LwK17Wm9FblgjNFR7+anv0Q99T9fP19DLlF
      PSNHL2emogy1bl1lLTAoj8nxg2wVKPDSHBGviQ5LR9fsWUIJDv9Bs5k0qWugWYSj
      19S6qnHeskRDB8MqRLhKMG82oDVLerSnhD0P6HjySBHgTTU7/tYS/OZr1jI6MPbG
      L+/DtiR5fDVMNdBSGU89UNTi0wHY9+RFuNlIuvZC+x/swF0V9R5mN+ywquTPtDLA
      5IOM7ItsRmen6u3qu+JXros54e4juQ==
      -----END CERTIFICATE-----

ucentralsec:
  fullnameOverride: ucentralsec

  services:
    ucentralsec:
      type: LoadBalancer
      annotations:
        service.beta.kubernetes.io/aws-load-balancer-type: "nlb-ip"
        service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing
        service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: "16101"
        service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl
        service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285"
        service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16001,17001"

  certs:
    restapi-ca.pem: |
      -----BEGIN CERTIFICATE-----
      MIIDojCCAoqgAwIBAgIUPVYBpqNbcLYygF6Mx+qxSWwQyFowDQYJKoZIhvcNAQEL
      BQAwaTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG1RlbGVjb20gSW5mcmEgUHJvamVj
      dCwgSW5jLjEMMAoGA1UECxMDVElQMSYwJAYDVQQDEx1UZWxlY29tIEluZnJhIFBy
      b2plY3QgUm9vdCBDQTAeFw0yMTA0MTMyMjQyNDRaFw0zMTA0MTMyMjM4NDZaMGkx
      CzAJBgNVBAYTAlVTMSQwIgYDVQQKExtUZWxlY29tIEluZnJhIFByb2plY3QsIElu
      Yy4xDDAKBgNVBAsTA1RJUDEmMCQGA1UEAxMdVGVsZWNvbSBJbmZyYSBQcm9qZWN0
      IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIGCibwf5u
      AAwZ+1H8U0e3u2V+0d2gSctucoK86XwUmfe1V2a/qlCYZd29r80IuN1IIeB0naIm
      KnK/MzXW87clF6tFd1+HzEvmlY/W4KyIXalVCTEzirFSvBEG2oZpM0yC3AefytAO
      aOpA00LaM3xTfTqMKIRhJBuLy0I4ANUVG6ixVebbGuc78IodleqiLoWy2Q9QHyEO
      t/7hZndJhiVogh0PveRhho45EbsACu7ymDY+JhlIleevqwlE3iQoq0YcmYADHno6
      Eq8vcwLpZFxihupUafkd1T3WJYQAJf9coCjBu2qIhNgrcrGD8R9fGswwNRzMRMpX
      720+GjcDW3bJAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFAJG
      lmB5sVP2qfL3xZ8hQOTpkQH6MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF
      AAOCAQEAVjl9dm4epG9NUYnagT9sg7scVQEPfz3Lt6w1NXJXgD8mAUlK0jXmEyvM
      dCPD4514n+8+lM7US8fh+nxc7jO//LwK17Wm9FblgjNFR7+anv0Q99T9fP19DLlF
      PSNHL2emogy1bl1lLTAoj8nxg2wVKPDSHBGviQ5LR9fsWUIJDv9Bs5k0qWugWYSj
      19S6qnHeskRDB8MqRLhKMG82oDVLerSnhD0P6HjySBHgTTU7/tYS/OZr1jI6MPbG
      L+/DtiR5fDVMNdBSGU89UNTi0wHY9+RFuNlIuvZC+x/swF0V9R5mN+ywquTPtDLA
      5IOM7ItsRmen6u3qu+JXros54e4juQ==
      -----END CERTIFICATE-----

rttys:
  enabled: true

  services:
    rttys:
      type: LoadBalancer
      annotations:
        service.beta.kubernetes.io/aws-load-balancer-type: "nlb-ip"
        service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing
        service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: "5914"
        service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl
        service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285"
        service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "5912,5913"

ucentralgwui:
  services:
    ucentralgwui:
      type: NodePort

  ingresses:
    default:
      enabled: true
      annotations:
        kubernetes.io/ingress.class: alb
        alb.ingress.kubernetes.io/scheme: internet-facing
        alb.ingress.kubernetes.io/group.name: wlan-cicd
        alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285
        alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS": 443}]'
        alb.ingress.kubernetes.io/actions.ssl-redirect: '{"Type": "redirect", "RedirectConfig": { "Protocol": "HTTPS", "Port": "443", "StatusCode": "HTTP_302"}}'
      paths:
        - path: /*
          serviceName: ucentralgwui
          servicePort: http

  public_env_variables:
    ALLOW_UCENTRALSEC_CHANGE: false

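Once the chart is deployed with these values, a quick sanity check that the LoadBalancer services picked up the NLB annotations could look like the following sketch; the namespace (ucentral-qa01, derived from ucentral-<matrix.namespace>) and the rendered service names are assumptions and may differ per release:

# List services and their external endpoints in the assumed target namespace.
kubectl get svc -n ucentral-qa01 -o wide

# Inspect the annotations actually applied to the gateway service
# (name assumed from fullnameOverride: ucentralgw).
kubectl get svc ucentralgw -n ucentral-qa01 -o jsonpath='{.metadata.annotations}'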
@@ -18,13 +18,16 @@ if 'perfecto_libs' not in sys.path:

pytestmark = [pytest.mark.sanity, pytest.mark.interop, pytest.mark.android, pytest.mark.interop_and, pytest.mark.ClientConnectivity]

from android_lib import closeApp, set_APconnMobileDevice_android, verifyUploadDownloadSpeed_android, Toggle_AirplaneMode_android, ForgetWifiConnection, openApp
from android_lib import closeApp, set_APconnMobileDevice_android, verifyUploadDownloadSpeed_android,\
    Toggle_AirplaneMode_android, ForgetWifiConnection, openApp

setup_params_general = {
    "mode": "NAT",
    "ssid_modes": {
        "wpa": [{"ssid_name": "ssid_wpa_2g", "appliedRadios": ["is2dot4GHz"], "security_key": "something"},
                {"ssid_name": "ssid_wpa_5g", "appliedRadios": ["is5GHzU", "is5GHz", "is5GHzL"], "security_key": "something"}],
        "open": [{"ssid_name": "ssid_open_2g", "appliedRadios": ["is2dot4GHz"]},
                 {"ssid_name": "ssid_open_5g", "appliedRadios": ["is5GHzU", "is5GHz", "is5GHzL"]}],
        "wpa2_personal": [
            {"ssid_name": "ssid_wpa2_2g", "appliedRadios": ["is2dot4GHz"], "security_key": "something"},
            {"ssid_name": "ssid_wpa2_5g", "appliedRadios": ["is5GHzU", "is5GHz", "is5GHzL"], "security_key": "something"}]},
@@ -50,7 +53,7 @@ class TestNatMode(object):
        ssidName = profile_data["ssid_name"]
        ssidPassword = profile_data["security_key"]
        print ("SSID_NAME: " + ssidName)
        #print ("SSID_PASS: " + ssidPassword)
        print ("SSID_PASS: " + ssidPassword)

        if ssidName not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
@@ -68,7 +71,7 @@ class TestNatMode(object):

        #ForgetWifi
        ForgetWifiConnection(request, setup_perfectoMobile_android, ssidName, connData)



    @pytest.mark.twog
    @pytest.mark.wpa2_personal
@@ -77,7 +80,7 @@ class TestNatMode(object):
        ssidName = profile_data["ssid_name"]
        ssidPassword = profile_data["security_key"]
        print ("SSID_NAME: " + ssidName)
        #print ("SSID_PASS: " + ssidPassword)
        print ("SSID_PASS: " + ssidPassword)

        if ssidName not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
@@ -96,8 +99,6 @@ class TestNatMode(object):
        #ForgetWifi
        ForgetWifiConnection(request, setup_perfectoMobile_android, ssidName, connData)



    @pytest.mark.twog
    @pytest.mark.wpa
    def test_ClientConnectivity_2g_WPA(self, request, get_vif_state, get_APToMobileDevice_data, setup_perfectoMobile_android):
@@ -105,7 +106,7 @@ class TestNatMode(object):
        ssidName = profile_data["ssid_name"]
        ssidPassword = profile_data["security_key"]
        print ("SSID_NAME: " + ssidName)
        #print ("SSID_PASS: " + ssidPassword)
        print ("SSID_PASS: " + ssidPassword)

        if ssidName not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
@@ -123,7 +124,8 @@ class TestNatMode(object):

        #ForgetWifi
        ForgetWifiConnection(request, setup_perfectoMobile_android, ssidName, connData)



    @pytest.mark.fiveg
    @pytest.mark.wpa
    def test_ClientConnectivity_5g_WPA(self, request, get_vif_state, get_APToMobileDevice_data, setup_perfectoMobile_android):
@@ -131,7 +133,7 @@ class TestNatMode(object):
        ssidName = profile_data["ssid_name"]
        ssidPassword = profile_data["security_key"]
        print ("SSID_NAME: " + ssidName)
        #print ("SSID_PASS: " + ssidPassword)
        print ("SSID_PASS: " + ssidPassword)

        if ssidName not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
@@ -149,3 +151,57 @@ class TestNatMode(object):

        #ForgetWifi
        ForgetWifiConnection(request, setup_perfectoMobile_android, ssidName, connData)

    @pytest.mark.twog
    @pytest.mark.open
    def test_ClientConnectivity_2g_OPEN(self, request, get_vif_state, get_APToMobileDevice_data, setup_perfectoMobile_android):

        profile_data = setup_params_general["ssid_modes"]["open"][0]
        ssidName = profile_data["ssid_name"]
        ssidPassword = "[BLANK]"
        print("SSID_NAME: " + ssidName)
        print("SSID_PASS: " + ssidPassword)
        if ssidName not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")

        report = setup_perfectoMobile_android[1]
        driver = setup_perfectoMobile_android[0]
        connData = get_APToMobileDevice_data

        # Set Wifi/AP Mode
        set_APconnMobileDevice_android(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)

        # Toggle AirplaneMode
        assert Toggle_AirplaneMode_android(request, setup_perfectoMobile_android, connData)

        # ForgetWifi
        ForgetWifiConnection(request, setup_perfectoMobile_android, ssidName, connData)

    @pytest.mark.fiveg
    @pytest.mark.open
    def test_ClientConnectivity_5g_OPEN(self, request, get_vif_state, get_APToMobileDevice_data, setup_perfectoMobile_android):

        profile_data = setup_params_general["ssid_modes"]["open"][1]
        ssidName = profile_data["ssid_name"]
        ssidPassword = "[BLANK]"
        print("SSID_NAME: " + ssidName)
        print("SSID_PASS: " + ssidPassword)

        if ssidName not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")

        report = setup_perfectoMobile_android[1]
        driver = setup_perfectoMobile_android[0]
        connData = get_APToMobileDevice_data

        # Set Wifi/AP Mode
        set_APconnMobileDevice_android(request, ssidName, ssidPassword, setup_perfectoMobile_android, connData)

        # Toggle AirplaneMode
        assert Toggle_AirplaneMode_android(request, setup_perfectoMobile_android, connData)

        # ForgetWifi
        ForgetWifiConnection(request, setup_perfectoMobile_android, ssidName, connData)

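The open-SSID tests added above reuse the marker scheme already present in this module (sanity, interop_and, twog/fiveg, open). A hedged example of selecting just the new 2.4 GHz open test from the tests directory, mirroring the pytest invocation used by the sanity workflow (testbed name and marker combination are illustrative):

cd tests
# Run only the 2.4 GHz open-SSID client connectivity tests against one testbed.
pytest -m "interop_and and twog and open" -s -vvv \
  --testbed="basic-03" --skip-testrail --alluredir=/tmp/allure-results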
@@ -25,9 +25,12 @@ setup_params_general = {
    "ssid_modes": {
        "wpa": [{"ssid_name": "ssid_wpa_2g", "appliedRadios": ["is2dot4GHz"], "security_key": "something"},
                {"ssid_name": "ssid_wpa_5g", "appliedRadios": ["is5GHzU", "is5GHz", "is5GHzL"], "security_key": "something"}],
        "open": [{"ssid_name": "ssid_open_2g", "appliedRadios": ["is2dot4GHz"]},
                 {"ssid_name": "ssid_open_5g", "appliedRadios": ["is5GHzU", "is5GHz", "is5GHzL"]}],
        "wpa2_personal": [
            {"ssid_name": "ssid_wpa2_2g", "appliedRadios": ["is2dot4GHz"], "security_key": "something"},
            {"ssid_name": "ssid_wpa2_5g", "appliedRadios": ["is5GHzU", "is5GHz", "is5GHzL"], "security_key": "something"}]},

    "rf": {},
    "radius": False
}
@@ -148,4 +151,58 @@ class TestNatMode(object):
        verifyUploadDownloadSpeediOS(request, setup_perfectoMobile_iOS, connData)

        #ForgetWifi
        ForgetWifiConnection(request, setup_perfectoMobile_iOS, ssidName, connData)


    @pytest.mark.twog
    @pytest.mark.open
    def test_ClientConnectivity_2g_OPEN(self, request, get_vif_state, get_APToMobileDevice_data, setup_perfectoMobile_iOS):

        profile_data = setup_params_general["ssid_modes"]["open"][0]
        ssidName = profile_data["ssid_name"]
        ssidPassword = "[BLANK]"
        print("SSID_NAME: " + ssidName)
        print("SSID_PASS: " + ssidPassword)
        if ssidName not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")

        report = setup_perfectoMobile_iOS[1]
        driver = setup_perfectoMobile_iOS[0]
        connData = get_APToMobileDevice_data

        # Set Wifi/AP Mode
        set_APconnMobileDevice_iOS(request, ssidName, ssidPassword, setup_perfectoMobile_iOS, connData)

        # Verify Upload download Speed from device Selection
        verifyUploadDownloadSpeediOS(request, setup_perfectoMobile_iOS, connData)

        # ForgetWifi
        ForgetWifiConnection(request, setup_perfectoMobile_iOS, ssidName, connData)


    @pytest.mark.fiveg
    @pytest.mark.open
    def test_ClientConnectivity_5g_OPEN(self, request, get_vif_state, get_APToMobileDevice_data, setup_perfectoMobile_iOS):

        profile_data = setup_params_general["ssid_modes"]["open"][1]
        ssidName = profile_data["ssid_name"]
        ssidPassword = "[BLANK]"
        print("SSID_NAME: " + ssidName)
        print("SSID_PASS: " + ssidPassword)
        if ssidName not in get_vif_state:
            allure.attach(name="retest,vif state ssid not available:", body=str(get_vif_state))
            pytest.xfail("SSID NOT AVAILABLE IN VIF STATE")

        report = setup_perfectoMobile_iOS[1]
        driver = setup_perfectoMobile_iOS[0]
        connData = get_APToMobileDevice_data

        # Set Wifi/AP Mode
        set_APconnMobileDevice_iOS(request, ssidName, ssidPassword, setup_perfectoMobile_iOS, connData)

        # Verify Upload download Speed from device Selection
        verifyUploadDownloadSpeediOS(request, setup_perfectoMobile_iOS, connData)

        # ForgetWifi
        ForgetWifiConnection(request, setup_perfectoMobile_iOS, ssidName, connData)