diff --git a/.github/workflows/ow_docker-compose.yml b/.github/workflows/ow_docker-compose.yml index 93662eb25..071adba4f 100644 --- a/.github/workflows/ow_docker-compose.yml +++ b/.github/workflows/ow_docker-compose.yml @@ -1,9 +1,5 @@ name: 2.x testing with Docker Compose deployment env: - # thirdparties - DOCKER_SERVER: tip-tip-wlan-cloud-docker-repo.jfrog.io - DOCKER_USER_NAME: wlan-testing-cicd - DOCKER_USER_PASSWORD: ${{ secrets.DOCKER_USER_PASSWORD }} # AWS credentials AWS_EKS_NAME: tip-wlan-main AWS_DEFAULT_OUTPUT: json @@ -59,30 +55,13 @@ jobs: build: runs-on: ubuntu-latest steps: - # checkout needed repositories - - name: Checkout Testing repo - uses: actions/checkout@v2 + - uses: actions/checkout@v2 + - name: build and push Docker image + uses: ./.github/actions/build-and-push-docker with: - path: wlan-testing - - - name: Checkout LANforge scripts - uses: actions/checkout@v2 - with: - path: wlan-lanforge-scripts - repository: Telecominfraproject/wlan-lanforge-scripts - - - name: import LANforge scripts - working-directory: wlan-testing - run: ./sync_repos.bash - - # build and push docker image - - name: docker login - run: docker login ${{ env.DOCKER_SERVER }} -u ${{ env.DOCKER_USER_NAME }} -p ${{ env.DOCKER_USER_PASSWORD }} - - name: build docker image - working-directory: wlan-testing - run: docker build -t ${{ env.DOCKER_SERVER }}/cloud-sdk-nightly:${{ github.run_id }} -f docker/Dockerfile . 
- - name: push docker image - run: docker push ${{ env.DOCKER_SERVER }}/cloud-sdk-nightly:${{ github.run_id }} + registry: tip-tip-wlan-cloud-docker-repo.jfrog.io + registry_user: wlan-testing-cicd + registry_password: ${{ secrets.DOCKER_USER_PASSWORD }} deploy-controller: name: Deploy OpenWIFI Cloud SDK @@ -479,7 +458,7 @@ jobs: if: always() uses: actions/upload-artifact@v2 with: - name: allure-results-${{ github.event.inputs.testbeds || 'basic-05' }} + name: allure-results-docker-compose-${{ github.event.inputs.marker_expression || 'sdk_restapi' }} path: allure-results - name: cleanup @@ -492,92 +471,58 @@ jobs: needs: [ test ] if: always() steps: - - name: install Allure CLI tool - run: | - wget https://repo.maven.apache.org/maven2/io/qameta/allure/allure-commandline/${{ env.ALLURE_CLI_VERSION }}/allure-commandline-${{ env.ALLURE_CLI_VERSION }}.tgz - tar -xzf allure-commandline-${{ env.ALLURE_CLI_VERSION }}.tgz + - name: checkout testing repo + uses: actions/checkout@v2 - uses: actions/download-artifact@v2 with: - name: allure-results-${{ github.event.inputs.testbeds || 'basic-05' }} + name: allure-results-docker-compose-${{ github.event.inputs.marker_expression || 'sdk_restapi' }} path: allure-results - - name: checkout testing repo - uses: actions/checkout@v2 - with: - path: wlan-testing - - - name: get reports branch - uses: actions/checkout@v2 + - name: download history of previous run continue-on-error: true - with: - ref: gh-pages - path: reports - - - name: copy history into results run: | - if [ -e "reports/docker-compose/${{ github.event.inputs.marker_expression || 'sdk_restapi' }}/${{ github.event.inputs.testbeds || 'basic-05' }} /latest" ] ; then - cp -r reports/docker-compose/${{ github.event.inputs.marker_expression || 'sdk_restapi' }}/${{ github.event.inputs.testbeds || 'basic-05' }}/latest/history/ allure-results/history - fi - - - name: add report metadata - run: | - cat << EOF >> allure-results/environment.properties - Testbed=${{ 
github.event.inputs.testbeds || 'basic-05' }} - Tests.CommitId=$(cd wlan-testing && git rev-parse --short HEAD) - CiRun.Id=${{ github.run_id }} - CiRun.Number=${{ github.run_number }} - CiRun.Url=https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} - EOF + LAST_RUN_ID=$(aws s3api head-object --bucket openwifi-allure-reports --key docker-compose-${{ github.event.inputs.marker_expression || 'sdk_restapi' }}/latest/index.html | jq -r .Metadata.latest) + aws s3 cp --recursive s3://openwifi-allure-reports/docker-compose-${{ github.event.inputs.marker_expression || 'sdk_restapi' }}/$LAST_RUN_ID/history history - name: generate Allure report - run: allure-${{ env.ALLURE_CLI_VERSION }}/bin/allure generate + uses: ./.github/actions/generate-allure-report + with: + results_path: ./allure-results + history_path: ./history - name: upload Allure report as artifact uses: actions/upload-artifact@v2 with: - name: allure-report-${{ github.event.inputs.testbeds || 'basic-05' }} + name: allure-report-docker-compose-${{ github.event.inputs.marker_expression || 'sdk_restapi' }} path: allure-report # doing this to be able to aggregate multiple reports together later on - name: copy results into report - run: cp -r allure-results allure-report/results - - - name: copy new report run: | - mkdir -p reports/docker-compose/${{ github.event.inputs.marker_expression || 'sdk_restapi' }}/${{ github.event.inputs.testbeds || 'basic-05' }} - cp -Tr allure-report reports/docker-compose/${{ github.event.inputs.marker_expression || 'sdk_restapi' }}/${{ github.event.inputs.testbeds || 'basic-05' }}/${{ github.run_number }} + cp -r allure-results allure-report/results - - name: update latest symlink - working-directory: reports/docker-compose/${{ github.event.inputs.marker_expression || 'sdk_restapi' }}/${{ github.event.inputs.testbeds || 'basic-05' }} - run: ln -fns ${{ github.run_number }} latest - - - name: generate new index.html - run: python 
wlan-testing/.github/tools/generate_directory_index.py -r reports - - - name: commit reports update - working-directory: reports - run: | - git config --global user.name "github-actions[bot]" - git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com" - git add . - git commit -m "Automated deployment: $(date -u)" - - - name: push + - name: upload to S3 if: github.ref == 'refs/heads/master' - uses: ad-m/github-push-action@v0.6.0 + uses: ./.github/actions/allure-report-to-s3 with: - github_token: ${{ secrets.GITHUB_TOKEN }} - branch: gh-pages - directory: reports + test_type: docker-compose-${{ github.event.inputs.marker_expression || 'sdk_restapi' }} + testbed: ${{ github.event.inputs.testbeds || 'basic-05' }} + report_path: allure-report + s3_access_key_id: ${{ secrets.ALLURE_S3_ACCESS_KEY_ID }} + s3_access_key_secret: ${{ secrets.ALLURE_S3_ACCESS_KEY_SECRET }} delete-docker-image: needs: [ test ] runs-on: ubuntu-latest if: always() steps: + - uses: actions/checkout@v2 - name: cleanup Docker image - run: curl -u${{ env.DOCKER_USER_NAME }}:${{ env.DOCKER_USER_PASSWORD }} -X DELETE "https://tip.jfrog.io/artifactory/tip-wlan-cloud-docker-repo/cloud-sdk-nightly/${{ github.run_id }}" + uses: ./.github/actions/cleanup-docker + with: + registry_user: wlan-testing-cicd + registry_password: ${{ secrets.DOCKER_USER_PASSWORD }} # - name: cleanup docker-compose-deployment image # run: curl -u${{ env.DOCKER_USER_NAME }}:${{ env.DOCKER_USER_PASSWORD }} -X DELETE "https://tip.jfrog.io/artifactory/tip-wlan-cloud-docker-repo/docker-compose-deployment/${{ github.run_id }}" diff --git a/.github/workflows/uc_loadsim.yml b/.github/workflows/uc_loadsim.yml new file mode 100644 index 000000000..8705d2604 --- /dev/null +++ b/.github/workflows/uc_loadsim.yml @@ -0,0 +1,163 @@ +name: OpenWifi 2.0 load simulation +env: + # AWS credentials + AWS_EKS_NAME: tip-wlan-main + AWS_DEFAULT_OUTPUT: json + AWS_DEFAULT_REGION: us-east-2 + AWS_ACCOUNT_ID: ${{ 
secrets.AWS_ACCOUNT_ID }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_CLIENT_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_CLIENT_KEY }} + +on: + workflow_dispatch: + inputs: + sdk_version: + default: 'main' + description: 'Version of Cloud SDK to test' + required: true + +defaults: + run: + shell: bash + +jobs: + loadsim: + runs-on: ubuntu-latest + outputs: + gateway_url: ${{ steps.gateway_url.outputs.name }} + sec_url: ${{ steps.sec_url.outputs.value }} + namespace: ${{ steps.namespace.outputs.name }} + steps: + - name: Checkout Testing repo + uses: actions/checkout@v2 + with: + path: wlan-testing + + - name: Get EKS access credentials + run: aws eks update-kubeconfig --name ${{ env.AWS_EKS_NAME }} + + - name: Prepare namespace + id: namespace + run: | + NAMESPACE="ls-${{ github.run_number }}" + kubectl create ns $NAMESPACE + kubectl config set-context --current --namespace=$NAMESPACE + echo "::set-output name=name::${NAMESPACE}" + + - name: Set gateway URL output + id: gateway_url + run: echo "::set-output name=name::gw-${{ steps.namespace.outputs.name }}.cicd.lab.wlan.tip.build" + + - name: Set sec service URL output + id: sec_url + run: echo "::set-output name=value::sec-${{ steps.namespace.outputs.name }}.cicd.lab.wlan.tip.build" + + - name: Set ls service URL output + id: ls_url + run: echo "::set-output name=value::ls-${{ steps.namespace.outputs.name }}.cicd.lab.wlan.tip.build" + + - name: Prepare certificates from secrets + working-directory: wlan-testing/helm/ucentral + run: | + echo "${{ secrets.DIGICERT_CERT }}" | base64 -d > cert.pem + echo "${{ secrets.DIGICERT_KEY }}" | base64 -d > key.pem + echo "${{ secrets.LOADSIM_DIGICERT_CERT }}" | base64 -d > device-cert.pem + echo "${{ secrets.LOADSIM_DIGICERT_KEY }}" | base64 -d > device-key.pem + + - name: Deploy CloudSDK instance with load simulator + working-directory: wlan-testing/helm/ucentral + run: | + export NAMESPACE="${{ steps.namespace.outputs.name }}" + export DEPLOY_METHOD=git + export 
CHART_VERSION="${{ github.event.inputs.sdk_version }}" + export OWGW_VERSION=master + export OWGWUI_VERSION=main + export OWSEC_VERSION=main + export OWFMS_VERSION=main + export OWPROV_VERSION=main + export OWPROVUI_VERSION=main + export VALUES_FILE_LOCATION=values.ucentral-qa.yaml,values.ucentral-qa.test-nodes.yaml,values.ucentral-qa.owls-enabled.yaml + export RTTY_TOKEN=${{ secrets.RTTY_TOKEN }} + export OWGW_AUTH_USERNAME=${{ secrets.UCENTRALGW_AUTH_USERNAME }} + export OWGW_AUTH_PASSWORD=${{ secrets.UCENTRALGW_AUTH_PASSWORD }} + export OWFMS_S3_SECRET=${{ secrets.UCENTRALFMS_S3_SECRET }} + export OWFMS_S3_KEY=${{ secrets.UCENTRALFMS_S3_KEY }} + export CERT_LOCATION=cert.pem + export KEY_LOCATION=key.pem + export DEVICE_CERT_LOCATION=device-cert.pem + export DEVICE_KEY_LOCATION=device-key.pem + export OWSEC_NEW_PASSWORD=${{ secrets.OWSEC_NEW_PASSWORD }} + ./deploy.sh + + - name: Wait for DNS to propagate + run: sleep 300 + + - name: Checkout OWLS for CLI tool + uses: actions/checkout@v2 + with: + repository: Telecominfraproject/wlan-cloud-owls + path: wlan-cloud-owls + + - name: Run simulation + working-directory: wlan-cloud-owls/test_scripts/curl + run: | + export OWGW="${{ steps.gateway_url.outputs.name }}:15002" + export OWSEC="${{ steps.sec_url.outputs.value }}:16001" + export OWLS="${{ steps.ls_url.outputs.value }}:16007" + curl -s -X POST -H 'Content-Type: application/json' https://$OWSEC/api/v1/oauth2 -d '{"userId": "tip@ucentral.com", "password": "'${{ secrets.OWSEC_NEW_PASSWORD }}'"}' | jq '.access_token' -r > access_token + echo + echo "[Info] Creating simulation" + cat > input_sim_info.json < output_sim_status.json + export SIM_STATE=$(cat output_sim_status.json | jq '.state' -r) + done + echo + echo "[Info] Final results:" + cat output_sim_status.json | jq . 
+ + - name: Show resource state on deployment failure + if: failure() + run: | + kubectl get pods --namespace openwifi-${{ steps.namespace.outputs.name }} + kubectl get services --namespace openwifi-${{ steps.namespace.outputs.name }} + kubectl get persistentvolumeclaims --namespace openwifi-${{ steps.namespace.outputs.name }} + - name: Describe pods on deployment failure + if: failure() + run: | + kubectl describe pods --namespace openwifi-${{ steps.namespace.outputs.name }} + - name: Describe services on deployment failure + if: failure() + run: | + kubectl describe services --namespace openwifi-${{ steps.namespace.outputs.name }} + - name: Describe persistentvolumeclaims on deployment failure + if: failure() + run: | + kubectl describe persistentvolumeclaims --namespace openwifi-${{ steps.namespace.outputs.name }} + + - name: Cleanup + if: always() + run: | + helm delete -n openwifi-${{ steps.namespace.outputs.name }} tip-openwifi || true + kubectl delete ns "${{ steps.namespace.outputs.name }}" --wait=true diff --git a/helm/ucentral/deploy.sh b/helm/ucentral/deploy.sh index c8b1e5f5f..50feb500a 100755 --- a/helm/ucentral/deploy.sh +++ b/helm/ucentral/deploy.sh @@ -30,8 +30,18 @@ usage () { echo "- OWFMS_VERSION - OpenWIFI Firmware version to deploy (will be used for Docker image tag and git branch for Helm chart if git deployment is required)"; echo "- OWPROV_VERSION - OpenWIFI Provisioning version to deploy (will be used for Docker image tag and git branch for Helm chart if git deployment is required)"; echo "- OWPROVUI_VERSION - OpenWIFI Provisioning Web UI version to deploy (will be used for Docker image tag and git branch for Helm chart if git deployment is required)"; + echo; + echo "Optional environment variables:" + echo; + echo "- EXTRA_VALUES - extra values that should be passed to Helm deployment separated by comma (,)" + echo "- DEVICE_CERT_LOCATION - path to certificate in PEM format that will be used for load simulator"; + echo "- 
DEVICE_KEY_LOCATION - path to private key in PEM format that will be used for load simulator"; } +# Global variables +VALUES_FILE_LOCATION_SPLITTED=() +EXTRA_VALUES_SPLITTED=() + # Helper functions check_if_chart_version_is_release() { PARSED_CHART_VERSION=$(echo $CHART_VERSION | grep -xP "v\d+\.\d+\.\d+.*") @@ -70,6 +80,9 @@ fi [ -z ${CERT_LOCATION+x} ] && echo "CERT_LOCATION is unset" && usage && exit 1 [ -z ${KEY_LOCATION+x} ] && echo "KEY_LOCATION is unset" && usage && exit 1 +[ -z ${DEVICE_CERT_LOCATION+x} ] && echo "DEVICE_CERT_LOCATION is unset, setting it to CERT_LOCATION" && export DEVICE_CERT_LOCATION=$CERT_LOCATION +[ -z ${DEVICE_KEY_LOCATION+x} ] && echo "DEVICE_KEY_LOCATION is unset, setting it to KEY_LOCATION" && export DEVICE_KEY_LOCATION=$KEY_LOCATION + # Transform some environment variables export OWGW_VERSION_TAG=$(echo ${OWGW_VERSION} | tr '/' '-') export OWGWUI_VERSION_TAG=$(echo ${OWGWUI_VERSION} | tr '/' '-') @@ -78,6 +91,9 @@ export OWFMS_VERSION_TAG=$(echo ${OWFMS_VERSION} | tr '/' '-') export OWPROV_VERSION_TAG=$(echo ${OWPROV_VERSION} | tr '/' '-') export OWPROVUI_VERSION_TAG=$(echo ${OWPROVUI_VERSION} | tr '/' '-') +# Debug get bash version +bash --version > /dev/stderr + # Check deployment method that's required for this environment helm plugin install https://github.com/databus23/helm-diff || true if [[ "$DEPLOY_METHOD" == "git" ]]; then @@ -110,36 +126,45 @@ else fi fi +VALUES_FILES_FLAGS=() +IFS=',' read -ra VALUES_FILE_LOCATION_SPLITTED <<< "$VALUES_FILE_LOCATION" +for VALUE_FILE in ${VALUES_FILE_LOCATION_SPLITTED[*]}; do + VALUES_FILES_FLAGS+=("-f" $VALUE_FILE) +done +EXTRA_VALUES_FLAGS=() +IFS=',' read -ra EXTRA_VALUES_SPLITTED <<< "$EXTRA_VALUES" +for EXTRA_VALUE in ${EXTRA_VALUES_SPLITTED[*]}; do + EXTRA_VALUES_FLAGS+=("--set" $EXTRA_VALUE) +done + # Run the deployment helm upgrade --install --create-namespace --wait --timeout 60m \ --namespace openwifi-${NAMESPACE} \ - -f $VALUES_FILE_LOCATION \ - --set 
owgw.configProperties."rtty\.token"=${RTTY_TOKEN} \ - --set owsec.configProperties."authentication\.default\.username"=${OWGW_AUTH_USERNAME} \ - --set owsec.configProperties."authentication\.default\.password"=${OWGW_AUTH_PASSWORD} \ - --set rttys.config.token=${RTTY_TOKEN} \ - --set owfms.configProperties."s3\.secret"=${OWFMS_S3_SECRET} \ - --set owfms.configProperties."s3\.key"=${OWFMS_S3_KEY} \ + ${VALUES_FILES_FLAGS[*]} \ --set owgw.services.owgw.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=gw-${NAMESPACE}.cicd.lab.wlan.tip.build \ --set owgw.configProperties."openwifi\.fileuploader\.host\.0\.name"=gw-${NAMESPACE}.cicd.lab.wlan.tip.build \ --set owgw.configProperties."rtty\.server"=rtty-${NAMESPACE}.cicd.lab.wlan.tip.build \ --set owgw.configProperties."openwifi\.system\.uri\.public"=https://gw-${NAMESPACE}.cicd.lab.wlan.tip.build:16002 \ --set owgw.configProperties."openwifi\.system\.uri\.private"=https://gw-${NAMESPACE}.cicd.lab.wlan.tip.build:17002 \ --set owgw.configProperties."openwifi\.system\.uri\.ui"=https://webui-${NAMESPACE}.cicd.lab.wlan.tip.build \ + --set owgw.configProperties."rtty\.token"=${RTTY_TOKEN} \ --set owgw.public_env_variables.OWSEC=sec-${NAMESPACE}.cicd.lab.wlan.tip.build:16001 \ + --set owsec.configProperties."authentication\.default\.username"=${OWGW_AUTH_USERNAME} \ + --set owsec.configProperties."authentication\.default\.password"=${OWGW_AUTH_PASSWORD} \ --set owsec.services.owsec.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=sec-${NAMESPACE}.cicd.lab.wlan.tip.build \ --set owsec.configProperties."openwifi\.system\.uri\.public"=https://sec-${NAMESPACE}.cicd.lab.wlan.tip.build:16001 \ --set owsec.configProperties."openwifi\.system\.uri\.private"=https://sec-${NAMESPACE}.cicd.lab.wlan.tip.build:17001 \ --set owsec.configProperties."openwifi\.system\.uri\.ui"=https://webui-${NAMESPACE}.cicd.lab.wlan.tip.build \ - --set 
rttys.services.rttys.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=rtty-${NAMESPACE}.cicd.lab.wlan.tip.build \ - --set owgwui.ingresses.default.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=webui-${NAMESPACE}.cicd.lab.wlan.tip.build \ - --set owgwui.ingresses.default.hosts={webui-${NAMESPACE}.cicd.lab.wlan.tip.build} \ - --set owgwui.public_env_variables.DEFAULT_UCENTRALSEC_URL=https://sec-${NAMESPACE}.cicd.lab.wlan.tip.build:16001 \ + --set owfms.configProperties."s3\.secret"=${OWFMS_S3_SECRET} \ + --set owfms.configProperties."s3\.key"=${OWFMS_S3_KEY} \ --set owfms.services.owfms.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=fms-${NAMESPACE}.cicd.lab.wlan.tip.build \ --set owfms.configProperties."openwifi\.system\.uri\.public"=https://fms-${NAMESPACE}.cicd.lab.wlan.tip.build:16004 \ --set owfms.configProperties."openwifi\.system\.uri\.private"=https://fms-${NAMESPACE}.cicd.lab.wlan.tip.build:17004 \ --set owfms.configProperties."openwifi\.system\.uri\.ui"=https://webui-${NAMESPACE}.cicd.lab.wlan.tip.build \ --set owfms.public_env_variables.OWSEC=sec-${NAMESPACE}.cicd.lab.wlan.tip.build:16001 \ + --set owgwui.ingresses.default.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=webui-${NAMESPACE}.cicd.lab.wlan.tip.build \ + --set owgwui.ingresses.default.hosts={webui-${NAMESPACE}.cicd.lab.wlan.tip.build} \ + --set owgwui.public_env_variables.DEFAULT_UCENTRALSEC_URL=https://sec-${NAMESPACE}.cicd.lab.wlan.tip.build:16001 \ --set owprov.services.owprov.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=prov-${NAMESPACE}.cicd.lab.wlan.tip.build \ --set owprov.configProperties."openwifi\.system\.uri\.public"=https://prov-${NAMESPACE}.cicd.lab.wlan.tip.build:16005 \ --set owprov.configProperties."openwifi\.system\.uri\.private"=https://prov-${NAMESPACE}.cicd.lab.wlan.tip.build:17005 \ @@ -148,8 +173,19 @@ helm upgrade --install --create-namespace --wait --timeout 60m \ --set 
owprovui.ingresses.default.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=provui-${NAMESPACE}.cicd.lab.wlan.tip.build \ --set owprovui.ingresses.default.hosts={provui-${NAMESPACE}.cicd.lab.wlan.tip.build} \ --set owprovui.public_env_variables.DEFAULT_UCENTRALSEC_URL=https://sec-${NAMESPACE}.cicd.lab.wlan.tip.build:16001 \ + --set rttys.config.token=${RTTY_TOKEN} \ + --set rttys.services.rttys.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=rtty-${NAMESPACE}.cicd.lab.wlan.tip.build \ --set clustersysteminfo.public_env_variables.OWSEC=sec-${NAMESPACE}.cicd.lab.wlan.tip.build:16001 \ --set clustersysteminfo.secret_env_variables.OWSEC_NEW_PASSWORD=${OWSEC_NEW_PASSWORD} \ + --set owls.services.owls.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=ls-${NAMESPACE}.cicd.lab.wlan.tip.build \ + --set owls.configProperties."openwifi\.system\.uri\.public"=https://ls-${NAMESPACE}.cicd.lab.wlan.tip.build:16007 \ + --set owls.configProperties."openwifi\.system\.uri\.private"=https://ls-${NAMESPACE}.cicd.lab.wlan.tip.build:17007 \ + --set owls.configProperties."openwifi\.system\.uri\.ui"=https://webui-${NAMESPACE}.cicd.lab.wlan.tip.build \ + --set owlsui.ingresses.default.annotations."external-dns\.alpha\.kubernetes\.io/hostname"=lsui-${NAMESPACE}.cicd.lab.wlan.tip.build \ + --set owlsui.ingresses.default.hosts={lsui-${NAMESPACE}.cicd.lab.wlan.tip.build} \ + --set owlsui.public_env_variables.DEFAULT_UCENTRALSEC_URL=https://sec-${NAMESPACE}.cicd.lab.wlan.tip.build:16001 \ + --set haproxy.service.annotations."external-dns\.alpha\.kubernetes\.io/hostname"="gw-${NAMESPACE}.cicd.lab.wlan.tip.build\,sec-${NAMESPACE}.cicd.lab.wlan.tip.build\,fms-${NAMESPACE}.cicd.lab.wlan.tip.build\,prov-${NAMESPACE}.cicd.lab.wlan.tip.build\,rtty-${NAMESPACE}.cicd.lab.wlan.tip.build" \ + ${EXTRA_VALUES_FLAGS[*]} \ --set-file owgw.certs."restapi-cert\.pem"=$CERT_LOCATION \ --set-file owgw.certs."restapi-key\.pem"=$KEY_LOCATION \ --set-file 
owgw.certs."websocket-cert\.pem"=$CERT_LOCATION \ @@ -162,4 +198,8 @@ helm upgrade --install --create-namespace --wait --timeout 60m \ --set-file owfms.certs."restapi-key\.pem"=$KEY_LOCATION \ --set-file owprov.certs."restapi-cert\.pem"=$CERT_LOCATION \ --set-file owprov.certs."restapi-key\.pem"=$KEY_LOCATION \ + --set-file owls.certs."restapi-cert\.pem"=$CERT_LOCATION \ + --set-file owls.certs."restapi-key\.pem"=$KEY_LOCATION \ + --set-file owls.certs."device-cert\.pem"=$DEVICE_CERT_LOCATION \ + --set-file owls.certs."device-key\.pem"=$DEVICE_KEY_LOCATION \ tip-openwifi $DEPLOY_SOURCE diff --git a/helm/ucentral/values.ucentral-qa.external-db.yaml b/helm/ucentral/values.ucentral-qa.external-db.yaml index a9ad34d54..a7e5d1036 100644 --- a/helm/ucentral/values.ucentral-qa.external-db.yaml +++ b/helm/ucentral/values.ucentral-qa.external-db.yaml @@ -1,73 +1,10 @@ owgw: - services: - owgw: - type: LoadBalancer - annotations: - service.beta.kubernetes.io/aws-load-balancer-type: "nlb-ip" - service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing - service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: "16102" - service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl - service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" - service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16002,16003,17002" - external-dns.alpha.kubernetes.io/ttl: "60" configProperties: - # -> Public part - # File uploader - # rtty - rtty.enabled: "true" storage.type: postgresql storage.type.postgresql.host: owgw-pgsql storage.type.postgresql.database: owgw - resources: - requests: - cpu: 100m - memory: 100Mi - limits: - cpu: 100m - memory: 200Mi - - securityContext: - sysctls: - - name: net.ipv4.tcp_keepalive_intvl - value: "5" - - name: net.ipv4.tcp_keepalive_probes - value: "2" - - name: net.ipv4.tcp_keepalive_time - value: "45" - - podAnnotations: - 
cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - - podSecurityPolicy: - enabled: true - - certs: - restapi-ca.pem: | - -----BEGIN CERTIFICATE----- - MIIDojCCAoqgAwIBAgIUPVYBpqNbcLYygF6Mx+qxSWwQyFowDQYJKoZIhvcNAQEL - BQAwaTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG1RlbGVjb20gSW5mcmEgUHJvamVj - dCwgSW5jLjEMMAoGA1UECxMDVElQMSYwJAYDVQQDEx1UZWxlY29tIEluZnJhIFBy - b2plY3QgUm9vdCBDQTAeFw0yMTA0MTMyMjQyNDRaFw0zMTA0MTMyMjM4NDZaMGkx - CzAJBgNVBAYTAlVTMSQwIgYDVQQKExtUZWxlY29tIEluZnJhIFByb2plY3QsIElu - Yy4xDDAKBgNVBAsTA1RJUDEmMCQGA1UEAxMdVGVsZWNvbSBJbmZyYSBQcm9qZWN0 - IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIGCibwf5u - AAwZ+1H8U0e3u2V+0d2gSctucoK86XwUmfe1V2a/qlCYZd29r80IuN1IIeB0naIm - KnK/MzXW87clF6tFd1+HzEvmlY/W4KyIXalVCTEzirFSvBEG2oZpM0yC3AefytAO - aOpA00LaM3xTfTqMKIRhJBuLy0I4ANUVG6ixVebbGuc78IodleqiLoWy2Q9QHyEO - t/7hZndJhiVogh0PveRhho45EbsACu7ymDY+JhlIleevqwlE3iQoq0YcmYADHno6 - Eq8vcwLpZFxihupUafkd1T3WJYQAJf9coCjBu2qIhNgrcrGD8R9fGswwNRzMRMpX - 720+GjcDW3bJAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFAJG - lmB5sVP2qfL3xZ8hQOTpkQH6MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF - AAOCAQEAVjl9dm4epG9NUYnagT9sg7scVQEPfz3Lt6w1NXJXgD8mAUlK0jXmEyvM - dCPD4514n+8+lM7US8fh+nxc7jO//LwK17Wm9FblgjNFR7+anv0Q99T9fP19DLlF - PSNHL2emogy1bl1lLTAoj8nxg2wVKPDSHBGviQ5LR9fsWUIJDv9Bs5k0qWugWYSj - 19S6qnHeskRDB8MqRLhKMG82oDVLerSnhD0P6HjySBHgTTU7/tYS/OZr1jI6MPbG - L+/DtiR5fDVMNdBSGU89UNTi0wHY9+RFuNlIuvZC+x/swF0V9R5mN+ywquTPtDLA - 5IOM7ItsRmen6u3qu+JXros54e4juQ== - -----END CERTIFICATE----- - postgresql: enabled: true fullnameOverride: owgw-pgsql @@ -77,276 +14,3 @@ owgw: # from https://github.com/Telecominfraproject/wlan-cloud-ucentralgw/blob/master/helm/values.yaml postgresqlUsername: stephb postgresqlPassword: snoopy99 - -owsec: - services: - owsec: - type: LoadBalancer - annotations: - service.beta.kubernetes.io/aws-load-balancer-type: "nlb-ip" - service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing - service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: 
"16101" - service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl - service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" - service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16001,17001" - external-dns.alpha.kubernetes.io/ttl: "60" - - resources: - requests: - cpu: 10m - memory: 15Mi - limits: - cpu: 100m - memory: 100Mi - - podAnnotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - - certs: - restapi-ca.pem: | - -----BEGIN CERTIFICATE----- - MIIDojCCAoqgAwIBAgIUPVYBpqNbcLYygF6Mx+qxSWwQyFowDQYJKoZIhvcNAQEL - BQAwaTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG1RlbGVjb20gSW5mcmEgUHJvamVj - dCwgSW5jLjEMMAoGA1UECxMDVElQMSYwJAYDVQQDEx1UZWxlY29tIEluZnJhIFBy - b2plY3QgUm9vdCBDQTAeFw0yMTA0MTMyMjQyNDRaFw0zMTA0MTMyMjM4NDZaMGkx - CzAJBgNVBAYTAlVTMSQwIgYDVQQKExtUZWxlY29tIEluZnJhIFByb2plY3QsIElu - Yy4xDDAKBgNVBAsTA1RJUDEmMCQGA1UEAxMdVGVsZWNvbSBJbmZyYSBQcm9qZWN0 - IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIGCibwf5u - AAwZ+1H8U0e3u2V+0d2gSctucoK86XwUmfe1V2a/qlCYZd29r80IuN1IIeB0naIm - KnK/MzXW87clF6tFd1+HzEvmlY/W4KyIXalVCTEzirFSvBEG2oZpM0yC3AefytAO - aOpA00LaM3xTfTqMKIRhJBuLy0I4ANUVG6ixVebbGuc78IodleqiLoWy2Q9QHyEO - t/7hZndJhiVogh0PveRhho45EbsACu7ymDY+JhlIleevqwlE3iQoq0YcmYADHno6 - Eq8vcwLpZFxihupUafkd1T3WJYQAJf9coCjBu2qIhNgrcrGD8R9fGswwNRzMRMpX - 720+GjcDW3bJAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFAJG - lmB5sVP2qfL3xZ8hQOTpkQH6MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF - AAOCAQEAVjl9dm4epG9NUYnagT9sg7scVQEPfz3Lt6w1NXJXgD8mAUlK0jXmEyvM - dCPD4514n+8+lM7US8fh+nxc7jO//LwK17Wm9FblgjNFR7+anv0Q99T9fP19DLlF - PSNHL2emogy1bl1lLTAoj8nxg2wVKPDSHBGviQ5LR9fsWUIJDv9Bs5k0qWugWYSj - 19S6qnHeskRDB8MqRLhKMG82oDVLerSnhD0P6HjySBHgTTU7/tYS/OZr1jI6MPbG - L+/DtiR5fDVMNdBSGU89UNTi0wHY9+RFuNlIuvZC+x/swF0V9R5mN+ywquTPtDLA - 5IOM7ItsRmen6u3qu+JXros54e4juQ== - -----END CERTIFICATE----- - -rttys: - enabled: true - - services: - rttys: - type: LoadBalancer - annotations: - 
service.beta.kubernetes.io/aws-load-balancer-type: "nlb-ip" - service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing - service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: "5914" - service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl - service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" - service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "5912,5913" - external-dns.alpha.kubernetes.io/ttl: "60" - - resources: - requests: - cpu: 10m - memory: 15Mi - limits: - cpu: 100m - memory: 100Mi - - podAnnotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - -owgwui: - services: - owgwui: - type: NodePort - - ingresses: - default: - enabled: true - annotations: - kubernetes.io/ingress.class: alb - alb.ingress.kubernetes.io/scheme: internet-facing - alb.ingress.kubernetes.io/group.name: wlan-cicd - alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285 - alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS": 443}]' - alb.ingress.kubernetes.io/actions.ssl-redirect: '{"Type": "redirect", "RedirectConfig": { "Protocol": "HTTPS", "Port": "443", "StatusCode": "HTTP_302"}}' - external-dns.alpha.kubernetes.io/ttl: "60" - paths: - - path: /* - serviceName: owgwui - servicePort: http - - public_env_variables: - ALLOW_UCENTRALSEC_CHANGE: false - - resources: - requests: - cpu: 10m - memory: 30Mi - limits: - cpu: 10m - memory: 30Mi - - podAnnotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - -owfms: - services: - owfms: - type: LoadBalancer - annotations: - service.beta.kubernetes.io/aws-load-balancer-type: "nlb-ip" - service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing - service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: "16104" - service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl - 
service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" - service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16004,17004" - external-dns.alpha.kubernetes.io/ttl: "60" - - resources: - requests: - cpu: 10m - memory: 30Mi - limits: - cpu: 50m - memory: 80Mi - - podAnnotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - - certs: - restapi-ca.pem: | - -----BEGIN CERTIFICATE----- - MIIDojCCAoqgAwIBAgIUPVYBpqNbcLYygF6Mx+qxSWwQyFowDQYJKoZIhvcNAQEL - BQAwaTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG1RlbGVjb20gSW5mcmEgUHJvamVj - dCwgSW5jLjEMMAoGA1UECxMDVElQMSYwJAYDVQQDEx1UZWxlY29tIEluZnJhIFBy - b2plY3QgUm9vdCBDQTAeFw0yMTA0MTMyMjQyNDRaFw0zMTA0MTMyMjM4NDZaMGkx - CzAJBgNVBAYTAlVTMSQwIgYDVQQKExtUZWxlY29tIEluZnJhIFByb2plY3QsIElu - Yy4xDDAKBgNVBAsTA1RJUDEmMCQGA1UEAxMdVGVsZWNvbSBJbmZyYSBQcm9qZWN0 - IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIGCibwf5u - AAwZ+1H8U0e3u2V+0d2gSctucoK86XwUmfe1V2a/qlCYZd29r80IuN1IIeB0naIm - KnK/MzXW87clF6tFd1+HzEvmlY/W4KyIXalVCTEzirFSvBEG2oZpM0yC3AefytAO - aOpA00LaM3xTfTqMKIRhJBuLy0I4ANUVG6ixVebbGuc78IodleqiLoWy2Q9QHyEO - t/7hZndJhiVogh0PveRhho45EbsACu7ymDY+JhlIleevqwlE3iQoq0YcmYADHno6 - Eq8vcwLpZFxihupUafkd1T3WJYQAJf9coCjBu2qIhNgrcrGD8R9fGswwNRzMRMpX - 720+GjcDW3bJAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFAJG - lmB5sVP2qfL3xZ8hQOTpkQH6MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF - AAOCAQEAVjl9dm4epG9NUYnagT9sg7scVQEPfz3Lt6w1NXJXgD8mAUlK0jXmEyvM - dCPD4514n+8+lM7US8fh+nxc7jO//LwK17Wm9FblgjNFR7+anv0Q99T9fP19DLlF - PSNHL2emogy1bl1lLTAoj8nxg2wVKPDSHBGviQ5LR9fsWUIJDv9Bs5k0qWugWYSj - 19S6qnHeskRDB8MqRLhKMG82oDVLerSnhD0P6HjySBHgTTU7/tYS/OZr1jI6MPbG - L+/DtiR5fDVMNdBSGU89UNTi0wHY9+RFuNlIuvZC+x/swF0V9R5mN+ywquTPtDLA - 5IOM7ItsRmen6u3qu+JXros54e4juQ== - -----END CERTIFICATE----- - -owprov: - services: - owprov: - type: LoadBalancer - annotations: - service.beta.kubernetes.io/aws-load-balancer-type: "nlb-ip" - 
service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing - service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: "16105" - service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl - service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" - service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16005,17005" - external-dns.alpha.kubernetes.io/ttl: "60" - - resources: - requests: - cpu: 10m - memory: 20Mi - limits: - cpu: 100m - memory: 100Mi - - podAnnotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - - certs: - restapi-ca.pem: | - -----BEGIN CERTIFICATE----- - MIIDojCCAoqgAwIBAgIUPVYBpqNbcLYygF6Mx+qxSWwQyFowDQYJKoZIhvcNAQEL - BQAwaTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG1RlbGVjb20gSW5mcmEgUHJvamVj - dCwgSW5jLjEMMAoGA1UECxMDVElQMSYwJAYDVQQDEx1UZWxlY29tIEluZnJhIFBy - b2plY3QgUm9vdCBDQTAeFw0yMTA0MTMyMjQyNDRaFw0zMTA0MTMyMjM4NDZaMGkx - CzAJBgNVBAYTAlVTMSQwIgYDVQQKExtUZWxlY29tIEluZnJhIFByb2plY3QsIElu - Yy4xDDAKBgNVBAsTA1RJUDEmMCQGA1UEAxMdVGVsZWNvbSBJbmZyYSBQcm9qZWN0 - IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIGCibwf5u - AAwZ+1H8U0e3u2V+0d2gSctucoK86XwUmfe1V2a/qlCYZd29r80IuN1IIeB0naIm - KnK/MzXW87clF6tFd1+HzEvmlY/W4KyIXalVCTEzirFSvBEG2oZpM0yC3AefytAO - aOpA00LaM3xTfTqMKIRhJBuLy0I4ANUVG6ixVebbGuc78IodleqiLoWy2Q9QHyEO - t/7hZndJhiVogh0PveRhho45EbsACu7ymDY+JhlIleevqwlE3iQoq0YcmYADHno6 - Eq8vcwLpZFxihupUafkd1T3WJYQAJf9coCjBu2qIhNgrcrGD8R9fGswwNRzMRMpX - 720+GjcDW3bJAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFAJG - lmB5sVP2qfL3xZ8hQOTpkQH6MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF - AAOCAQEAVjl9dm4epG9NUYnagT9sg7scVQEPfz3Lt6w1NXJXgD8mAUlK0jXmEyvM - dCPD4514n+8+lM7US8fh+nxc7jO//LwK17Wm9FblgjNFR7+anv0Q99T9fP19DLlF - PSNHL2emogy1bl1lLTAoj8nxg2wVKPDSHBGviQ5LR9fsWUIJDv9Bs5k0qWugWYSj - 19S6qnHeskRDB8MqRLhKMG82oDVLerSnhD0P6HjySBHgTTU7/tYS/OZr1jI6MPbG - L+/DtiR5fDVMNdBSGU89UNTi0wHY9+RFuNlIuvZC+x/swF0V9R5mN+ywquTPtDLA - 
5IOM7ItsRmen6u3qu+JXros54e4juQ== - -----END CERTIFICATE----- - -owprovui: - services: - owprovui: - type: NodePort - - ingresses: - default: - enabled: true - annotations: - kubernetes.io/ingress.class: alb - alb.ingress.kubernetes.io/scheme: internet-facing - alb.ingress.kubernetes.io/group.name: wlan-cicd - alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285 - alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS": 443}]' - alb.ingress.kubernetes.io/actions.ssl-redirect: '{"Type": "redirect", "RedirectConfig": { "Protocol": "HTTPS", "Port": "443", "StatusCode": "HTTP_302"}}' - external-dns.alpha.kubernetes.io/ttl: "60" - paths: - - path: /* - serviceName: owprovui - servicePort: http - - public_env_variables: - ALLOW_UCENTRALSEC_CHANGE: false - -# resources: -# requests: -# cpu: 10m -# memory: 30m -# limits: -# cpu: 100m -# memory: 100m - - podAnnotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - -kafka: - commonAnnotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - heapOpts: -Xmx512m -Xms512m - resources: - requests: - cpu: 100m - memory: 512Mi - limits: - cpu: 200m - memory: 1Gi - readinessProbe: - initialDelaySeconds: 45 - livenessProbe: - initialDelaySeconds: 60 - zookeeper: - commonAnnotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - heapSize: 256 - resources: - requests: - cpu: 100m - memory: 256Mi - limits: - cpu: 200m - memory: 384Mi - -clustersysteminfo: - enabled: true diff --git a/helm/ucentral/values.ucentral-qa.haproxy-enabled.yaml b/helm/ucentral/values.ucentral-qa.haproxy-enabled.yaml new file mode 100644 index 000000000..1e675eb43 --- /dev/null +++ b/helm/ucentral/values.ucentral-qa.haproxy-enabled.yaml @@ -0,0 +1,46 @@ +owgw: + services: + owgw: + type: ClusterIP + annotations: + service.beta.kubernetes.io/aws-load-balancer-type: "none" + +owsec: + services: + owsec: + type: ClusterIP + annotations: + 
service.beta.kubernetes.io/aws-load-balancer-type: "none" + +rttys: + services: + rttys: + type: ClusterIP + annotations: + service.beta.kubernetes.io/aws-load-balancer-type: "none" + +owfms: + services: + owfms: + type: ClusterIP + annotations: + service.beta.kubernetes.io/aws-load-balancer-type: "none" + +owprov: + services: + owprov: + type: ClusterIP + annotations: + service.beta.kubernetes.io/aws-load-balancer-type: "none" + +haproxy: + enabled: true + service: + type: LoadBalancer + annotations: + service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl + service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: "8080" + service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing + service.beta.kubernetes.io/aws-load-balancer-ssl-cert: arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285 + service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16004,17004,16002,16003,17002,16005,17005,16001,17001,5912,5913" + service.beta.kubernetes.io/aws-load-balancer-type: nlb-ip diff --git a/helm/ucentral/values.ucentral-qa.owls-enabled.yaml b/helm/ucentral/values.ucentral-qa.owls-enabled.yaml new file mode 100644 index 000000000..c2cc0e685 --- /dev/null +++ b/helm/ucentral/values.ucentral-qa.owls-enabled.yaml @@ -0,0 +1,71 @@ +owgw: + configProperties: + simulatorid: 53494D020202 + +owls: + enabled: true + services: + owls: + type: LoadBalancer + annotations: + service.beta.kubernetes.io/aws-load-balancer-type: "nlb-ip" + service.beta.kubernetes.io/aws-load-balancer-scheme: internet-facing + service.beta.kubernetes.io/aws-load-balancer-healthcheck-port: "16107" + service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl + service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" + service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16007,17007" + external-dns.alpha.kubernetes.io/ttl: "60" + + podAnnotations: + 
cluster-autoscaler.kubernetes.io/safe-to-evict: "false" + + certs: + restapi-ca.pem: | + -----BEGIN CERTIFICATE----- + MIIDojCCAoqgAwIBAgIUPVYBpqNbcLYygF6Mx+qxSWwQyFowDQYJKoZIhvcNAQEL + BQAwaTELMAkGA1UEBhMCVVMxJDAiBgNVBAoTG1RlbGVjb20gSW5mcmEgUHJvamVj + dCwgSW5jLjEMMAoGA1UECxMDVElQMSYwJAYDVQQDEx1UZWxlY29tIEluZnJhIFBy + b2plY3QgUm9vdCBDQTAeFw0yMTA0MTMyMjQyNDRaFw0zMTA0MTMyMjM4NDZaMGkx + CzAJBgNVBAYTAlVTMSQwIgYDVQQKExtUZWxlY29tIEluZnJhIFByb2plY3QsIElu + Yy4xDDAKBgNVBAsTA1RJUDEmMCQGA1UEAxMdVGVsZWNvbSBJbmZyYSBQcm9qZWN0 + IFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIGCibwf5u + AAwZ+1H8U0e3u2V+0d2gSctucoK86XwUmfe1V2a/qlCYZd29r80IuN1IIeB0naIm + KnK/MzXW87clF6tFd1+HzEvmlY/W4KyIXalVCTEzirFSvBEG2oZpM0yC3AefytAO + aOpA00LaM3xTfTqMKIRhJBuLy0I4ANUVG6ixVebbGuc78IodleqiLoWy2Q9QHyEO + t/7hZndJhiVogh0PveRhho45EbsACu7ymDY+JhlIleevqwlE3iQoq0YcmYADHno6 + Eq8vcwLpZFxihupUafkd1T3WJYQAJf9coCjBu2qIhNgrcrGD8R9fGswwNRzMRMpX + 720+GjcDW3bJAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFAJG + lmB5sVP2qfL3xZ8hQOTpkQH6MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsF + AAOCAQEAVjl9dm4epG9NUYnagT9sg7scVQEPfz3Lt6w1NXJXgD8mAUlK0jXmEyvM + dCPD4514n+8+lM7US8fh+nxc7jO//LwK17Wm9FblgjNFR7+anv0Q99T9fP19DLlF + PSNHL2emogy1bl1lLTAoj8nxg2wVKPDSHBGviQ5LR9fsWUIJDv9Bs5k0qWugWYSj + 19S6qnHeskRDB8MqRLhKMG82oDVLerSnhD0P6HjySBHgTTU7/tYS/OZr1jI6MPbG + L+/DtiR5fDVMNdBSGU89UNTi0wHY9+RFuNlIuvZC+x/swF0V9R5mN+ywquTPtDLA + 5IOM7ItsRmen6u3qu+JXros54e4juQ== + -----END CERTIFICATE----- + +owlsui: + enabled: true + + services: + owlsui: + type: NodePort + + ingresses: + default: + enabled: true + annotations: + kubernetes.io/ingress.class: alb + alb.ingress.kubernetes.io/scheme: internet-facing + alb.ingress.kubernetes.io/group.name: wlan-cicd + alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285 + alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS": 443}]' + alb.ingress.kubernetes.io/actions.ssl-redirect: '{"Type": "redirect", 
"RedirectConfig": { "Protocol": "HTTPS", "Port": "443", "StatusCode": "HTTP_302"}}' + external-dns.alpha.kubernetes.io/ttl: "60" + paths: + - path: /* + serviceName: owlsui + servicePort: http + + podAnnotations: + cluster-autoscaler.kubernetes.io/safe-to-evict: "false" diff --git a/helm/ucentral/values.ucentral-qa.test-nodes.yaml b/helm/ucentral/values.ucentral-qa.test-nodes.yaml new file mode 100644 index 000000000..d64698fd9 --- /dev/null +++ b/helm/ucentral/values.ucentral-qa.test-nodes.yaml @@ -0,0 +1,86 @@ +owgw: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + + postgresql: + primary: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + readReplicas: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + +owsec: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + +owgwui: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + +owfms: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + +owprov: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + +owprovui: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + +rttys: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + +kafka: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" + zookeeper: + nodeSelector: + env: tests + tolerations: + - key: "tests" + operator: "Exists" + effect: "NoSchedule" diff --git a/helm/ucentral/values.ucentral-qa.yaml b/helm/ucentral/values.ucentral-qa.yaml index a7e06d7a1..81c8f9487 100644 --- a/helm/ucentral/values.ucentral-qa.yaml +++ 
b/helm/ucentral/values.ucentral-qa.yaml @@ -16,12 +16,8 @@ owgw: service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16002,16003,17002" - external-dns.alpha.kubernetes.io/ttl: "60" configProperties: - # -> Public part - # File uploader - # rtty rtty.enabled: "true" resources: @@ -90,7 +86,6 @@ owsec: service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16001,17001" - external-dns.alpha.kubernetes.io/ttl: "60" resources: requests: @@ -141,7 +136,6 @@ rttys: service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "5912,5913" - external-dns.alpha.kubernetes.io/ttl: "60" resources: requests: @@ -169,7 +163,6 @@ owgwui: alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285 alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS": 443}]' alb.ingress.kubernetes.io/actions.ssl-redirect: '{"Type": "redirect", "RedirectConfig": { "Protocol": "HTTPS", "Port": "443", "StatusCode": "HTTP_302"}}' - external-dns.alpha.kubernetes.io/ttl: "60" paths: - path: /* serviceName: owgwui @@ -207,7 +200,6 @@ owfms: service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16004,17004" - 
external-dns.alpha.kubernetes.io/ttl: "60" resources: requests: @@ -262,7 +254,6 @@ owprov: service.beta.kubernetes.io/aws-load-balancer-backend-protocol: ssl service.beta.kubernetes.io/aws-load-balancer-ssl-cert: "arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285" service.beta.kubernetes.io/aws-load-balancer-ssl-ports: "16005,17005" - external-dns.alpha.kubernetes.io/ttl: "60" resources: requests: @@ -315,7 +306,6 @@ owprovui: alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:us-east-2:289708231103:certificate/bfa89c7a-5b64-4a8a-bcfe-ffec655b5285 alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS": 443}]' alb.ingress.kubernetes.io/actions.ssl-redirect: '{"Type": "redirect", "RedirectConfig": { "Protocol": "HTTPS", "Port": "443", "StatusCode": "HTTP_302"}}' - external-dns.alpha.kubernetes.io/ttl: "60" paths: - path: /* serviceName: owprovui @@ -364,3 +354,4 @@ kafka: clustersysteminfo: enabled: true + delay: 60 # delaying to wait for AWS Route53 DNS propagation diff --git a/libs/controller/controller_2x/controller.py b/libs/controller/controller_2x/controller.py index b0027c350..d3c669fcc 100644 --- a/libs/controller/controller_2x/controller.py +++ b/libs/controller/controller_2x/controller.py @@ -394,6 +394,25 @@ class UProfileUtility: self.base_profile_config['services']['lldp']['describe'] = "OpenWiFi - expressWiFi" self.base_profile_config['services']['lldp']['location'] = "Hotspot" + def set_captive_portal(self): + + if self.mode == "NAT": + max_client = { + "max-clients": 32 + } + # sourceFile = open('captive_config.py', 'w') + + self.base_profile_config["interfaces"][1]["name"] = "captive" + self.base_profile_config["interfaces"][1]["ipv4"]["subnet"] = "192.168.2.1/24" + self.base_profile_config["interfaces"][1]["captive"] = max_client + del self.base_profile_config["interfaces"][1]["ethernet"] + del self.base_profile_config["interfaces"][1]["services"] + del 
self.base_profile_config["metrics"]["wifi-frames"] + del self.base_profile_config["metrics"]["dhcp-snooping"] + # print(self.base_profile_config) + # print(self.base_profile_config, file=sourceFile) + # sourceFile.close() + def encryption_lookup(self, encryption="psk"): diff --git a/libs/perfecto_libs/iOS_lib.py b/libs/perfecto_libs/iOS_lib.py index b92a0ac47..8696309f9 100644 --- a/libs/perfecto_libs/iOS_lib.py +++ b/libs/perfecto_libs/iOS_lib.py @@ -2189,7 +2189,7 @@ def wifi_connect_eap(request, WifiName, User, ttls_passwd, setup_perfectoMobile, #Captive Portal def captive_portal_ios(request, WifiName, WifiPass, setup_perfectoMobile, connData): print("\n-------------------------------------") - print("CaptivePortal Connection IOS") + print("CaptivePortal Connection iOS") print("-------------------------------------") reportFlag = True @@ -2210,12 +2210,11 @@ def captive_portal_ios(request, WifiName, WifiPass, setup_perfectoMobile, connDa openApp(connData["bundleId-iOS-Settings"], setup_perfectoMobile) try: - time.sleep(2) - driver.implicitly_wait(2) + time.sleep(1) try: print("Verifying Connected Wifi Connection") report.step_start("Loading Wifi Page") - element = driver.find_element_by_xpath("//XCUIElementTypeCell[@name='Wi-Fi']") + element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((MobileBy.XPATH, "//XCUIElementTypeCell[@name='Wi-Fi']"))) element.click() except NoSuchElementException: print("Exception: Verify Xpath - unable to click on Wifi") @@ -2273,12 +2272,11 @@ def captive_portal_ios(request, WifiName, WifiPass, setup_perfectoMobile, connDa try: print("getting in to Additional details") - additional_details_element = driver.find_element_by_xpath( - "//*[@label='selected']/parent::*/parent::*/XCUIElementTypeButton[@label='More Info']") + additional_details_element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((MobileBy.XPATH, "//*[@label='selected']/parent::*/parent::*/XCUIElementTypeButton[@label='More Info']"))) 
additional_details_element.click() try: print("Forget Connected Network") - forget_ssid = driver.find_element_by_xpath("//*[@label='Forget This Network']") + forget_ssid = WebDriverWait(driver, 10).until(EC.presence_of_element_located((MobileBy.XPATH, "//*[@label='Forget This Network']"))) forget_ssid.click() print("Forget old ssid") try: @@ -2357,45 +2355,36 @@ def captive_portal_ios(request, WifiName, WifiPass, setup_perfectoMobile, connDa # ---------------------Click on join------------------------------- try: - time.sleep(4) - driver.implicitly_wait(4) + time.sleep(8) try: time.sleep(8) - driver.implicitly_wait(2) print("Acceptiong terms and Services") report.step_start("loading Terms Page") - element = driver.find_element_by_xpath("//*[@label='Accept Terms of Service']") + element = WebDriverWait(driver,40).until(EC.presence_of_element_located((MobileBy.XPATH, "//*[@label='Accept Terms of Service']"))) element.click() except NoSuchElementException: print("Exception: Accept Terms of Service Button Not Found") reportFlag = False assert reportFlag try: - time.sleep(2) - driver.implicitly_wait(2) print("Continue to connect the services") report.step_start("Continiue Terms and Services Page") - element = driver.find_element_by_xpath("//*[@label='Continue']") + element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((MobileBy.XPATH, "//*[@label='Continue']"))) element.click() except NoSuchElementException: print("Exception: Continue to connect Terms of Service Button Not Found") try: - time.sleep(2) - driver.implicitly_wait(2) print("Continue to the services") report.step_start("Continue to use captive portal services") - element = driver.find_element_by_xpath("//*[@label='Continue']") + element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((MobileBy.XPATH, "//*[@label='Continue']"))) element.click() except NoSuchElementException: print("Exception: Continue to use captive portal services not found") try: time.sleep(2) - 
driver.implicitly_wait(2) print("Final Result Page") report.step_start("Final Confirmation Page for Captive Portal Login") - time.sleep(2) - driver.implicitly_wait(2) - element = driver.find_element_by_xpath("//*[@label='Done']") + element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((MobileBy.XPATH, "//*[@label='Done']"))) element.click() # if element == driver.find_element_by_xpath("//XCUIElementTypeOther[@label='Success']"): # element1 = driver.find_element_by_xpath("//*[@label='Done']") diff --git a/tests/configuration.py b/tests/configuration.py index b87e47e1c..7f0f0207e 100644 --- a/tests/configuration.py +++ b/tests/configuration.py @@ -692,4 +692,4 @@ PASSPOINT_PROFILE_INFO = { } } -open_flow = {} \ No newline at end of file +open_flow = {} diff --git a/tests/e2e/interOp/captive_portal/bridge_mode/__init__.py b/tests/e2e/interOp/captive_portal/nat_mode/__init__.py similarity index 100% rename from tests/e2e/interOp/captive_portal/bridge_mode/__init__.py rename to tests/e2e/interOp/captive_portal/nat_mode/__init__.py diff --git a/tests/e2e/interOp/captive_portal/bridge_mode/android/__init__.py b/tests/e2e/interOp/captive_portal/nat_mode/android/__init__.py similarity index 100% rename from tests/e2e/interOp/captive_portal/bridge_mode/android/__init__.py rename to tests/e2e/interOp/captive_portal/nat_mode/android/__init__.py diff --git a/tests/e2e/interOp/captive_portal/bridge_mode/android/test_captive_portal_modes.py b/tests/e2e/interOp/captive_portal/nat_mode/android/test_captive_portal_modes.py similarity index 87% rename from tests/e2e/interOp/captive_portal/bridge_mode/android/test_captive_portal_modes.py rename to tests/e2e/interOp/captive_portal/nat_mode/android/test_captive_portal_modes.py index 3423ee54d..b2dfc26dd 100644 --- a/tests/e2e/interOp/captive_portal/bridge_mode/android/test_captive_portal_modes.py +++ b/tests/e2e/interOp/captive_portal/nat_mode/android/test_captive_portal_modes.py @@ -21,32 +21,33 @@ from android_lib 
import closeApp, set_APconnMobileDevice_android, Toggle_Airplan get_ip_address_and, verifyUploadDownloadSpeed_android, wifi_connect, wifi_disconnect_and_forget, captive_portal_and pytestmark = [pytest.mark.sanity, pytest.mark.interop, pytest.mark.android, pytest.mark.interop_and, pytest.mark.captive_portal - ,pytest.mark.interop_uc_sanity, pytest.mark.bridge] + ,pytest.mark.regression_interop, pytest.mark.nat] setup_params_general = { - "mode": "BRIDGE", + "mode": "NAT", "ssid_modes": { - "open": [{"ssid_name": "captive_open_2g", "appliedRadios": ["2G"]}, - {"ssid_name": "captive_open_5g", "appliedRadios": ["5G"]}], - "wpa": [{"ssid_name": "captive_wpa_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, - {"ssid_name": "captive_wpa_5g", "appliedRadios": ["5G"], + "open": [{"ssid_name": "nat_captive_open_2g", "appliedRadios": ["2G"]}, + {"ssid_name": "nat_captive_open_5g", "appliedRadios": ["5G"]}], + "wpa": [{"ssid_name": "nat_captive_wpa_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, + {"ssid_name": "nat_captive_wpa_5g", "appliedRadios": ["5G"], "security_key": "lanforge"}], - "wpa2_personal": [{"ssid_name": "captive_wpa2_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, - {"ssid_name": "captive_wpa2_5g", "appliedRadios": ["5G"], + "wpa2_personal": [{"ssid_name": "nat_captive_wpa2_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, + {"ssid_name": "nat_captive_wpa2_5g", "appliedRadios": ["5G"], "security_key": "lanforge"}], "wpa3_personal": [ - {"ssid_name": "captive_wpa3_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, - {"ssid_name": "captive_wpa3_5g", "appliedRadios": ["5G"], + {"ssid_name": "nat_captive_wpa3_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, + {"ssid_name": "nat_captive_wpa3_5g", "appliedRadios": ["5G"], "security_key": "lanforge"}]}, "rf": {}, - "radius": False + "radius": False, + "captive_portal": True } @allure.suite(suite_name="interop sanity") -@allure.sub_suite(sub_suite_name="Bridge 
Mode Captive Portal : Suite-A") +@allure.sub_suite(sub_suite_name="Nat Mode Captive Portal : Suite-A") @pytest.mark.InteropsuiteA -@allure.feature("BRIDGE MODE CAPTIVE PORTAL") +@allure.feature("NAT MODE CAPTIVE PORTAL") @pytest.mark.parametrize( 'setup_profiles', [setup_params_general], @@ -54,14 +55,14 @@ setup_params_general = { scope="class" ) @pytest.mark.usefixtures("setup_profiles") -class TestBridgeModeCaptivePortalSuiteOneBridge(object): +class TestNATModeCaptivePortalSuiteOneNAT(object): """ Captive Portal SuiteA - pytest -m "captive portal and bridge and InteropsuiteA" + pytest -m "captive portal and nat and InteropsuiteA" """ @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5178", name="WIFI-5178") @pytest.mark.twog @pytest.mark.open - def test_Captive_Portal_Open_2g_BRIDGE(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, + def test_Captive_Portal_Open_2g_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_android): profile_data = setup_params_general["ssid_modes"]["open"][0] @@ -96,7 +97,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5141", name="WIFI-5141") @pytest.mark.fiveg @pytest.mark.open - def test_Captive_Portal_Open_5g_BRIDGE(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, + def test_Captive_Portal_Open_5g_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_android): profile_data = setup_params_general["ssid_modes"]["open"][1] @@ -132,7 +133,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5180", name="WIFI-5180") @pytest.mark.twog @pytest.mark.wpa - def test_Captive_Portal_WPA_2g_Bridge(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, + def test_Captive_Portal_WPA_2g_NAT(self, request, 
get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_android): profile_data = setup_params_general["ssid_modes"]["wpa"][0] @@ -167,7 +168,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5144", name="WIFI-5144") @pytest.mark.fiveg @pytest.mark.wpa - def test_Captive_Portal_WPA_5g_Bridge(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, + def test_Captive_Portal_WPA_5g_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_android): profile_data = setup_params_general["ssid_modes"]["wpa"][1] @@ -203,7 +204,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5184", name="WIFI-5184") @pytest.mark.twog @pytest.mark.wpa2_personal - def test_Captive_Portal_WPA2_2g_Personal_Bridge(self, request, get_vif_state, get_ap_logs, + def test_Captive_Portal_WPA2_2g_Personal_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_android): @@ -240,7 +241,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5147", name="WIFI-5147") @pytest.mark.fiveg @pytest.mark.wpa2_personal - def test_Captive_Portal_WPA2_5g_Personal_Bridge(self, request, get_vif_state, get_ap_logs, + def test_Captive_Portal_WPA2_5g_Personal_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_android): diff --git a/tests/e2e/interOp/captive_portal/bridge_mode/ios/__init__.py b/tests/e2e/interOp/captive_portal/nat_mode/ios/__init__.py similarity index 100% rename from tests/e2e/interOp/captive_portal/bridge_mode/ios/__init__.py rename to tests/e2e/interOp/captive_portal/nat_mode/ios/__init__.py diff --git a/tests/e2e/interOp/captive_portal/bridge_mode/ios/test_captive_portal_modes.py 
b/tests/e2e/interOp/captive_portal/nat_mode/ios/test_captive_portal_modes.py similarity index 87% rename from tests/e2e/interOp/captive_portal/bridge_mode/ios/test_captive_portal_modes.py rename to tests/e2e/interOp/captive_portal/nat_mode/ios/test_captive_portal_modes.py index 0ab0afb1a..2853ad4aa 100644 --- a/tests/e2e/interOp/captive_portal/bridge_mode/ios/test_captive_portal_modes.py +++ b/tests/e2e/interOp/captive_portal/nat_mode/ios/test_captive_portal_modes.py @@ -22,48 +22,50 @@ from iOS_lib import closeApp, openApp, get_WifiIPAddress_iOS, ForgetWifiConnecti verifyUploadDownloadSpeediOS, get_ip_address_ios, captive_portal_ios, wifi_connect, wifi_disconnect_and_forget pytestmark = [pytest.mark.sanity, pytest.mark.interop, pytest.mark.ios, pytest.mark.interop_ios, - pytest.mark.captive_portal, pytest.mark.interop_uc_sanity, pytest.mark.bridge] + pytest.mark.captive_portal, pytest.mark.regression_interop, pytest.mark.nat] setup_params_general = { - "mode": "BRIDGE", + "mode": "NAT", "ssid_modes": { - "open": [{"ssid_name": "captive_open_2g", "appliedRadios": ["2G"]}, - {"ssid_name": "captive_open_5g", "appliedRadios": ["5G"]}], - "wpa": [{"ssid_name": "captive_wpa_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, - {"ssid_name": "captive_wpa_5g", "appliedRadios": ["5G"], + "open": [{"ssid_name": "nat_captive_open_2g", "appliedRadios": ["2G"]}, + {"ssid_name": "nat_captive_open_5g", "appliedRadios": ["5G"]}], + "wpa": [{"ssid_name": "nat_captive_wpa_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, + {"ssid_name": "nat_captive_wpa_5g", "appliedRadios": ["5G"], "security_key": "lanforge"}], - "wpa2": [{"ssid_name": "captive_wpa2_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, - {"ssid_name": "captive2_wpa_5g", "appliedRadios": ["5G"], + "wpa2_personal": [{"ssid_name": "nat_captive_wpa2_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, + {"ssid_name": "nat_captive_wpa2_5g", "appliedRadios": ["5G"], "security_key": 
"lanforge"}], "wpa3_personal": [ - {"ssid_name": "captive_wpa3_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, - {"ssid_name": "captive_wpa3_5g", "appliedRadios": ["5G"], + {"ssid_name": "nat_captive_wpa3_2g", "appliedRadios": ["2G"], "security_key": "lanforge"}, + {"ssid_name": "nat_captive_wpa3_5g", "appliedRadios": ["5G"], "security_key": "lanforge"}]}, "rf": {}, - "radius": False + "radius": False, + "captive_portal": True + } @allure.suite(suite_name="interop sanity") -@allure.sub_suite(sub_suite_name="Bridge Mode Captive Portal : Suite-A") +@allure.sub_suite(sub_suite_name="NAT Mode Captive Portal : Suite-A") @pytest.mark.InteropsuiteA -@allure.feature("BRIDGE MODE CAPTIVE PORTAL") -# @pytest.mark.parametrize( -# 'setup_profiles', -# [setup_params_general], -# indirect=True, -# scope="class" -# ) -#@pytest.mark.usefixtures("setup_profiles") -class TestBridgeModeCaptivePortalSuiteOneBridge(object): +@allure.feature("NAT MODE CAPTIVE PORTAL") +@pytest.mark.parametrize( + 'setup_profiles', + [setup_params_general], + indirect=True, + scope="class" +) +@pytest.mark.usefixtures("setup_profiles") +class TestNatModeCaptivePortalSuiteOneNAT(object): """ Captive Portal SuiteA - pytest -m "captive portal and bridge and InteropsuiteA" + pytest -m "captive portal and nat and InteropsuiteA" """ @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5179", name="WIFI-5179") @pytest.mark.twog @pytest.mark.open - def test_Captive_Portal_Open_2g_BRIDGE(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, + def test_Captive_Portal_Open_2g_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_iOS): profile_data = setup_params_general["ssid_modes"]["open"][0] @@ -98,7 +100,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5146", name="WIFI-5146") @pytest.mark.fiveg @pytest.mark.open - def 
test_Captive_Portal_Open_5g_BRIDGE(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, + def test_Captive_Portal_Open_5g_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_iOS): profile_data = setup_params_general["ssid_modes"]["open"][1] @@ -133,7 +135,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5182", name="WIFI-5182") @pytest.mark.twog @pytest.mark.wpa - def test_Captive_Portal_WPA_2g_Bridge(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, + def test_Captive_Portal_WPA_2g_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_iOS): profile_data = setup_params_general["ssid_modes"]["wpa"][0] @@ -168,7 +170,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5149", name="WIFI-5149") @pytest.mark.fiveg @pytest.mark.wpa - def test_Captive_Portal_WPA_5g_Bridge(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, + def test_Captive_Portal_WPA_5g_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_iOS): profile_data = setup_params_general["ssid_modes"]["wpa"][1] @@ -203,7 +205,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5186", name="WIFI-5186") @pytest.mark.twog @pytest.mark.wpa2_personal - def test_Captive_Portal_WPA2_2g_Personal_Bridge(self, request, get_vif_state, get_ap_logs, + def test_Captive_Portal_WPA2_2g_Personal_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_iOS): @@ -240,7 +242,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5153", name="WIFI-5153") @pytest.mark.fiveg 
@pytest.mark.wpa2_personal - def test_Captive_Portal_WPA2_5g_Personal_Bridge(self, request, get_vif_state, get_ap_logs, + def test_Captive_Portal_WPA2_5g_Personal_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_iOS): @@ -277,7 +279,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5130", name="WIFI-5130") @pytest.mark.twog @pytest.mark.wpa3_personal - def test_Captive_Portal_WPA3_2g_Personal_Bridge(self, request, get_vif_state, get_ap_logs, + def test_Captive_Portal_WPA3_2g_Personal_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_iOS): @@ -314,7 +316,7 @@ class TestBridgeModeCaptivePortalSuiteOneBridge(object): @allure.testcase(url="https://telecominfraproject.atlassian.net/browse/WIFI-5140", name="WIFI-5140") @pytest.mark.fiveg @pytest.mark.wpa3_personal - def test_Captive_Portal_WPA3_5g_Personal_Bridge(self, request, get_vif_state, get_ap_logs, + def test_Captive_Portal_WPA3_5g_Personal_NAT(self, request, get_vif_state, get_ap_logs, get_APToMobileDevice_data, setup_perfectoMobile_iOS): diff --git a/tests/fixtures_2x.py b/tests/fixtures_2x.py index 21dcf8c61..deb5bcd98 100644 --- a/tests/fixtures_2x.py +++ b/tests/fixtures_2x.py @@ -567,6 +567,13 @@ class Fixtures_2x: except Exception as e: pass + try: + if parameter['captive_portal']: + instantiate_profile_obj.set_captive_portal() + # print(json.loads(str(instantiate_profile_obj.base_profile_config).replace(" ", "").replace("'", '"'))) + except: + pass + ap_ssh = get_apnos(get_configuration['access_point'][0], pwd="../libs/apnos/", sdk="2.x") # Get ucentral status