Mirror of https://github.com/Telecominfraproject/openafc_final.git
synced 2025-10-31 18:17:46 +00:00

	Migrate final Open AFC Repository
		
							
								
								
									
CMakeLists.txt (new file, 229 lines)
@@ -0,0 +1,229 @@
cmake_policy(SET CMP0020 NEW)
cmake_policy(SET CMP0048 NEW)
cmake_policy(SET CMP0057 NEW)
if(NOT ${CMAKE_VERSION} VERSION_LESS 3.10)
    cmake_policy(SET CMP0071 OLD)
endif()

# Debug includes RatapiDebug, WebDebug
# Release includes RatapiRelease, WebRelease
# EngineDebug
# EngineRelease
# RatapiDebug
# RatapiRelease
# WebDebug
# WebRelease
set(default_build_type "Debug")
if(NOT CMAKE_BUILD_TYPE)
  set(CMAKE_BUILD_TYPE "${default_build_type}")
endif()

if ("${CMAKE_BUILD_TYPE}" STREQUAL "Debug" OR
    "${CMAKE_BUILD_TYPE}" STREQUAL "RelWithDebInfo")
    set(OPENAFC_BUILD_TYPE "EngineDebug" "RatapiDebug" "WebDebug")
elseif ("${CMAKE_BUILD_TYPE}" STREQUAL "Release")
    set(OPENAFC_BUILD_TYPE "EngineRelease" "RatapiRelease" "WebRelease")
elseif (${CMAKE_BUILD_TYPE} MATCHES "EngineRatapiDebug")
    set(OPENAFC_BUILD_TYPE "EngineDebug" "RatapiDebug")
elseif (${CMAKE_BUILD_TYPE} MATCHES "EngineRatapiRelease")
    set(OPENAFC_BUILD_TYPE "EngineRelease" "RatapiRelease")
elseif (${CMAKE_BUILD_TYPE} MATCHES "RatapiWebDebug")
    set(OPENAFC_BUILD_TYPE "RatapiDebug" "WebDebug")
elseif (${CMAKE_BUILD_TYPE} MATCHES "Ulsprocessor")
    set(OPENAFC_BUILD_TYPE "Ulsprocessor")
else()
    set(OPENAFC_BUILD_TYPE "${CMAKE_BUILD_TYPE}")
endif()

message(STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
message(STATUS "OPENAFC_BUILD_TYPE: ${OPENAFC_BUILD_TYPE}")

# External version naming
file(READ "${CMAKE_SOURCE_DIR}/version.txt" VERSIONFILE)
string(STRIP ${VERSIONFILE} VERSIONFILE)

if ("EngineDebug" IN_LIST OPENAFC_BUILD_TYPE OR
    "EngineRelease" IN_LIST OPENAFC_BUILD_TYPE OR
    "Ulsprocessor" IN_LIST OPENAFC_BUILD_TYPE)
    project(fbrat VERSION ${VERSIONFILE})
else()
    project(fbrat VERSION ${VERSIONFILE} LANGUAGES)
endif()

cmake_minimum_required(VERSION 3.4)
set(CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake ${CMAKE_MODULE_PATH})
option(BUILD_AFCENGINE "Build the AFC engine portion of the project" ON)

# Shared library ABI versioning
set(SOVERSION "${PROJECT_VERSION}")

if ("EngineDebug" IN_LIST OPENAFC_BUILD_TYPE OR
    "EngineRelease" IN_LIST OPENAFC_BUILD_TYPE OR
    "Ulsprocessor" IN_LIST OPENAFC_BUILD_TYPE)
  # Compiler and linker config
  set(CMAKE_CXX_STANDARD 11)
  set(CMAKE_CXX_STANDARD_REQUIRED ON)
  #set(CMAKE_CXX_EXTENSIONS ON) # No GNU/MSVC extensions
  if(UNIX)
    set(CMAKE_C_FLAGS "${CMAKE_CXX_FLAGS} -lbsd")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -lbsd")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
    #set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
  endif(UNIX)
  if(WIN32)
    # Fix use of min()/max() in MSVC
    add_definitions("-D_USE_MATH_DEFINES -DNOMINMAX")
    # Attempt use of cmake auto-export
    set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON)

    # /bigobj Increases Number of Sections in .Obj file (needed for projects with large number of inline functions)
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /bigobj")
    # Ignore warning from CppMicroServices lib
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4180")
    # Ignore generic C++ naming warnings and template-interface-export warning
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4503 /wd4251 /wd4275")

    # Search conan-package binaries also
    list(APPEND CMAKE_PROGRAM_PATH ${CONAN_BIN_DIRS})

    add_definitions("-DARMA_USE_CXX11") # Workaround MSVC lack of __cplusplus
    add_definitions("-DCPL_DISABLE_DLL") # Workaround issue with "dllexport" in "cpl_port.h"
  endif(WIN32)

  # debug build flags
  if (DEBUG_AFC)
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DDEBUG_AFC=1")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=format-extra-args")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=format")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=shadow")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=switch")
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=return-type")
    # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg")
    message(STATUS "Using DEBUG_AFC build mode")
  endif()

  if ("Ulsprocessor" IN_LIST OPENAFC_BUILD_TYPE)
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3 -larmadillo")
  endif()

  message("CMAKE_C_FLAGS ${CMAKE_C_FLAGS}")
  message("CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS}")
endif()



# Coverage analysis
include(CheckCoverage)

# Flag for special compilation flag for Fedora Build
if (OS_FEDORA)
  add_definitions("-DOS_FEDORA")
endif(OS_FEDORA)

# Standard installation paths
#  - PKG_INSTALL_LIBDIR directory of windows ".lib" files
#  - PKG_INSTALL_BINDIR directory of windows ".dll" files and unix ".so" files
#  - PKG_INSTALL_DATADIR directory for shared application-specific data
#  - PKG_INSTALL_SYSCONFDIR root directory for system-default config files
#  - PKG_MODULE_LIBDIR directory for storing plugin module shared objects
if(UNIX)
    # Name for config/data files under standard paths (incl. XDG paths)
    set(PKG_APP_NAME "${PROJECT_NAME}")
    include(GNUInstallDirs)
    if(SHARE_INSTALL_PREFIX)
        set(CMAKE_INSTALL_DATADIR ${SHARE_INSTALL_PREFIX})
    endif(SHARE_INSTALL_PREFIX)
    if(SYSCONF_INSTALL_DIR)
        set(CMAKE_INSTALL_SYSCONFDIR ${SYSCONF_INSTALL_DIR})
    endif(SYSCONF_INSTALL_DIR)
    # Directly in system paths
    set(PKG_INSTALL_LIBDIR ${CMAKE_INSTALL_LIBDIR})
    set(PKG_INSTALL_BINDIR ${CMAKE_INSTALL_BINDIR})
    set(PKG_INSTALL_SBINDIR ${CMAKE_INSTALL_SBINDIR})
    # Suffix under system paths
    set(PKG_INSTALL_INCLUDEDIR ${CMAKE_INSTALL_INCLUDEDIR}/${PKG_APP_NAME})
    set(PKG_INSTALL_DATADIR ${CMAKE_INSTALL_DATADIR}/${PKG_APP_NAME})
    set(PKG_INSTALL_SYSCONFDIR ${CMAKE_INSTALL_SYSCONFDIR})
    set(XDG_INSTALL_SYSCONFDIR "${CMAKE_INSTALL_SYSCONFDIR}/xdg")
endif(UNIX)
if(WIN32)
    # Name for config/data files under standard paths (incl. %PROGRAMFILES%)
    set(PKG_APP_NAME "${PROJECT_NAME}")
    # All files under common PREFIX path (%PROGRAMFILES%/<PKG_APP_NAME>)
    set(PKG_INSTALL_INCLUDEDIR "include")
    # Libraries to link
    set(PKG_INSTALL_LIBDIR "lib")
    # Runtime binaries
    set(PKG_INSTALL_BINDIR "bin")
    set(PKG_INSTALL_SBINDIR "bin")
    # External debug symbols
    set(PKG_INSTALL_DEBUGDIR "debug")
    # To be consistent with QStandardPaths::AppDataLocation
    set(CMAKE_INSTALL_DATADIR "bin/data")
    set(PKG_INSTALL_DATADIR "${CMAKE_INSTALL_DATADIR}/${PKG_APP_NAME}")
    # on windows config is within datadir
    set(PKG_INSTALL_SYSCONFDIR ${CMAKE_INSTALL_DATADIR})
    set(XDG_INSTALL_SYSCONFDIR "${CMAKE_INSTALL_DATADIR}")
endif(WIN32)
# Extended paths
set(XDG_ICONS_INSTALL_DIR "${CMAKE_INSTALL_DATADIR}/icons")
set(XDG_MIME_INSTALL_DIR "${CMAKE_INSTALL_DATADIR}/mime")
set(XDG_APPS_INSTALL_DIR "${CMAKE_INSTALL_DATADIR}/applications")
set(CMAKE_MODULE_NAME ${PROJECT_NAME})
set(PKG_INSTALL_CMAKE_CONFIG_DIR "${PKG_INSTALL_LIBDIR}/cmake/${CMAKE_MODULE_NAME}")

if ("Ulsprocessor" IN_LIST OPENAFC_BUILD_TYPE)
    find_package(Qt5Core 5.3 REQUIRED)
endif()

# build only engine code
if ("EngineDebug" IN_LIST OPENAFC_BUILD_TYPE OR
    "EngineRelease" IN_LIST OPENAFC_BUILD_TYPE)
    # External libraries
    find_package(Qt5Core 5.3 REQUIRED)
    find_package(Qt5Concurrent REQUIRED)
    find_package(Qt5Network REQUIRED)
    find_package(Qt5Gui REQUIRED)
    find_package(Qt5Sql REQUIRED)
    find_package(Qt5Test REQUIRED)
    find_package(Qt5Xml REQUIRED)
    find_package(Qt5Widgets REQUIRED)

    find_package(Armadillo REQUIRED)
    find_package(ZLIB REQUIRED)
    find_package(minizip REQUIRED)

    find_package(GDAL REQUIRED)
    if(NOT GDAL_FOUND)
        message(FATAL_ERROR "Missing GDAL library")
    endif()
    if(GDAL_FOUND AND NOT TARGET GDAL::GDAL)
      add_library(GDAL::GDAL UNKNOWN IMPORTED)
      set_target_properties(GDAL::GDAL PROPERTIES
        IMPORTED_LOCATION "${GDAL_LIBRARY}"
        INTERFACE_INCLUDE_DIRECTORIES "${GDAL_INCLUDE_DIR}"
      )
    endif()

    set(Boost_USE_MULTITHREADED ON)
    set(Boost_USE_STATIC_RUNTIME OFF)
    find_package(Boost 1.54 REQUIRED COMPONENTS log program_options regex system thread)
    add_definitions("-DBOOST_ALL_DYN_LINK")

endif()


# External library search options
if(WIN32)
    SET(CMAKE_FIND_LIBRARY_PREFIXES "")
    SET(CMAKE_FIND_LIBRARY_SUFFIXES ".dll")
    set(PATH_OPTS NO_DEFAULT_PATH NO_CMAKE_ENVIRONMENT_PATH NO_SYSTEM_ENVIRONMENT_PATH NO_CMAKE_SYSTEM_PATH)
endif(WIN32)

# Build/install in source path
add_subdirectory(src)
add_subdirectory(pkg)

if(APIDOC_INSTALL_PATH)
  configure_file(Doxyfile.in Doxyfile @ONLY)
endif()
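
For reference, the custom build types handled above are selected at configure time via CMAKE_BUILD_TYPE. A minimal sketch of a configure-and-build invocation (paths and generator here are illustrative; see README.md for the full in-container command):
```
mkdir build && cd build
# Any of the build types handled above works here,
# e.g. Debug, EngineRatapiDebug, or Ulsprocessor.
cmake -DCMAKE_BUILD_TYPE=EngineRelease -G Ninja ..
ninja
```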
							
								
								
									
CPPLINT.cfg (new file, 2 lines)
@@ -0,0 +1,2 @@
filter=-whitespace/braces
filter=-whitespace/parens
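
(For context: cpplint automatically picks up a CPPLINT.cfg found in or above a checked file's directory, so these two filters disable the whitespace/braces and whitespace/parens checks. A sketch, with a hypothetical file path:)
```
cpplint src/somefile.cpp   # the cfg's filters apply automatically
```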
							
								
								
									
Customization.md (new file, 106 lines)
@@ -0,0 +1,106 @@
Copyright © 2022 Broadcom. All rights reserved. The term "Broadcom"
refers solely to the Broadcom Inc. corporate affiliate that owns
the software below.
This work is licensed under the OpenAFC Project License, a copy of which is included with this software program.

# About Page Customization
When a user first enters the webpage without logging in, the About screen can be accessed from the navigation menu on the left side. The About screen instructs the user on the next steps to gain an account. About page customization can be done via environment variables or via a JSON file.
## Environment Variables
### Captcha
Captcha configuration is needed if Captcha is to be enabled on the About page to protect the
access request form.
### Captcha Config

```
USE_CAPTCHA=True
CAPTCHA_SECRET='your-captcha-secrets'
CAPTCHA_SITEKEY='your-captcha-sitekey'
CAPTCHA_VERIFY='url-to-verify-captcha'
```

### Optional Mail Server Configuration
If not specified, a local server implementation is used, which does not use any encryption.
```
MAIL_SERVER = 'smtp.gmail.com'
MAIL_PORT = 465
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_USERNAME = 'afc-management-email-address'
MAIL_PASSWORD = "password"
```

### Mail configuration
Mail configuration specifies where emails are sent and which email account is used to send them. This is used by the AFC server to send notifications to the admin when a new user signs up.
This is required for a functional About page to handle access requests submitted via the web form.

```
REGISTRATION_DEST_EMAIL = 'where-the-registration-email-is-sent'
REGISTRATION_DEST_PDL_EMAIL = 'group-where-the-registration-email-is-sent'
REGISTRATION_SRC_EMAIL = MAIL_USERNAME
REGISTRATION_APPROVE_LINK='approval link to include in email'
```

## JSON file config
The preferred method, rather than using environment variables, is via a JSON config file. The JSON file
is to be put in a volume mounted on the container, and the path must be provided to the server
via an environment variable (RATAPI_ARG), e.g. using a docker-compose environment variable or using secrets.

e.g.
Docker compose file:
```
    rat_server:
        environment:
            - RATAPI_ARG=/path/ratapi_config.json
```

The content of the JSON file is as below:
```
{
    "USE_CAPTCHA":"True",
    "CAPTCHA_SECRET":"somevalue",
    "CAPTCHA_SITEKEY":"somevalue",
    "CAPTCHA_VERIFY":"https://www.google.com/recaptcha/api/siteverify",

    "MAIL_SERVER":"smtp.gmail.com",
    "MAIL_PORT":"465",
    "MAIL_USE_TLS":"False",
    "MAIL_USE_SSL":"True",
    "MAIL_USERNAME":"afc-management-email-address",
    "MAIL_PASSWORD":"password",

    "REGISTRATION_DEST_EMAIL":"where-the-registration-email-is-sent",
    "REGISTRATION_DEST_PDL_EMAIL":"group-where-the-registration-email-is-sent",
    "REGISTRATION_SRC_EMAIL":"afc-management-email-address",
    "REGISTRATION_APPROVE_LINK":"approval link to include in email"
}
```
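The file must be valid JSON (note the commas). One way to check it before mounting, a minimal sketch assuming Python 3 is available on the host:
```
python3 -m json.tool ratapi_config.json > /dev/null && echo "valid JSON"
```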
Note that the path must be accessible to the httpd, which runs as the fbrat user:
```
chown -R 1003:1003 /localpath
```


# OIDC Configuration
See OIDC_Login.md

# Miscellaneous Configurations
## Private files
Under the root of the source code (at the same level as src), you can create a private directory.
The structure:
```
private/
    templates/
    images/
```
Under templates: various templates that can be used to customize various web forms, e.g.:
  - about.html: the page a user first accesses to sign up as a new user. This can be customized to give more detailed sign-up instructions.
  - flask_user_layout.html: customizes the login page for the non-OIDC method.
Under images: the files here customize various images of the web page:
  - logo.png: the company logo on the Information (i) page
  - background.png: the background image on the Information (i) page

## Company Name
The config JSON file (RATAPI_ARG) accepts an entry for "USER_APP_NAME" to customize the company name that appears in
various parts of the webpage, e.g.
```
{
  ... snip ...
  "USER_APP_NAME":"My company AFC"
}
```
							
								
								
									
Doxyfile.in (new file, 1777 lines)
(File diff suppressed because it is too large.)
							
								
								
									
LICENSE.txt (new file, 13 lines)
@@ -0,0 +1,13 @@
Project License Copyright 2019-2024, Meta; 2021-2024, Broadcom and Cisco; 2021-2024, TIP and its Contributors; all rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

4. To the extent any other open source software is included or referenced herein, such open source software is licensed under their applicable license terms and conditions and/or copyright notices found in those respective files.

THIS SOFTWARE IS PROVIDED BY TIP AND ITS CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
							
								
								
									
NewUserRegistration.md (new file, 29 lines)
@@ -0,0 +1,29 @@
Copyright © 2022 Broadcom. All rights reserved. The term "Broadcom"
refers solely to the Broadcom Inc. corporate affiliate that owns
the software below.
This work is licensed under the OpenAFC Project License, a copy of which is included with this software program.

# New User Creation
New users can be created via the CLI or can be registered via the Web GUI.

## CLI to create users
```
rat-manage-api user create --role Admin --role AP --role Analysis myusername "Enter Your Password Here"
```
## CLI to assign roles
Roles for existing users can be modified using the CLI.
Note that for users added via the "user create" command, the email and username are the same.
```
rat-manage-api user update --role Admin --role AP --role Analysis --email "user@mycompany.com"
```

# Web User Registration
New users can request an account on the web page. For the non-OIDC method, use the register button and follow the instructions sent via email.
For the OIDC sign-in method, use the About link to fill out the request form. If a request for access is granted, an email reply informs the user of the next steps to get on board. Custom configuration is required on the server to handle new user requests; see Customization.md for details.
Regardless of the method, newly granted users will have the default Trial roles upon first login. An Admin or Super user can use the web GUI to change a user's roles.

# New User access
New users who are granted access automatically have Trial roles and have access to the Virtual AP tab to submit requests.
Test users can choose from **TEST_US**, **TEST_CA**, or **TEST_BR** in the drop-down in the Virtual AP tab. The corresponding config for these test regions should first be set in the AFC Config tab by the admin.

							
								
								
									
OIDC_Login.md (new file, 70 lines)
@@ -0,0 +1,70 @@
Copyright © 2022 Broadcom. All rights reserved. The term "Broadcom"
refers solely to the Broadcom Inc. corporate affiliate that owns
the software below.
This work is licensed under the OpenAFC Project License, a copy of which is included with this software program.

# **Introduction**
You can configure the AFC server to use one of two login mechanisms. OIDC login provides Single Sign-On (SSO), where identity verification is handled by a separate identity provider server. Non-OIDC login implements local authentication using the AFC server's local database.

# **Non OIDC Login**
The legacy login mechanism performs authentication locally on the AFC server and is configured by default. Steps to configure it are described in [README.md](/README.md)

# **OIDC Login**
OIDC relies on an identity provider outside of the AFC server to verify the user's identity. Your organization may already have its own identity server for its employees. When a user logs in to the AFC application, the AFC server forwards a request to the identity provider to authenticate. For your organization's employees, the authentication is completed by your identity provider. For federated users, the identity provider further forwards the user to their respective identity server for verification.

Background on OIDC can be found here: https://openid.net/connect/

## OIDC Configuration
### Use a JSON file
The preferred method is to use an OIDC config file in JSON format, e.g. a file oidc.json:
```
{
    "OIDC_LOGIN":"True",
    "OIDC_CLIENT_ID":"1234",
    "OIDC_CLIENT_SECRET":"my_secret_string",
    "OIDC_DISCOVERY_URL":"https://accounts.mycompany.com"
}
```

And in docker-compose.yaml:
```
rat_server:
        volumes:
        - /hostpath:/localpath

        environment:
        - OIDC_ARG=/localpath/oidc.json
```
In the above, OIDC_CLIENT_ID and OIDC_CLIENT_SECRET are the credentials your server presents to the identity server when verifying a user. OIDC_DISCOVERY_URL is the URL that returns the various URLs needed for verification. One example of such a discovery URL is https://accounts.google.com/.well-known/openid-configuration
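
As a quick check, you can fetch the discovery document and inspect the endpoints the server will use; a sketch (curl is assumed on the host, jq is only for pretty-printing):
```
curl -s https://accounts.google.com/.well-known/openid-configuration | jq .
# The response lists, among others, the authorization_endpoint and token_endpoint.
```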

The path to the config file can be put in a mounted volume and passed to the container via the environment variable OIDC_ARG. This can be done via the docker-compose file or via secrets.

Note that the path must be accessible to the httpd, which runs as the fbrat username and fbrat group:
```
chown -R 1003:1003 /localpath
```
### Use Environment Variables
The alternative method is to use environment variables to pass each parameter.

For example, in docker-compose.yaml:
```
environment:
    - OIDC_LOGIN=True
    - OIDC_CLIENT_ID=1234
    - OIDC_CLIENT_SECRET=my_secret_string
    - OIDC_DISCOVERY_URL=https://accounts.mycompany.com
```


More information on creating your own Google Cloud account can be found here:
https://cloud.google.com/apigee/docs/hybrid/v1.3/precog-gcpaccount


## **Migrating from non-OIDC to the OIDC login method**
With the OIDC method, user accounts are stored in the identity server that maintains your employees' accounts. Accounts created in the non-OIDC database are not recognized by the OIDC identity server. Therefore, after converting to OIDC, you will not be able to log in via the Web using test user accounts created via the CLI, although those can still be used by test scripts, and the roles of those accounts will continue to be maintained.

To facilitate switching real (non-test) accounts: when a user logs in for the first time via OIDC and the email address from the OIDC identity server matches an existing user in the database, that existing account is converted while retaining all roles. Thus, when logging in via the Web GUI, the user has the same access as before the switch.

## **Switching from OIDC to the non-OIDC login method**
Accounts that are maintained exclusively by the OIDC identity provider are not maintained locally. So, after the switch to non-OIDC, they cannot be used to log in via the Web GUI unless the admin sets a password. Accounts created via the CLI can log in using the same password used in the CLI to create them.

In non-OIDC mode, any account's password can be modified by the admin. This is true even for accounts that were maintained by OIDC. However, note that the newly modified password is not recognized by OIDC, and if you switch back to OIDC mode, the password used by the OIDC identity server must be used to log in.
							
								
								
									
README.md (new file, 960 lines)
@@ -0,0 +1,960 @@
This work is licensed under the OpenAFC Project License, a copy of which is included in the LICENSE.txt file with this software program.

<br />
<br />

## Table of Contents
- [**Introduction**](#introduction)
- [**Contributing**](#contributing)
  - [How to contribute](#how-to-contribute)
  - [Pull request best practices](#pull-request-best-practices)
    - [Step 1: File an issue](#step-1-file-an-issue)
    - [Step 2: Clone OpenAFC GitHub repository](#step-2-clone-openafc-github-repository)
    - [Step 3: Create a temporary branch](#step-3-create-a-temporary-branch)
    - [Step 4: Make your changes](#step-4-make-your-changes)
    - [Step 5: Commit your changes](#step-5-commit-your-changes)
    - [Step 6: Rebase](#step-6-rebase)
    - [Step 7: Run the tests](#step-7-run-the-tests)
    - [Step 8: Push your branch to GitHub](#step-8-push-your-branch-to-github)
    - [Step 9: Send the pull request](#step-9-send-the-pull-request)
      - [Change Description](#change-description)
- [**How to Build**](#how-to-build)
- [AFC Engine build in docker setup](#afc-engine-build-in-docker-setup)
  - [Installing docker engine](#installing-docker-engine)
  - [Building the Docker image](#building-the-docker-image)
    - [Prerequisites:](#prerequisites)
    - [Building Docker image from Dockerfile (can be omitted once we have Docker registry)](#building-docker-image-from-dockerfile-can-be-omitted-once-we-have-docker-registry)
    - [Pulling the Docker image from Docker registry](#pulling-the-docker-image-from-docker-registry)
  - [Building OpenAFC engine](#building-openafc-engine)
- [**OpenAFC Engine Server usage in Docker Environment**](#openafc-engine-server-usage-in-docker-environment)
- [AFC Engine build in docker](#afc-engine-build-in-docker)
  - [Building Docker Container OpenAFC engine server](#building-docker-container-openafc-engine-server)
    - [Using scripts from the code base](#using-scripts-from-the-code-base)
    - [To 'manually' build containers one by one:](#to-manually-build-containers-one-by-one)
    - [celery worker prereq containers](#celery-worker-prereq-containers)
  - [Prereqs](#prereqs)
  - [docker-compose](#docker-compose)
  - [**Environment variables**](#environment-variables)
  - [RabbitMQ settings](#rabbitmq-settings)
  - [PostgreSQL structure](#postgresql-structure)
    - [Upgrade PostgreSQL](#upgrade-postgresql)
  - [Initial Super Administrator account](#initial-super-administrator-account)
    - [Note for an existing user database](#note-for-an-existing-user-database)
  - [Managing user account](#managing-user-account)
  - [User roles](#user-roles)
  - [MTLS](#mtls)
  - [ULS database update automation](#uls-database-update-automation)

# **Introduction**

This document describes the procedure for submitting source code changes to the OpenAFC GitHub project. The procedure described in this document requires access to the OpenAFC project and knowledge of Git usage. Please contact TBD@TBD.com in case you need access to the OpenAFC project.

Github.com can be referred to for [details of alternate procedures for creating pull requests](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests); developers can use any of these methods but need to include a change description as part of the pull request description.

OpenAFC conforms to all the requirements from the FCC per the [6GHz Report & Order](https://docs.fcc.gov/public/attachments/DOC-363490A1.pdf) and FCC 47 CFR Part 15.407 for unlicensed standard power devices in the 6 GHz band.

In addition, OpenAFC fully conforms to WinnForum's Functional Requirements for the U.S. 6 GHz Band under the Control of an AFC System in WINNF-TS-1014-V1.4.0 ([https://6ghz.wirelessinnovation.org/baseline-standards](https://6ghz.wirelessinnovation.org/baseline-standards)). This includes some of the implementation details, for example correction of FS parameters in the ULS database, the FS antenna pattern, the FS noise power and feeder loss to use, calculation of the near-field adjustment factor, calculation of interference to FS links with passive sites and diversity receivers, path loss models and their parameters, etc.
Finally, OpenAFC fully conforms to the implementation details specified in the [WFA SUT Test Plan v1.5](https://www.wi-fi.org/file/afc-specification-and-test-plans).

OpenAFC software deployment consists of multiple containers, and it can be deployed on a standalone system for test and development purposes via the provided docker-compose based solution. Instructions on how to build the containers and a sample docker-compose.yaml can be found in [OpenAFC Engine Server usage in Docker Environment](#afc-engine-build-in-docker).

OpenAFC software can also be deployed for production using the Kubernetes framework. Please refer to readme-kubernetes.md for the instructions.

The sample docker-compose.yaml assumes that the required databases (e.g. terrain, landcover, WinnForum databases, etc.) have been obtained and placed in an accessible folder according to the information in [database_readme.md](https://github.com/Telecominfraproject/open-afc/blob/main/database_readme.md) on GitHub.

Many of the components have additional README files inside their folders that describe the additional configuration for each component. Default values are provided either inside the component or in the sample files that will work to stand up the system.

Note that this sample does not provide working SSL certificates for authentication to the server.

<br /><br />
|  | ||||
| # **Contributing** | ||||
| All contributions are welcome to this project. | ||||
|  | ||||
| ## How to contribute | ||||
|  | ||||
| * **File an issue** - if you found a bug, want to request an enhancement, or want to implement something (bug fix or feature). | ||||
| * **Send a pull request** - if you want to contribute code. Please be sure to file an issue first. | ||||
|  | ||||
| ## Pull request best practices | ||||
|  | ||||
| We want to accept your pull requests. Please follow these steps: | ||||
|  | ||||
| ### Step 1: File an issue | ||||
|  | ||||
| Before writing any code, please file an Issue ticket using this Github's repository's 'Issues' tab, stating the problem you want to solve or the feature you want to implement along with a high-level description of the resolution. This allows us to give you feedback before you spend any time writing code. There may be a known limitation that can't be addressed, or a bug that has already been fixed in a different way. The issue ticket allows us to communicate and figure out if it's worth your time to write a bunch of code for the project. | ||||
|  | ||||
| ### Step 2: Clone OpenAFC GitHub repository | ||||
|  | ||||
| OpenAFC source repository can be cloned using the below command. | ||||
| ``` | ||||
| git clone git@github.com:Telecominfraproject/open-afc.git | ||||
| ``` | ||||
| This will create your own copy of our repository. | ||||
| [about remote repositories](https://docs.github.com/en/get-started/getting-started-with-git/about-remote-repositories) | ||||
|  | ||||
| ### Step 3: Create a temporary branch | ||||
|  | ||||
| Create a temporary branch for making your changes. | ||||
| Keep a separate branch for each issue/feature you want to address . | ||||
| ``` | ||||
| git checkout -b <Issue ticket number>-branch_name | ||||
| ``` | ||||
|  | ||||
| Highly desirable to use branch name from Issue ticket title, or use meaningful branch name reflecting the actual changes | ||||
| ``` | ||||
| eg. git checkout -b 146-update-readme-md-to-reflect-issueticket-and-branch-creation-procedure | ||||
| ``` | ||||
### Step 4: Make your changes
See the [Readme in the tools/editing directory](tools/editing/README.md) for the code style tools that are required. Pull requests not meeting the code style requirements will fail to build in the pull request.

### Step 5: Commit your changes
As you develop code, commit your changes into your local feature branch.
Please make sure to include the issue number you're addressing in your commit message.
This helps us out by allowing us to track which issue/feature your commit relates to.
The command below will commit your changes to the local branch.
Note: use the Issue ticket number at the beginning of the commit message.
```
git commit -a -m "<Issue ticket number> description of the change ..."
```
### Step 6: Rebase

Before sending a pull request, rebase against upstream, such as:

```
git fetch origin
git rebase origin
```
This will add your changes on top of what's already in upstream, minimizing merge issues.

### Step 7: Run the tests

Run sufficient targeted tests on the change made to validate that the change works as expected. Please document and submit the test requests/results in the Issue ticket.

This includes running the regression test framework available under the 'tests > regression' directory to verify your changes have not broken other portions of the system.
Make sure that all regression tests are passing before submitting a pull request.

### Step 8: Push your branch to GitHub

Push the code to your remote feature branch.
The command below will push your local branch, along with the changes, to the OpenAFC GitHub.
```
git push -u origin <Issue ticket number>-branch_name
```
> NOTE: The push can include several commits (not only one), but these commits should be related to the same logical change/issue fix/new feature originally described in [Step 1](#step-1-file-an-issue).

### Step 9: Send the pull request

Send the pull request from your feature branch to us.

#### Change Description

When submitting a pull request, please use the following template to submit the change description, risks, and validations done after making the changes
(not a book, but the info required to understand the change/scenario/risks/test coverage):

- Issue ticket number (from [Step 1](#step-1-file-an-issue)). A brief description of the issue(s) being fixed and the likelihood/frequency/severity of the issue, or a description of the new feature if it is a new feature.
- Reproduction procedure: details of how the issue could be reproduced / procedure to reproduce the issue.
- Description of Change: a detailed description of what the change is and the assumptions/decisions made.
- Risks: Low, Medium or High, and reasoning for the same.
- Fix validation procedure:
  - Description of validations done after the fix.
  - Required regression tests: describe what additional tests should be done to ensure no regressions in other functionalities.
  - Sanity test results as described in [Step 7](#step-7-run-the-tests).

> NOTE: Keep in mind that we like to see one issue addressed per pull request, as this helps keep our git history clean and we can more easily track down issues.


<br /><br />
# **How to Build**
# AFC Engine build in docker and compose setup

## Installing docker engine
Docker engine instructions specific to your OS are available on the [Docker official website](https://docs.docker.com/engine/install/)

## Building the Docker images

### Prerequisites:

Currently, all the prerequisites for building the system's containers (except the docker installation) are contained in this repository. All you need to do is clone OpenAFC locally into your working directory and run all the following commands from there.

In order to run the system, you will need to construct a data volume and make it available to the containers. See [database_readme.md](database_readme.md) for details on what data is required.
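
For example, assuming the databases are kept under /var/databases on the host (the path used throughout this README; any accessible folder works), a minimal host-side preparation could look like this:
```
# Sketch only: create the host folder that will be mapped into the containers
sudo mkdir -p /var/databases/rat_transfer
# Most files must be owned by UID/GID 1003 (fbrat) - see the Prereqs section below
sudo chown -R 1003:1003 /var/databases
```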
|  | ||||
| ### Building Docker image from Dockerfiles | ||||
|  | ||||
| There is a script that builds all container used by the AFC service. | ||||
| This script is used by automatic test infrastructure. Please check [tests/regression](/tests/regression/) dir. | ||||
|  | ||||
| This script uses two environment variables PRIV_REPO and PUB_REPO to determine what repository to push images to. These values default to those used by the regression test infrastructure, but you should define them to refer to your repository. Note that the script  | ||||
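
For instance (the registry paths below are placeholders, not project defaults):
```
export PRIV_REPO=registry.example.com/openafc/private   # placeholder registry path
export PUB_REPO=registry.example.com/openafc/public     # placeholder registry path
tests/regression/build_imgs.sh `pwd` my_tag 0
```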

### Using scripts from the code base

To rebuild and tag all containers in your local docker repository, use this script:
```
cd open-afc
tests/regression/build_imgs.sh `pwd` my_tag 0
```
After the build, check all the new containers:
```
docker images | grep my_tag
```
These containers are used by [tests/regression/run_srvr.sh](/tests/regression/run_srvr.sh)

### To 'manually' build containers one by one:
```
cd open-afc

docker build . -t rat_server -f rat_server/Dockerfile

docker build . -t uls_service -f uls/Dockerfile-uls_service

docker build . -t msghnd -f msghnd/Dockerfile

docker build . -t ratdb -f ratdb/Dockerfile

docker build . -t objst -f objstorage/Dockerfile

docker build . -t rmq -f rabbitmq/Dockerfile

docker build . -t dispatcher -f dispatcher/Dockerfile

docker build . -t cert_db -f cert_db/Dockerfile

docker build . -t rcache -f rcache/Dockerfile

cd als && docker build . -t als_siphon -f Dockerfile.siphon; cd ../
cd als && docker build . -t als_kafka -f Dockerfile.kafka; cd ../
cd bulk_postgres && docker build . -t bulk_postgres -f Dockerfile; cd ../
```

### celery worker prereq containers
```
docker build . -t worker-preinst -f worker/Dockerfile.preinstall

docker build . -t worker-build -f worker/Dockerfile.build
```
To build the worker using the local prereq containers:
```
docker build . -t worker -f worker/Dockerfile --build-arg PRINST_NAME=worker-preinst --build-arg PRINST_TAG=local --build-arg BLD_NAME=worker-build --build-arg BLD_TAG=local
```
### Prometheus Monitoring images
If you wish to use [Prometheus](https://prometheus.io/) to monitor your system, you can build these images:
```
cd prometheus && docker build . -f Dockerfile-prometheus -t prometheus-image ; cd ../
cd prometheus && docker build . -f Dockerfile-cadvisor -t cadvisor-image ; cd ../
cd prometheus && docker build . -f Dockerfile-nginxexporter -t nginxexporter-image ; cd ../
cd prometheus && docker build . -f Dockerfile-grafana -t grafana-image ; cd ../
```

Once built, the docker images are usable as usual docker images.
## Building OpenAFC engine

If you wish to build or run the engine component outside of the entire OpenAFC system, you can build the docker images that are needed for only that component.

**NB:** the "-v" option in docker maps a folder on the real machine into the docker container.

"-v /tmp/work/open-afc:/wd/afc" means that the contents of the "/tmp/work/open-afc" folder will be available inside the container in /wd/afc/


Go to the project dir:
```
cd open-afc
```
If you have not already, build the worker build image:
```
docker build . -t worker-build -f worker/Dockerfile.build
```

Run a shell in the alpine docker-for-build container:
```
docker run --rm -it --user `id -u`:`id -g` --group-add `id -G | sed "s/ / --group-add /g"` -v `pwd`:/wd/afc worker-build:latest ash
```

Inside the container's shell, execute:
```
mkdir -p -m 777 /wd/afc/build && BUILDREV=offlinebuild && cd /wd/afc/build && cmake -DCMAKE_INSTALL_PREFIX=/wd/afc/__install -DCMAKE_PREFIX_PATH=/usr -DBUILD_WITH_COVERAGE=off -DCMAKE_BUILD_TYPE=EngineRelease -DSVN_LAST_REVISION=$BUILDREV -G Ninja /wd/afc && ninja -j$(nproc) install
```
Now the afc-engine is ready:
```
[@wcc-afc-01 work/dimar/open-afc] > ls -l build/src/afc-engine/afc-engine
-rwxr-xr-x. 1 dr942120 dr942120 4073528 Mar  8 04:03 build/src/afc-engine/afc-engine
```
Run it from the default worker container:
```
docker run --rm -it --user `id -u`:`id -g` --group-add `id -G | sed "s/ / --group-add /g"` -v `pwd`:/wd/afc worker:latest sh
```
Inside the worker container, execute the afc-engine app:
```
./afc/build/src/afc-engine/afc-engine
```

## Prereqs
The OpenAFC containers need several mappings to work properly. Assuming that you are using /var/databases on your host to store the databases, you can either select option 1 here (which is assumed in the docker compose shown below) or set the mappings individually as shown in 2-6.

1) All databases in one folder - map to /mnt/nfs/rat_transfer
      ```
      /var/databases:/mnt/nfs/rat_transfer
      ```
      Those databases are:
      - 3dep
      - daily_uls_parse
      - databases
      - globe
      - itudata
      - nlcd
      - population
      - proc_gdal
      - proc_lidar_2019
      - RAS_Database
      - srtm3arcsecondv003
      - ULS_Database
      - nfa
      - pr


2) LiDAR Databases to /mnt/nfs/rat_transfer/proc_lidar_2019
      ```
      /var/databases/proc_lidar_2019:/mnt/nfs/rat_transfer/proc_lidar_2019
      ```
3) RAS database to /mnt/nfs/rat_transfer/RAS_Database
      ```
      /var/databases/RAS_Database:/mnt/nfs/rat_transfer/RAS_Database
      ```
4) Actual ULS Databases to /mnt/nfs/rat_transfer/ULS_Database
      ```
      /var/databases/ULS_Database:/mnt/nfs/rat_transfer/ULS_Database
      ```
5) Folder with daily ULS Parse data /mnt/nfs/rat_transfer/daily_uls_parse
      ```
      /var/databases/daily_uls_parse:/mnt/nfs/rat_transfer/daily_uls_parse
      ```
6) Folder with AFC Config data /mnt/nfs/afc_config (now can be moved to Object Storage by default)
      ```
      /var/afc_config:/mnt/nfs/afc_config
      ```
**NB: All or almost all files and folders should be owned by user and group 1003 (currently - fbrat)**

This can be applied via the following command (mind the real location of these folders on your host system):

```
chown -R 1003:1003 /var/databases /var/afc_config
```

## docker-compose

You would probably like to use docker-compose to set everything up together - in this case, feel free to use the following docker-compose.yaml file as a reference.
Also check the [docker-compose.yaml](/tests/regression/docker-compose.yaml) and [.env](/tests/regression/.env) files from the tests/regression directory, which are used by OpenAFC CI.

Note that the image tags here are the ones from the [manual build](#to-manually-build-containers-one-by-one), not the ones used by the [script build](#using-scripts-from-the-code-base).

| ``` | ||||
| version: '3.2' | ||||
| services: | ||||
|   ratdb: | ||||
|     image: ratdb:${TAG:-latest} | ||||
|     restart: always | ||||
|     dns_search: [.] | ||||
|  | ||||
|   rmq: | ||||
|     image: rmq:${TAG:-latest} | ||||
|     restart: always | ||||
|     dns_search: [.] | ||||
|  | ||||
|   dispatcher: | ||||
|     image: dispatcher:${TAG:-latest} | ||||
|     restart: always | ||||
|     ports: | ||||
|       - "${EXT_PORT}:80" | ||||
|       - "${EXT_PORT_S}:443" | ||||
|     volumes: | ||||
|       - ${VOL_H_NGNX:-/tmp}:${VOL_C_NGNX:-/dummyngnx} | ||||
|     environment: | ||||
|       - AFC_SERVER_NAME=${AFC_SERVER_NAME:-_} | ||||
|       - AFC_ENFORCE_HTTPS=${AFC_ENFORCE_HTTPS:-TRUE} | ||||
|       # set to true if required to enforce mTLS check | ||||
|       - AFC_ENFORCE_MTLS=false | ||||
|       - AFC_MSGHND_NAME=msghnd | ||||
|       - AFC_MSGHND_PORT=8000 | ||||
|       - AFC_WEBUI_NAME=rat_server | ||||
|       - AFC_WEBUI_PORT=80 | ||||
|       # Filestorage params: | ||||
|       - AFC_OBJST_HOST=objst | ||||
|       - AFC_OBJST_PORT=5000 | ||||
|       - AFC_OBJST_SCHEME=HTTP | ||||
|     depends_on: | ||||
|       - msghnd | ||||
|       - rat_server | ||||
|     dns_search: [.] | ||||
|  | ||||
|   rat_server: | ||||
|     image: rat_server:${TAG:-latest} | ||||
|     volumes: | ||||
|       - ${VOL_H_DB}:${VOL_C_DB} | ||||
|       - ./pipe:/pipe | ||||
|     depends_on: | ||||
|       - ratdb | ||||
|       - rmq | ||||
|       - objst | ||||
|       - als_kafka | ||||
|       - als_siphon | ||||
|       - bulk_postgres | ||||
|       - rcache | ||||
|     secrets: | ||||
|       - NOTIFIER_MAIL.json | ||||
|       - OIDC.json | ||||
|       - REGISTRATION.json | ||||
|       - REGISTRATION_CAPTCHA.json | ||||
|     dns_search: [.] | ||||
|     environment: | ||||
|       # RabbitMQ server name: | ||||
|       - BROKER_TYPE=external | ||||
|       - BROKER_FQDN=rmq | ||||
|       # Filestorage params: | ||||
|       - AFC_OBJST_HOST=objst | ||||
|       - AFC_OBJST_PORT=5000 | ||||
|       - AFC_OBJST_SCHEME=HTTP | ||||
|       # ALS params | ||||
|       - ALS_KAFKA_SERVER_ID=rat_server | ||||
|       - ALS_KAFKA_CLIENT_BOOTSTRAP_SERVERS=${ALS_KAFKA_SERVER_}:${ALS_KAFKA_CLIENT_PORT_} | ||||
|       - ALS_KAFKA_MAX_REQUEST_SIZE=${ALS_KAFKA_MAX_REQUEST_SIZE_} | ||||
|       # Rcache parameters | ||||
|       - RCACHE_ENABLED=${RCACHE_ENABLED} | ||||
|       - RCACHE_POSTGRES_DSN=postgresql://postgres:postgres@bulk_postgres/rcache | ||||
|       - RCACHE_SERVICE_URL=http://rcache:${RCACHE_CLIENT_PORT} | ||||
|       - RCACHE_RMQ_DSN=amqp://rcache:rcache@rmq:5672/rcache | ||||
|  | ||||
|   msghnd: | ||||
|     image: msghnd:${TAG:-latest} | ||||
|     environment: | ||||
|       # RabbitMQ server name: | ||||
|       - BROKER_TYPE=external | ||||
|       - BROKER_FQDN=rmq | ||||
|       # Filestorage params: | ||||
|       - AFC_OBJST_HOST=objst | ||||
|       - AFC_OBJST_PORT=5000 | ||||
|       - AFC_OBJST_SCHEME=HTTP | ||||
|       # ALS params | ||||
|       - ALS_KAFKA_SERVER_ID=msghnd | ||||
|       - ALS_KAFKA_CLIENT_BOOTSTRAP_SERVERS=${ALS_KAFKA_SERVER_}:${ALS_KAFKA_CLIENT_PORT_} | ||||
|       - ALS_KAFKA_MAX_REQUEST_SIZE=${ALS_KAFKA_MAX_REQUEST_SIZE_} | ||||
|       # Rcache parameters | ||||
|       - RCACHE_ENABLED=${RCACHE_ENABLED} | ||||
|       - RCACHE_POSTGRES_DSN=postgresql://postgres:postgres@bulk_postgres/rcache | ||||
|       - RCACHE_SERVICE_URL=http://rcache:${RCACHE_CLIENT_PORT} | ||||
|       - RCACHE_RMQ_DSN=amqp://rcache:rcache@rmq:5672/rcache | ||||
|     dns_search: [.] | ||||
|     depends_on: | ||||
|       - ratdb | ||||
|       - rmq | ||||
|       - objst | ||||
|       - als_kafka | ||||
|       - als_siphon | ||||
|       - bulk_postgres | ||||
|       - rcache | ||||
|  | ||||
|   objst: | ||||
|     image: objst:${TAG:-latest} | ||||
|     environment: | ||||
|       - AFC_OBJST_PORT=5000 | ||||
|       - AFC_OBJST_HIST_PORT=4999 | ||||
|       - AFC_OBJST_LOCAL_DIR=/storage | ||||
|     dns_search: [.] | ||||
|  | ||||
|   worker: | ||||
|     image: worker:${TAG:-latest} | ||||
|     volumes: | ||||
|       - ${VOL_H_DB}:${VOL_C_DB} | ||||
|       - ./pipe:/pipe | ||||
|     environment: | ||||
|       # Filestorage params: | ||||
|       - AFC_OBJST_HOST=objst | ||||
|       - AFC_OBJST_PORT=5000 | ||||
|       - AFC_OBJST_SCHEME=HTTP | ||||
|       # worker params | ||||
|       - AFC_WORKER_CELERY_WORKERS=rat_1 rat_2 | ||||
|       # RabbitMQ server name: | ||||
|       - BROKER_TYPE=external | ||||
|       - BROKER_FQDN=rmq | ||||
|       # afc-engine preload lib params | ||||
|       - AFC_AEP_ENABLE=1 | ||||
|       - AFC_AEP_DEBUG=1 | ||||
|       - AFC_AEP_REAL_MOUNTPOINT=${VOL_C_DB}/3dep/1_arcsec | ||||
|       # Rcache parameters | ||||
|       - RCACHE_ENABLED=${RCACHE_ENABLED} | ||||
|       - RCACHE_SERVICE_URL=http://rcache:${RCACHE_CLIENT_PORT} | ||||
|       - RCACHE_RMQ_DSN=amqp://rcache:rcache@rmq:5672/rcache | ||||
|       # ALS params | ||||
|       - ALS_KAFKA_SERVER_ID=worker | ||||
|       - ALS_KAFKA_CLIENT_BOOTSTRAP_SERVERS=${ALS_KAFKA_SERVER_}:${ALS_KAFKA_CLIENT_PORT_} | ||||
|       - ALS_KAFKA_MAX_REQUEST_SIZE=${ALS_KAFKA_MAX_REQUEST_SIZE_} | ||||
|     depends_on: | ||||
|       - ratdb | ||||
|       - rmq | ||||
|       - objst | ||||
|       - rcache | ||||
|       - als_kafka | ||||
|     dns_search: [.] | ||||
|  | ||||
|   als_kafka: | ||||
|     image: als-kafka:${TAG:-latest} | ||||
|     restart: always | ||||
|     environment: | ||||
|       - KAFKA_ADVERTISED_HOST=${ALS_KAFKA_SERVER_} | ||||
|       - KAFKA_CLIENT_PORT=${ALS_KAFKA_CLIENT_PORT_} | ||||
|       - KAFKA_MAX_REQUEST_SIZE=${ALS_KAFKA_MAX_REQUEST_SIZE_} | ||||
|     dns_search: [.] | ||||
|  | ||||
|   als_siphon: | ||||
|     image: als-siphon:${TAG:-latest} | ||||
|     restart: always | ||||
|     environment: | ||||
|       - KAFKA_SERVERS=${ALS_KAFKA_SERVER_}:${ALS_KAFKA_CLIENT_PORT_} | ||||
|       - POSTGRES_HOST=bulk_postgres | ||||
|       - INIT_IF_EXISTS=skip | ||||
|       - KAFKA_MAX_REQUEST_SIZE=${ALS_KAFKA_MAX_REQUEST_SIZE_} | ||||
|     depends_on: | ||||
|       - als_kafka | ||||
|       - bulk_postgres | ||||
|     dns_search: [.] | ||||
|  | ||||
|   bulk_postgres: | ||||
|     image: public.ecr.aws/w9v6y1o0/openafc/bulk-postgres-image:${TAG:-latest} | ||||
|     dns_search: [.] | ||||
|  | ||||
|   uls_downloader: | ||||
|     image: public.ecr.aws/w9v6y1o0/openafc/uls-downloader:${TAG:-latest} | ||||
|     restart: always | ||||
|     environment: | ||||
|       - ULS_SERVICE_STATE_DB_DSN=postgresql://postgres:postgres@bulk_postgres/fs_state | ||||
|       - ULS_AFC_URL=http://msghnd:8000/fbrat/ap-afc/availableSpectrumInquiryInternal?nocache=True | ||||
|       - ULS_DELAY_HR=1 | ||||
|       - ULS_PROMETHEUS_PORT=8000 | ||||
|       # Rcache parameters | ||||
|       - RCACHE_ENABLED=${RCACHE_ENABLED} | ||||
|       - RCACHE_SERVICE_URL=http://rcache:${RCACHE_CLIENT_PORT} | ||||
|     volumes: | ||||
|       - ${VOL_H_DB}/ULS_Database:/rat_transfer/ULS_Database | ||||
|       - ${VOL_H_DB}/RAS_Database:/rat_transfer/RAS_Database | ||||
|     secrets: | ||||
|       - NOTIFIER_MAIL.json | ||||
|     dns_search: [.] | ||||
|  | ||||
|   cert_db: | ||||
|     image: cert_db:${TAG:-latest} | ||||
|     depends_on: | ||||
|       - ratdb | ||||
|     links: | ||||
|       - ratdb | ||||
|       - als_kafka | ||||
|     environment: | ||||
|       - ALS_KAFKA_SERVER_ID=cert_db | ||||
|       - ALS_KAFKA_CLIENT_BOOTSTRAP_SERVERS=${ALS_KAFKA_SERVER_}:${ALS_KAFKA_CLIENT_PORT_} | ||||
|       - ALS_KAFKA_MAX_REQUEST_SIZE=${ALS_KAFKA_MAX_REQUEST_SIZE_} | ||||
|  | ||||
|   rcache: | ||||
|     image: rcache:${TAG:-latest} | ||||
|     restart: always | ||||
|     environment: | ||||
|       - RCACHE_ENABLED=${RCACHE_ENABLED} | ||||
|       - RCACHE_CLIENT_PORT=${RCACHE_CLIENT_PORT} | ||||
|       - RCACHE_POSTGRES_DSN=postgresql://postgres:postgres@bulk_postgres/rcache | ||||
|       - RCACHE_AFC_REQ_URL=http://msghnd:8000/fbrat/ap-afc/availableSpectrumInquiry?nocache=True | ||||
|       - RCACHE_RULESETS_URL=http://rat_server/fbrat/ratapi/v1/GetRulesetIDs | ||||
|       - RCACHE_CONFIG_RETRIEVAL_URL=http://rat_server/fbrat/ratapi/v1/GetAfcConfigByRulesetID | ||||
|     depends_on: | ||||
|       - bulk_postgres | ||||
|     dns_search: [.] | ||||
|  | ||||
|   grafana: | ||||
|     image: grafana-image:${TAG:-latest} | ||||
|     restart: always | ||||
|     depends_on: | ||||
|       - prometheus | ||||
|       - bulk_postgres | ||||
|     dns_search: [.] | ||||
|  | ||||
|   prometheus: | ||||
|     image: prometheus-image:${TAG:-latest} | ||||
|     restart: always | ||||
|     depends_on: | ||||
|       - cadvisor | ||||
|       - nginxexporter | ||||
|     dns_search: [.] | ||||
|  | ||||
|   cadvisor: | ||||
|     image: cadvisor-image:${TAG:-latest} | ||||
|     restart: always | ||||
|     volumes: | ||||
|     - /:/rootfs:ro | ||||
|     - /var/run:/var/run:rw | ||||
|     - /sys:/sys:ro | ||||
|     - /var/lib/docker/:/var/lib/docker:ro | ||||
|     - /dev/disk/:/dev/disk:ro | ||||
|     dns_search: [.] | ||||
|  | ||||
|   nginxexporter: | ||||
|     image: nginxexporter-image:${TAG:-latest} | ||||
|     restart: always | ||||
|     depends_on: | ||||
|       - dispatcher | ||||
|     dns_search: [.] | ||||
|  | ||||
| secrets: | ||||
|     NOTIFIER_MAIL.json: | ||||
|         file: ${VOL_H_SECRETS}/NOTIFIER_MAIL.json | ||||
|     OIDC.json: | ||||
|         file: ${VOL_H_SECRETS}/OIDC.json | ||||
|     REGISTRATION.json: | ||||
|         file: ${VOL_H_SECRETS}/REGISTRATION.json | ||||
|     REGISTRATION_CAPTCHA.json: | ||||
|         file: ${VOL_H_SECRETS}/REGISTRATION_CAPTCHA.json | ||||
|  | ||||
|  | ||||
| ``` | ||||
| The `.env` file used with the docker-compose.yaml. Please read the comments in the file and update it accordingly: | ||||
| ``` | ||||
| # --------------------------------------------------- # | ||||
| # docker-compose.yaml variables                       # | ||||
| # convention: Host volume VOL_H_XXX will be mapped    # | ||||
| # as container's volume VOL_C_YYY                     # | ||||
| # VOL_H_XXX:VOL_C_YYY                                 # | ||||
| # --------------------------------------------------- # | ||||
|  | ||||
| # -= MUST BE defined =- | ||||
| # Hostname for AFC server | ||||
| AFC_SERVER_NAME="_" | ||||
| # Whether to forward all HTTP requests to HTTPS | ||||
| AFC_ENFORCE_HTTPS=TRUE | ||||
|  | ||||
| # Host static DB root dir | ||||
| VOL_H_DB=/var/databases/rat_transfer | ||||
|  | ||||
| # Container's static DB root dir (do not change it!) | ||||
| VOL_C_DB=/mnt/nfs/rat_transfer | ||||
|  | ||||
| # RAT user to be used in containers | ||||
| UID=1003 | ||||
| GID=1003 | ||||
|  | ||||
| # AFC service external PORTs configuration | ||||
| # syntax: | ||||
| # [IP]:<port | portN-portM> | ||||
| # like 172.31.11.188:80-180 | ||||
| # where: | ||||
| #  IP is  172.31.11.188 | ||||
| #  port range is 80-180 | ||||
|  | ||||
| # Here we configure the range of external ports to be used by the service | ||||
| # docker-compose randomly uses one port from the range | ||||
|  | ||||
| # Note 1: | ||||
| # The IP address can be skipped if there is only one external | ||||
| # IP address (i.e. 80-180 w/o IP address is acceptable as well) | ||||
|  | ||||
| # Note 2: | ||||
| # The range of ports can be skipped; just one port is acceptable as well | ||||
|  | ||||
| # All these values are acceptable: | ||||
| # PORT=172.31.11.188:80-180 | ||||
| # PORT=172.31.11.188:80 | ||||
| # PORT=80-180 | ||||
| # PORT=80 | ||||
|  | ||||
|  | ||||
| # http ports range | ||||
| EXT_PORT=80 | ||||
|  | ||||
| # https host ports range | ||||
| EXT_PORT_S=443 | ||||
|  | ||||
|  | ||||
| # -= ALS CONFIGURATION STUFF =- | ||||
|  | ||||
| # Port on which ALS Kafka server listens for clients | ||||
| ALS_KAFKA_CLIENT_PORT_=9092 | ||||
|  | ||||
| # ALS Kafka server host name | ||||
| ALS_KAFKA_SERVER_=als_kafka | ||||
|  | ||||
| # Maximum ALS message size (default 1MB is too tight for GUI AFC Response) | ||||
| ALS_KAFKA_MAX_REQUEST_SIZE_=10485760 | ||||
|  | ||||
|  | ||||
| # -= FS(ULS) DOWNLOADER CONFIGURATION STUFF =- | ||||
|  | ||||
| # Symlink pointing to current ULS database | ||||
| ULS_CURRENT_DB_SYMLINK=FS_LATEST.sqlite3 | ||||
|  | ||||
|  | ||||
| # -= RCACHE SERVICE CONFIGURATION STUFF =- | ||||
|  | ||||
| # True (1, t, on, y, yes) to enable use of Rcache. False (0, f, off, n, no) to | ||||
| # use legacy file-based cache. Default is True | ||||
| RCACHE_ENABLED=True | ||||
|  | ||||
| # Port the Rcache service listens on | ||||
| RCACHE_CLIENT_PORT=8000 | ||||
|  | ||||
|  | ||||
| # -= SECRETS STUFF =- | ||||
|  | ||||
| # Host directory containing secret files | ||||
| VOL_H_SECRETS=../../tools/secrets/empty_secrets | ||||
| #VOL_H_SECRETS=/opt/afc/secrets | ||||
|  | ||||
| # Directory inside container where secrets are mounted (always /run/secrets | ||||
| # in Compose, may vary in Kubernetes) | ||||
| VOL_C_SECRETS=/run/secrets | ||||
|  | ||||
|  | ||||
|  | ||||
| # -= OPTIONAL =- | ||||
| # To work without TLS/mTLS, remove these variables from here. | ||||
| # If you have a TLS/mTLS configuration, keep the configuration | ||||
| # files in these host volumes | ||||
| VOL_H_SSL=./ssl | ||||
| VOL_C_SSL=/usr/share/ca-certificates/certs | ||||
| VOL_H_NGNX=./ssl/nginx | ||||
| VOL_C_NGNX=/certificates/servers | ||||
|  | ||||
|  | ||||
| ``` | ||||
|  | ||||
|  | ||||
| Just create this file at the same level as the Dockerfile and you are almost ready. Verify that the VOL_H_DB setting in the .env file points at your host directory with the databases. | ||||
|  | ||||
| Just run the following command in this folder and it is done: | ||||
| ``` | ||||
| docker-compose up -d | ||||
| ``` | ||||
|  | ||||
| Keep in mind that on the first run it will build and pull all the needed containers, which can take some time (depending on your machine's power). You may want to do a build first (see [Building Docker image from Dockerfiles](#building-docker-image-from-dockerfiles)) | ||||
|  | ||||
| After the initial start of the server, we recommend stopping it and starting it again using these commands: | ||||
| ``` | ||||
| docker-compose down | ||||
| docker-compose up -d | ||||
| ``` | ||||
|  | ||||
| If you later need to rebuild the server with your changes, simply run this command: | ||||
| ``` | ||||
| docker-compose build | ||||
| ``` | ||||
| and then restart it. | ||||
| To force a complete rebuild, use the _--no-cache_ option: | ||||
|  | ||||
| ``` | ||||
| docker-compose build --no-cache | ||||
| ``` | ||||
|  | ||||
| **NB: the postgres container requires the folder /mnt/nfs/pgsql/data to be owned by its internal user and group _postgres_, which both have id 999.** | ||||
|  | ||||
| You can achieve this as follows (mind the real location of these folders on your host system): | ||||
| ``` | ||||
| chown 999:999 /var/databases/pgdata | ||||
| ``` | ||||
| ## Initial configuration and first user | ||||
|  | ||||
| On the first start of the PostgreSQL server there are some initial steps to perform. The first is to create the database; its default name is **fbrat**. If you are using the compose script described above, everything is done automatically to prepare the database for initialization. | ||||
|  | ||||
| After that, once the OpenAFC server is started, you need to create the DB structure for the user database. This can be done using the _rat-manage-api_ utility. | ||||
|  | ||||
| ``` | ||||
| rat-manage-api db-create | ||||
| ``` | ||||
|  | ||||
| If the server is run through the docker-compose script described above, you can do it using this command: | ||||
| ``` | ||||
| docker-compose exec rat_server rat-manage-api db-create | ||||
| ``` | ||||
| ### Initial Super Administrator account | ||||
|  | ||||
| Once done with the database and the server is started, you need to create a default administrative user to manage your server from the WebUI. This is done from the server console using the _rat-manage-api_ utility. | ||||
|  | ||||
| If you are running from the compose file described above, you first need to get the OpenAFC server console. | ||||
| ``` | ||||
| docker-compose exec rat_server bash | ||||
| ``` | ||||
| It will return something like this: | ||||
| ``` | ||||
| [root@149372a2ac05 wd]# | ||||
| ``` | ||||
| This means you are in. | ||||
|  | ||||
| By default, the login uses the non-OIDC login method, which manages user accounts locally. You can use the following command to create an administrator for your OpenAFC server: | ||||
|  | ||||
| ``` | ||||
| rat-manage-api user create --role Super --role Admin --role AP --role Analysis admin "Enter Your Password Here" | ||||
| ``` | ||||
|  | ||||
| Once done, you can log in with this username and password in the WebUI. | ||||
| To exit the console press Ctrl+D or type the 'exit' command. | ||||
|  | ||||
| If you would like to use the OIDC login method, please read [OIDC_Login.md](/OIDC_Login.md) | ||||
|  | ||||
|  | ||||
| ## **Environment variables** | ||||
| |Name|Default val|Container|Notes | ||||
| | :- | :- | :- | :- | | ||||
| | **RabbitMQ settings**|||| | ||||
| |BROKER_TYPE|`internal`|rat-server,msghnd,worker | whether `internal` or `external` AFC RMQ service used| | ||||
| |BROKER_PROT|`amqp` |rat-server,msghnd,worker | what protocol used for AFC RMQ service| | ||||
| |BROKER_USER|`celery`|rat-server,msghnd,worker | user used for AFC RMQ service| | ||||
| |BROKER_PWD |`celery`|rat-server,msghnd,worker | password used for AFC RMQ service| | ||||
| |BROKER_FQDN|`localhost`|rat-server,msghnd,worker | IP/domain name of AFC RMQ service| | ||||
| |BROKER_PORT|`5672`|rat-server,msghnd,worker | port of AFC RMQ service| | ||||
| |RMQ_LOG_CONSOLE_LEVEL|warning|rmq|RabbitMQ console log level (debug, info, warning, error, critical, none)| | ||||
| | **AFC Object Storage** |||please read [objst README.md](/objstorage/README.md)| | ||||
| |AFC_OBJST_HOST|`0.0.0.0`|objst,rat-server,msghnd,worker|file storage service host domain/IP| | ||||
| |AFC_OBJST_PORT|`5000`|objst,rat-server,msghnd,worker|file storage service port| | ||||
| |AFC_OBJST_SCHEME|'HTTP'|rat-server,msghnd,worker|file storage service scheme. `HTTP` or `HTTPS`| | ||||
| |AFC_OBJST_MEDIA|`LocalFS`|objst|The media used for storing files by the service. The possible values are `LocalFS` - store files on docker's FS. `GoogleCloudBucket` - store files on Google Store| | ||||
| |AFC_OBJST_LOCAL_DIR|`/storage`|objst|file system path to stored files in file storage container. Used only when `AFC_OBJST_MEDIA` is `LocalFS`| | ||||
| |AFC_OBJST_LOG_LVL|`ERROR`|objst|logging level of the file storage. The relevant values are `DEBUG` and `ERROR`| | ||||
| |AFC_OBJST_HIST_PORT|`4999`|objst,rat-server,msghnd,worker|history service port| | ||||
| |AFC_OBJST_WORKERS|`10`|objst|number of gunicorn workers running objst server| | ||||
|AFC_OBJST_HIST_WORKERS|`2`|objst|number of gunicorn workers running the history server| ||||
| | **MSGHND settings**|||| | ||||
|AFC_MSGHND_BIND|`0.0.0.0`|msghnd| the socket to bind; a string of the form `<host>`| ||||
|AFC_MSGHND_PORT|`8000`|msghnd| the port to use in bind; a string of the form `<port>`| ||||
| |AFC_MSGHND_PID|`/run/gunicorn/openafc_app.pid`|msghnd| a filename to use for the PID file| | ||||
| |AFC_MSGHND_WORKERS|`20`|msghnd| the number of worker processes for handling requests| | ||||
| |AFC_MSGHND_TIMEOUT|`180`|msghnd| workers silent for more than this many seconds are killed and restarted| | ||||
| |AFC_MSGHND_ACCESS_LOG||msghnd| the Access log file to write to. Default to don't. Use `/proc/self/fd/2` for console| | ||||
| |AFC_MSGHND_ERROR_LOG|`/proc/self/fd/2`|msghnd| the Error log file to write to| | ||||
|AFC_MSGHND_LOG_LEVEL|`info`|msghnd| The granularity of Error log outputs (values are 'debug', 'info', 'warning', 'error', 'critical')| ||||
| | **worker settings**|||please read [afc-engine-preload README.md](/src/afc-engine-preload/README.md)| | ||||
| |AFC_AEP_ENABLE|Not defined|worker|Enable the preload library if defined| | ||||
| |AFC_AEP_FILELIST|`/aep/list/aep.list`|worker|Path to file tree info file| | ||||
| |AFC_AEP_DEBUG|`0`|worker|Log level. 0 - disable, 1 - log time of read operations| | ||||
| |AFC_AEP_LOGFILE|`/aep/log/aep.log`|worker|Where to write the log| | ||||
| |AFC_AEP_CACHE|`/aep/cache`|worker|Where to store the cache| | ||||
| |AFC_AEP_CACHE_MAX_FILE_SIZE|`50000000`|worker|Cache files with size less than the value| | ||||
| |AFC_AEP_CACHE_MAX_SIZE|`1000000000`|worker|Max cache size| | ||||
| |AFC_AEP_REAL_MOUNTPOINT|`/mnt/nfs/rat_transfer`|worker|Redirect read access to there| | ||||
| |AFC_AEP_ENGINE_MOUNTPOINT|value of AFC_AEP_REAL_MOUNTPOINT|worker|Redirect read access from here| | ||||
| |AFC_WORKER_CELERY_WORKERS|`rat_1`|worker|Celery worker name(s) to use| | ||||
| |AFC_WORKER_CELERY_OPTS||worker|Additional celery worker options| | ||||
| |AFC_WORKER_CELERY_LOG|`INFO`|worker|Celery log level. `ERROR` or `INFO` or `DEBUG`| | ||||
| |AFC_ENGINE_LOG_LVL|'info'|worker|afc-engine log level| | ||||
| |AFC_MSGHND_NAME|msghnd|dispatcher|Message handler service hostname| | ||||
| |AFC_MSGHND_PORT|8000|dispatcher|Message handler service HTTP port| | ||||
| |AFC_WEBUI_NAME|rat_server|dispatcher|WebUI service hostname| | ||||
| |AFC_WEBUI_PORT|80|dispatcher|WebUI service HTTP Port| | ||||
|AFC_ENFORCE_HTTPS|TRUE|dispatcher|Whether to enforce forwarding of HTTP requests to HTTPS. TRUE to enable, anything else to disable| ||||
| |AFC_SERVER_NAME|"_"|dispatcher|Hostname of the AFC Server, for example - "openafc.tip.build". "_" - will accept any hostname (but this is not secure)| | ||||
| | **RCACHE settings** |||| | ||||
|RCACHE_ENABLED|TRUE|rcache, rat_server, msghnd, worker, uls_downloader|TRUE if Rcache enabled, FALSE to use the legacy objstorage response cache| ||||
| |RCACHE_POSTGRES_DSN|Must be set|rcache, rat_server, msghnd|Connection string to Rcache Postgres database| | ||||
| |RCACHE_SERVICE_URL|Must be set|rat_server, msghnd, worker, uls_downloader|Rcache service REST API base URL| | ||||
| |RCACHE_RMQ_DSN|Must be set|rat_server, msghnd, worker|AMQP URL to RabbitMQ vhost that workers use to communicate computation result| | ||||
|RCACHE_UPDATE_ON_SEND|TRUE|rat_server, msghnd, worker|TRUE if worker sends result to Rcache server, FALSE if msghnd/rat_server| ||||
| |RCACHE_CLIENT_PORT|8000|rcache|Rcache REST API port| | ||||
|RCACHE_AFC_REQ_URL||rcache|REST API Rcache precomputer uses to send invalidated AFC requests for precomputation. No precomputation if not set| ||||
|RCACHE_RULESETS_URL||rcache|REST API Rcache spatial invalidator uses to retrieve AFC Configs' rulesets. Default invalidation distance used if not set| ||||
|RCACHE_CONFIG_RETRIEVAL_URL||rcache|REST API Rcache spatial invalidator uses to retrieve AFC Config by ruleset. Default invalidation distance used if not set| ||||
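|  | ||||
| Most of these variables are set per service in the docker-compose.yaml shown above. A minimal sketch of tuning the msghnd service this way (the values are illustrative): | ||||
| ``` | ||||
|   msghnd: | ||||
|     environment: | ||||
|       # More gunicorn workers for a heavier request load | ||||
|       - AFC_MSGHND_WORKERS=20 | ||||
|       # Kill and restart workers silent for more than 3 minutes | ||||
|       - AFC_MSGHND_TIMEOUT=180 | ||||
|       # Write the access log to the container console | ||||
|       - AFC_MSGHND_ACCESS_LOG=/proc/self/fd/2 | ||||
| ``` | ||||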
|  | ||||
|  | ||||
| ## RabbitMQ settings | ||||
|  | ||||
| There is a way to configure the AFC server to use a RabbitMQ broker from a different docker image. | ||||
| With the following environment variables you may configure a server to use an 'external' RabbitMQ instance: | ||||
| ``` | ||||
| BROKER_TYPE = external | ||||
| BROKER_PROT = amqp | ||||
| BROKER_USER = celery | ||||
| BROKER_PWD  = celery | ||||
| BROKER_FQDN = <ip address> | ||||
| BROKER_PORT = 5672 | ||||
| BROKER_MNG_PORT = 15672 | ||||
| ``` | ||||
| The following is an example of using the RabbitMQ service in docker-compose: | ||||
| ``` | ||||
|   rmq: | ||||
|     image: public.ecr.aws/w9v6y1o0/openafc/rmq-image:latest | ||||
|     restart: always | ||||
| ``` | ||||
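|  | ||||
| And a hedged sketch of the opposite case: pointing the worker at an external RabbitMQ instance using the variables listed above (the IP address is illustrative; the same variables would go to the rat-server and msghnd services as well): | ||||
| ``` | ||||
|   worker: | ||||
|     environment: | ||||
|       - BROKER_TYPE=external | ||||
|       - BROKER_PROT=amqp | ||||
|       - BROKER_USER=celery | ||||
|       - BROKER_PWD=celery | ||||
|       - BROKER_FQDN=172.31.11.50 | ||||
|       - BROKER_PORT=5672 | ||||
| ``` | ||||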
|  | ||||
| ## Managing the PostgreSQL database for users | ||||
|  | ||||
### Upgrading PostgreSQL | ||||
| When PostgreSQL is upgraded, the pgdata should be converted to be compatible with the new PostgreSQL version. This can be done with the tools/db_tools/update_db.sh script. | ||||
| ``` | ||||
| tools/db_tools/update_db.sh [pgdata_dir] [postgres_password] [old_postgres_version] [new_postgres_version] | ||||
| ``` | ||||
| This script makes a backup of [pgdata_dir] to [pgdata_dir].back and puts the converted db in [pgdata_dir]. | ||||
| This command should be run with root permissions, i.e. 'sudo tools/db_tools/update_db.sh ...' | ||||
|  | ||||
| Example: convert db which was created by PostgreSQL version 9.6 to be used by PostgreSQL version 14.7: | ||||
| ``` | ||||
| sudo tools/db_tools/update_db.sh ./pgdata qwerty 9.6 14.7 | ||||
| ``` | ||||
|  | ||||
| ### Note for an existing user database | ||||
|  | ||||
| The database format has changed over time. If your user database uses an older format, you might see errors indicating missing database fields upon bootup and login. The error message has instructions on how to migrate the database. These steps apply whether you're using the OIDC or non-OIDC login method. You have several options: | ||||
|  | ||||
| **1. Reinitialize the database without users:** | ||||
|  | ||||
| ``` | ||||
| rat-manage-api db-drop | ||||
| rat-manage-api db-create | ||||
| ``` | ||||
|  | ||||
| This will wipe out existing users, i.e. user accounts will need to be manually recreated. | ||||
|  | ||||
| **2. Migrate the database with users:** | ||||
|  | ||||
| ``` | ||||
| rat-manage-api db-upgrade | ||||
| ``` | ||||
| ## Managing user accounts | ||||
| Users can be created and removed. User roles can be added and removed. | ||||
| Remove a user with the user remove command, e.g.: | ||||
| ``` | ||||
| rat-manage-api user remove user@mycompany.com | ||||
|  | ||||
| ``` | ||||
| Update user roles with the user update command, e.g.: | ||||
| ``` | ||||
| rat-manage-api user update --role Admin --role AP --role Analysis --email "user@mycompany.com" | ||||
| ``` | ||||
| Create a user with the user create command. If the org argument is not given, the organization can be derived from the username if it's given in the form of an email address, e.g.: | ||||
| ``` | ||||
| rat-manage-api user create --role Admin --role AP --role Analysis --org mycompany.com "username" "mypassword" | ||||
|  | ||||
| ``` | ||||
| ## User roles | ||||
| Roles are: Super, Admin, AP, Analysis, Trial. | ||||
| "Super" is the highest-level role, which grants access rights to all organizations, as opposed to "Admin", which is limited to one organization. When upgrading from an older system without "Super", you will need to decide which users should be assigned the "Super" role and update their roles via the user update command, as sketched below. | ||||
|  | ||||
| ## MTLS | ||||
| A vanilla installation comes with a placeholder file for the client certificate bundle. | ||||
|  | ||||
| Besides the GUI, mTLS certificates can be managed via the CLI. | ||||
| To list certificates: | ||||
| ``` | ||||
| rat-manage-api mtls list | ||||
| ``` | ||||
|  | ||||
| To add certificates: | ||||
| ``` | ||||
| rat-manage-api mtls create --src <certificate file> --org <Organization name> --note <Short Note> | ||||
| ``` | ||||
|  | ||||
| To remove certificates: | ||||
| ``` | ||||
| rat-manage-api mtls remove --id <certificate id obtained from list command> | ||||
| ``` | ||||
| To dump a certificate to a file: | ||||
| ``` | ||||
| rat-manage-api mtls dump --id <certificate id obtained from list command> --dst <file name> | ||||
| ``` | ||||
|  | ||||
| Happy usage! | ||||
|  | ||||
| ## ULS database update automation | ||||
|  | ||||
| The ULS Database needs (preferably daily) updates to stay up-to-date with regulators' requirements. See [README in uls](uls/README.md "ULS Service ReadMe") for instructions and configuration. The docker compose given above will create a container that runs the daily ULS update service. | ||||
ReleaseNote.md (new file, 2 lines)
							| @@ -0,0 +1,2 @@ | ||||
| # Release Note | ||||
| ## **Version and Date** | ||||
TestDemoUser.Readme.md (new file, 3 lines)
							| @@ -0,0 +1,3 @@ | ||||
| # DEMO and Test user configuration | ||||
| A Super user can choose **DEMO_US** as a ruleset for demo purposes or **TEST_US** for testing the AFC config. | ||||
| When the AvailableSpectrumInquiry is sent with these rulesets, the appropriate config will be applied. | ||||
TrialUser.Readme.md (new file, 9 lines)
							| @@ -0,0 +1,9 @@ | ||||
| # Trial user configuration | ||||
| AFC has the capability to provide for trial users that have a limited ability to perform spectrum availability requests in a preset configuration. | ||||
|  | ||||
| ### Create a user with the trial role only | ||||
| The new user can register for an account online via the UI. Upon approval, the user is granted the Trial role by default, and can start sending inquiries. | ||||
|  | ||||
| ### Running the Spectrum query as the Trial user | ||||
| The trial user can simply provide **TestCertificationId** as the certification ID and **TestSerialNumber** as the serial number in the Spectrum query. | ||||
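|  | ||||
| A hedged sketch of the relevant fragment of such a request (the field layout follows the AFC System-Device Interface spec; the `rulesetId` value is an illustrative assumption, and all other request fields are omitted): | ||||
| ``` | ||||
| "deviceDescriptor": { | ||||
|   "serialNumber": "TestSerialNumber", | ||||
|   "certificationId": [ | ||||
|     { "rulesetId": "US_47_CFR_PART_15_SUBPART_E", "id": "TestCertificationId" } | ||||
|   ] | ||||
| } | ||||
| ``` | ||||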
|  | ||||
als/ALS.sql (new file, 405 lines)
							| @@ -0,0 +1,405 @@ | ||||
| /* | ||||
|  * Copyright (C) 2022 Broadcom. All rights reserved. | ||||
|  * The term "Broadcom" refers solely to the Broadcom Inc. corporate affiliate | ||||
|  * that owns the software below. | ||||
|  * This work is licensed under the OpenAFC Project License, a copy of which is | ||||
|  * included with this software program. | ||||
|  * | ||||
|  * This file creates ALS (AFC Request/Response/Config Logging System) database on PostgreSQL+PostGIS server | ||||
|  * This file is generated, direct editing is not recommended. | ||||
|  * Intended maintenance sequence is as follows: | ||||
|  *   1. Load (copypaste) als_db_schema/ALS.dbml into dbdiagram.io | ||||
|  *   2. Modify as needed | ||||
|  *   3. Save (copypaste) modified sources back to als_db_schema/ALS.dbml | ||||
|  *   4. Also export schema in PostgreSQL format as als_db_schema/ALS_raw.sql | ||||
|  *   5. Rectify exported schema with als_rectifier.awk (awk -f als_db_schema/als_rectifier.awk < als_db_schema/ALS_raw.sql > ALS.sql) | ||||
|  */ | ||||
|  | ||||
| CREATE EXTENSION postgis; | ||||
|  | ||||
| CREATE TABLE "afc_message" ( | ||||
|   "message_id" bigserial, | ||||
|   "month_idx" smallint, | ||||
|   "afc_server" serial, | ||||
|   "rx_time" timestamptz, | ||||
|   "tx_time" timestamptz, | ||||
|   "rx_envelope_digest" uuid, | ||||
|   "tx_envelope_digest" uuid, | ||||
|   PRIMARY KEY ("message_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "rx_envelope" ( | ||||
|   "rx_envelope_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "envelope_json" json, | ||||
|   PRIMARY KEY ("rx_envelope_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "tx_envelope" ( | ||||
|   "tx_envelope_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "envelope_json" json, | ||||
|   PRIMARY KEY ("tx_envelope_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "request_response_in_message" ( | ||||
|   "message_id" bigint, | ||||
|   "request_id" text, | ||||
|   "month_idx" smallint, | ||||
|   "request_response_digest" uuid, | ||||
|   "expire_time" timestamptz, | ||||
|   PRIMARY KEY ("message_id", "request_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "request_response" ( | ||||
|   "request_response_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "afc_config_text_digest" uuid, | ||||
|   "customer_id" integer, | ||||
|   "uls_data_version_id" integer, | ||||
|   "geo_data_version_id" integer, | ||||
|   "request_json_digest" uuid, | ||||
|   "response_json_digest" uuid, | ||||
|   "device_descriptor_digest" uuid, | ||||
|   "location_digest" uuid, | ||||
|   "response_code" int, | ||||
|   "response_description" text, | ||||
|   "response_data" text, | ||||
|   PRIMARY KEY ("request_response_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "device_descriptor" ( | ||||
|   "device_descriptor_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "serial_number" text, | ||||
|   "certifications_digest" uuid, | ||||
|   PRIMARY KEY ("device_descriptor_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "certification" ( | ||||
|   "certifications_digest" uuid, | ||||
|   "certification_index" smallint, | ||||
|   "month_idx" smallint, | ||||
|   "ruleset_id" text, | ||||
|   "certification_id" text, | ||||
|   PRIMARY KEY ("certifications_digest", "certification_index", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "compressed_json" ( | ||||
|   "compressed_json_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "compressed_json_data" bytea, | ||||
|   PRIMARY KEY ("compressed_json_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "customer" ( | ||||
|   "customer_id" serial, | ||||
|   "month_idx" smallint, | ||||
|   "customer_name" text, | ||||
|   PRIMARY KEY ("customer_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "location" ( | ||||
|   "location_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "location_wgs84" geography(POINT,4326), | ||||
|   "location_uncertainty_m" real, | ||||
|   "location_type" text, | ||||
|   "deployment_type" int, | ||||
|   "height_m" real, | ||||
|   "height_uncertainty_m" real, | ||||
|   "height_type" text, | ||||
|   PRIMARY KEY ("location_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "afc_config" ( | ||||
|   "afc_config_text_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "afc_config_text" text, | ||||
|   "afc_config_json" json, | ||||
|   PRIMARY KEY ("afc_config_text_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "geo_data_version" ( | ||||
|   "geo_data_version_id" serial, | ||||
|   "month_idx" smallint, | ||||
|   "geo_data_version" text, | ||||
|   PRIMARY KEY ("geo_data_version_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "uls_data_version" ( | ||||
|   "uls_data_version_id" serial, | ||||
|   "month_idx" smallint, | ||||
|   "uls_data_version" text, | ||||
|   PRIMARY KEY ("uls_data_version_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "max_psd" ( | ||||
|   "request_response_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "low_frequency_mhz" smallint, | ||||
|   "high_frequency_mhz" smallint, | ||||
|   "max_psd_dbm_mhz" real, | ||||
|   PRIMARY KEY ("request_response_digest", "month_idx", "low_frequency_mhz", "high_frequency_mhz") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "max_eirp" ( | ||||
|   "request_response_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "op_class" smallint, | ||||
|   "channel" smallint, | ||||
|   "max_eirp_dbm" real, | ||||
|   PRIMARY KEY ("request_response_digest", "month_idx", "op_class", "channel") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "afc_server" ( | ||||
|   "afc_server_id" serial, | ||||
|   "month_idx" smallint, | ||||
|   "afc_server_name" text, | ||||
|   PRIMARY KEY ("afc_server_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "decode_error" ( | ||||
|   "id" bigserial PRIMARY KEY, | ||||
|   "time" timestamptz, | ||||
|   "msg" text, | ||||
|   "code_line" integer, | ||||
|   "data" text, | ||||
|   "month_idx" smallint | ||||
| ); | ||||
|  | ||||
| CREATE INDEX ON "afc_message" ("rx_time"); | ||||
|  | ||||
| CREATE INDEX ON "afc_message" ("tx_time"); | ||||
|  | ||||
| CREATE INDEX ON "rx_envelope" USING HASH ("rx_envelope_digest"); | ||||
|  | ||||
| CREATE INDEX ON "tx_envelope" USING HASH ("tx_envelope_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response_in_message" ("request_id"); | ||||
|  | ||||
| CREATE INDEX ON "request_response_in_message" ("request_response_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response_in_message" ("expire_time"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" USING HASH ("request_response_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("afc_config_text_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("customer_id"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("device_descriptor_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("location_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("response_code"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("response_description"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("response_data"); | ||||
|  | ||||
| CREATE INDEX ON "device_descriptor" USING HASH ("device_descriptor_digest"); | ||||
|  | ||||
| CREATE INDEX ON "device_descriptor" ("serial_number"); | ||||
|  | ||||
| CREATE INDEX ON "device_descriptor" ("certifications_digest"); | ||||
|  | ||||
| CREATE INDEX ON "certification" USING HASH ("certifications_digest"); | ||||
|  | ||||
| CREATE INDEX ON "certification" ("ruleset_id"); | ||||
|  | ||||
| CREATE INDEX ON "certification" ("certification_id"); | ||||
|  | ||||
| CREATE INDEX ON "compressed_json" USING HASH ("compressed_json_digest"); | ||||
|  | ||||
| CREATE INDEX ON "customer" ("customer_name"); | ||||
|  | ||||
| CREATE INDEX ON "location" USING HASH ("location_digest"); | ||||
|  | ||||
| CREATE INDEX ON "location" ("location_wgs84"); | ||||
|  | ||||
| CREATE INDEX ON "location" ("location_type"); | ||||
|  | ||||
| CREATE INDEX ON "location" ("height_m"); | ||||
|  | ||||
| CREATE INDEX ON "location" ("height_type"); | ||||
|  | ||||
| CREATE INDEX ON "afc_config" USING HASH ("afc_config_text_digest"); | ||||
|  | ||||
| CREATE UNIQUE INDEX ON "geo_data_version" ("geo_data_version", "month_idx"); | ||||
|  | ||||
| CREATE INDEX ON "geo_data_version" ("geo_data_version"); | ||||
|  | ||||
| CREATE UNIQUE INDEX ON "uls_data_version" ("uls_data_version", "month_idx"); | ||||
|  | ||||
| CREATE INDEX ON "uls_data_version" ("uls_data_version"); | ||||
|  | ||||
| CREATE INDEX ON "max_psd" USING HASH ("request_response_digest"); | ||||
|  | ||||
| CREATE INDEX ON "max_psd" ("low_frequency_mhz"); | ||||
|  | ||||
| CREATE INDEX ON "max_psd" ("high_frequency_mhz"); | ||||
|  | ||||
| CREATE INDEX ON "max_psd" ("max_psd_dbm_mhz"); | ||||
|  | ||||
| CREATE INDEX ON "max_eirp" USING HASH ("request_response_digest"); | ||||
|  | ||||
| CREATE INDEX ON "max_eirp" ("op_class"); | ||||
|  | ||||
| CREATE INDEX ON "max_eirp" ("channel"); | ||||
|  | ||||
| CREATE INDEX ON "max_eirp" ("max_eirp_dbm"); | ||||
|  | ||||
| CREATE UNIQUE INDEX ON "afc_server" ("afc_server_name", "month_idx"); | ||||
|  | ||||
| CREATE INDEX ON "afc_server" ("afc_server_name"); | ||||
|  | ||||
| COMMENT ON TABLE "afc_message" IS 'AFC Request/Response message pair (contain individual requests/responses)'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_message"."rx_envelope_digest" IS 'Envelope of AFC Request message'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_message"."tx_envelope_digest" IS 'Envelope of AFC Response message'; | ||||
|  | ||||
| COMMENT ON TABLE "rx_envelope" IS 'Envelope (constant part) of AFC Request Message'; | ||||
|  | ||||
| COMMENT ON COLUMN "rx_envelope"."rx_envelope_digest" IS 'MD5 of envelope_json field in UTF8 encoding'; | ||||
|  | ||||
| COMMENT ON COLUMN "rx_envelope"."envelope_json" IS 'AFC Request JSON with empty availableSpectrumInquiryRequests field'; | ||||
|  | ||||
| COMMENT ON TABLE "tx_envelope" IS 'Envelope (constant part) of AFC Response Message'; | ||||
|  | ||||
| COMMENT ON COLUMN "tx_envelope"."tx_envelope_digest" IS 'MD5 of envelope_json field in UTF8 encoding'; | ||||
|  | ||||
| COMMENT ON COLUMN "tx_envelope"."envelope_json" IS 'AFC Response JSON with empty availableSpectrumInquiryRequests field'; | ||||
|  | ||||
| COMMENT ON TABLE "request_response_in_message" IS 'Associative table for relatonship between AFC Request/Response messages and individual requests/responses. Also encapsulates variable part of requests/responses'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response_in_message"."message_id" IS 'AFC request/response message pair this request/response belongs'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response_in_message"."request_id" IS 'ID of request/response within message'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response_in_message"."request_response_digest" IS 'Reference to otentially constant part of request/response'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response_in_message"."expire_time" IS 'Response expiration time'; | ||||
|  | ||||
| COMMENT ON TABLE "request_response" IS 'Potentiially constant part of request/response'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."request_response_digest" IS 'MD5 computed over request/response with requestId and availabilityExpireTime fields set to empty'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."afc_config_text_digest" IS 'MD5 over used AFC Config text represnetation'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."customer_id" IS 'AP vendor'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."uls_data_version_id" IS 'Version of used ULS data'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."geo_data_version_id" IS 'Version of used geospatial data'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."request_json_digest" IS 'MD5 of request JSON with empty requestId'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."response_json_digest" IS 'MD5 of resaponse JSON with empty requesatId and availabilityExpireTime'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."device_descriptor_digest" IS 'MD5 of device descriptor (AP) related part of request JSON'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."location_digest" IS 'MD5 of location-related part of request JSON'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."response_description" IS 'Optional response code short description. Null for success'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."response_data" IS 'Optional supplemental failure information. Optional comma-separated list of missing/invalid/unexpected parameters, etc.'; | ||||
|  | ||||
| COMMENT ON TABLE "device_descriptor" IS 'Information about device (e.g. AP)'; | ||||
|  | ||||
| COMMENT ON COLUMN "device_descriptor"."device_descriptor_digest" IS 'MD5 over parts of requesat JSON pertinent to AP'; | ||||
|  | ||||
| COMMENT ON COLUMN "device_descriptor"."serial_number" IS 'AP serial number'; | ||||
|  | ||||
| COMMENT ON COLUMN "device_descriptor"."certifications_digest" IS 'Device certifications'; | ||||
|  | ||||
| COMMENT ON TABLE "certification" IS 'Element of certifications list'; | ||||
|  | ||||
| COMMENT ON COLUMN "certification"."certifications_digest" IS 'MD5 of certification list json'; | ||||
|  | ||||
| COMMENT ON COLUMN "certification"."certification_index" IS 'Index in certification list'; | ||||
|  | ||||
| COMMENT ON COLUMN "certification"."ruleset_id" IS 'Name of rules for which AP certified (equivalent of region)'; | ||||
|  | ||||
| COMMENT ON COLUMN "certification"."certification_id" IS 'ID of certification (equivalent of manufacturer)'; | ||||
|  | ||||
| COMMENT ON TABLE "compressed_json" IS 'Compressed body of request or response'; | ||||
|  | ||||
| COMMENT ON COLUMN "compressed_json"."compressed_json_digest" IS 'MD5 hash of compressed data'; | ||||
|  | ||||
| COMMENT ON COLUMN "compressed_json"."compressed_json_data" IS 'Compressed data'; | ||||
|  | ||||
| COMMENT ON TABLE "customer" IS 'Customer aka vendor aka user'; | ||||
|  | ||||
| COMMENT ON COLUMN "customer"."customer_name" IS 'Its name'; | ||||
|  | ||||
| COMMENT ON TABLE "location" IS 'AP location'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."location_digest" IS 'MD5 computed over location part of request JSON'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."location_wgs84" IS 'AP area center (WGS84 coordinates)'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."location_uncertainty_m" IS 'Radius of AP uncertainty area in meters'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."location_type" IS 'Ellipse/LinearPolygon/RadialPolygon'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."deployment_type" IS '0/1/2 for unknown/indoor/outdoor'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."height_m" IS 'AP elevation in meters'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."height_uncertainty_m" IS 'Elevation uncertainty in meters'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."height_type" IS 'Elevation type'; | ||||
|  | ||||
| COMMENT ON TABLE "afc_config" IS 'AFC Config'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_config"."afc_config_text_digest" IS 'MD5 computed over text representation'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_config"."afc_config_text" IS 'Text representation of AFC Config'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_config"."afc_config_json" IS 'JSON representation of AFC Config'; | ||||
|  | ||||
| COMMENT ON TABLE "geo_data_version" IS 'Version of geospatial data'; | ||||
|  | ||||
| COMMENT ON TABLE "uls_data_version" IS 'Version of ULS data"'; | ||||
|  | ||||
| COMMENT ON TABLE "max_psd" IS 'PSD result'; | ||||
|  | ||||
| COMMENT ON COLUMN "max_psd"."request_response_digest" IS 'Request this result belongs to'; | ||||
|  | ||||
| COMMENT ON TABLE "max_eirp" IS 'EIRP result'; | ||||
|  | ||||
| COMMENT ON COLUMN "max_eirp"."request_response_digest" IS 'Request this result belongs to'; | ||||
|  | ||||
| ALTER TABLE "afc_message" ADD CONSTRAINT "afc_message_afc_server_ref" FOREIGN KEY ("afc_server", "month_idx") REFERENCES "afc_server" ("afc_server_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "afc_message" ADD CONSTRAINT "afc_message_rx_envelope_digest_ref" FOREIGN KEY ("rx_envelope_digest", "month_idx") REFERENCES "rx_envelope" ("rx_envelope_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "afc_message" ADD CONSTRAINT "afc_message_tx_envelope_digest_ref" FOREIGN KEY ("tx_envelope_digest", "month_idx") REFERENCES "tx_envelope" ("tx_envelope_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response_in_message" ADD CONSTRAINT "request_response_in_message_message_id_ref" FOREIGN KEY ("message_id", "month_idx") REFERENCES "afc_message" ("message_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response_in_message" ADD CONSTRAINT "request_response_in_message_request_response_digest_ref" FOREIGN KEY ("request_response_digest", "month_idx") REFERENCES "request_response" ("request_response_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_afc_config_text_digest_ref" FOREIGN KEY ("afc_config_text_digest", "month_idx") REFERENCES "afc_config" ("afc_config_text_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_customer_id_ref" FOREIGN KEY ("customer_id", "month_idx") REFERENCES "customer" ("customer_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_uls_data_version_id_ref" FOREIGN KEY ("uls_data_version_id", "month_idx") REFERENCES "uls_data_version" ("uls_data_version_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_geo_data_version_id_ref" FOREIGN KEY ("geo_data_version_id", "month_idx") REFERENCES "geo_data_version" ("geo_data_version_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_request_json_digest_ref" FOREIGN KEY ("request_json_digest", "month_idx") REFERENCES "compressed_json" ("compressed_json_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_response_json_digest_ref" FOREIGN KEY ("response_json_digest", "month_idx") REFERENCES "compressed_json" ("compressed_json_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_device_descriptor_digest_ref" FOREIGN KEY ("device_descriptor_digest", "month_idx") REFERENCES "device_descriptor" ("device_descriptor_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_location_digest_ref" FOREIGN KEY ("location_digest", "month_idx") REFERENCES "location" ("location_digest", "month_idx"); | ||||
|  | ||||
|  | ||||
| ALTER TABLE "max_psd" ADD CONSTRAINT "max_psd_request_response_digest_ref" FOREIGN KEY ("request_response_digest", "month_idx") REFERENCES "request_response" ("request_response_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "max_eirp" ADD CONSTRAINT "max_eirp_request_response_digest_ref" FOREIGN KEY ("request_response_digest", "month_idx") REFERENCES "request_response" ("request_response_digest", "month_idx"); | ||||
| ; | ||||
als/Dockerfile.kafka (new file, 43 lines)
							| @@ -0,0 +1,43 @@ | ||||
| # | ||||
| # Copyright (C) 2021 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
|  | ||||
| # Dockerfile for Kafka server, used by ALS (AFC Request/Response/Config) logging | ||||
| # This dockerfile maps outside config variables to those used by Bitnami/Kafka | ||||
|  | ||||
| FROM bitnami/kafka:3.3.1 | ||||
|  | ||||
| # Outside configuration variables | ||||
| ENV KAFKA_ADVERTISED_HOST=localhost | ||||
| ENV KAFKA_CLIENT_PORT=9092 | ||||
| ENV KAFKA_BROKER_PORT=9093 | ||||
| ENV KAFKA_CLIENT_SECURITY_PROTOCOL=PLAINTEXT | ||||
|  | ||||
| # Bitnami Kafka configuration parameters | ||||
| ENV KAFKA_ENABLE_KRAFT=yes | ||||
| ENV KAFKA_CFG_PROCESS_ROLES=broker,controller | ||||
| ENV KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER | ||||
| ENV KAFKA_CFG_BROKER_ID=1 | ||||
| ENV ALLOW_PLAINTEXT_LISTENER=yes | ||||
| ENV KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true | ||||
| ENV KAFKA_CFG_NUM_PARTITIONS=1 | ||||
| ENV KAFKA_MAX_REQUEST_SIZE=1048576 | ||||
|  | ||||
| # Setting Kafka log level to ERROR limits initialization blurt | ||||
| ENV KAFKA_LOG_LEVEL=ERROR | ||||
| RUN sed -i "s/log4j\.logger\.kafka=.*/log4j.logger.kafka=${KAFKA_LOG_LEVEL}/" /opt/bitnami/kafka/config/log4j.properties | ||||
| RUN sed -i "s/log4j\.logger\.org\.apache\.kafka=.*/log4j.logger.org.apache.kafka=${KAFKA_LOG_LEVEL}/" /opt/bitnami/kafka/config/log4j.properties | ||||
|  | ||||
| # Kafka environment variables, computed from DockerCompose-supplied variables | ||||
| # can't be defined in ENV - hence they are moved to ENTRYPOINT | ||||
| ENTRYPOINT env \ | ||||
|            KAFKA_CFG_MESSAGE_MAX_BYTES=${KAFKA_MAX_REQUEST_SIZE} \ | ||||
|            KAFKA_CFG_MAX_REQUEST_SIZE=${KAFKA_MAX_REQUEST_SIZE} \ | ||||
|            KAFKA_CFG_LISTENERS=PLAINTEXT://:${KAFKA_CLIENT_PORT},CONTROLLER://:${KAFKA_BROKER_PORT} \ | ||||
|            KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:${KAFKA_CLIENT_SECURITY_PROTOCOL},PLAINTEXT:${KAFKA_CLIENT_SECURITY_PROTOCOL} \ | ||||
|            KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://${KAFKA_ADVERTISED_HOST}:${KAFKA_CLIENT_PORT} \ | ||||
|            KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=1@localhost:${KAFKA_BROKER_PORT} \ | ||||
|            /opt/bitnami/scripts/kafka/entrypoint.sh /opt/bitnami/scripts/kafka/run.sh | ||||
als/Dockerfile.siphon (new file, 90 lines)
							| @@ -0,0 +1,90 @@ | ||||
| # | ||||
| # Copyright (C) 2021 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
|  | ||||
| # Dockerfile for Siphon - als_siphon.py script that takes log records from | ||||
| # Kafka and puts them to PostgreSQL+PostGIS database (optionally creating | ||||
| # necessary database before operation) | ||||
|  | ||||
| FROM alpine:3.18 | ||||
|  | ||||
| RUN mkdir -p -m 777 /usr/app | ||||
| WORKDIR /usr/app | ||||
|  | ||||
| RUN apk add --update --no-cache python3=~3.11 py3-sqlalchemy=~1.4 py3-pip \ | ||||
|     py3-psycopg2 py3-pydantic=~1.10 py3-alembic py3-lz4 | ||||
|  | ||||
| RUN apk add --repository=http://dl-cdn.alpinelinux.org/alpine/edge/testing/ \ | ||||
|     py3-confluent-kafka | ||||
|  | ||||
| COPY requirements.txt /usr/app/ | ||||
| RUN pip3 install --no-cache-dir --root-user-action=ignore -r requirements.txt | ||||
|  | ||||
| ENV PYTHONPATH=/usr/app | ||||
| ENV PATH=$PATH:/usr/app | ||||
|  | ||||
| # Comma-separated list of Kafka (bootstrap) servers, each having 'host[:port]' | ||||
| # form. Port, if not specified, is 9092 | ||||
| ENV KAFKA_SERVERS=localhost | ||||
| # Client ID to use in Kafka logs. If ends with '@' - suffixed by unique random | ||||
| # string | ||||
| ENV KAFKA_CLIENT_ID=siphon_@ | ||||
| # 'SSL' or 'PLAINTEXT'. Default is 'PLAINTEXT' | ||||
| ENV KAFKA_SECURITY_PROTOCOL= | ||||
| # SSL keyfile | ||||
| ENV KAFKA_SSL_KEYFILE= | ||||
| # SSL CA (Certificate Authority) file | ||||
| ENV KAFKA_SSL_CAFILE= | ||||
| # Maximum message size (default is 1MB) | ||||
| ENV KAFKA_MAX_REQUEST_SIZE= | ||||
| # PostgreSQL server hostname | ||||
| ENV POSTGRES_HOST=localhost | ||||
| # PostgreSQL server port | ||||
| ENV POSTGRES_PORT=5432 | ||||
| # Parameters (name, user, password, options) of initial database - database to | ||||
| # connect to in order to create other databases | ||||
| ENV POSTGRES_INIT_DB=postgres | ||||
| ENV POSTGRES_INIT_USER=postgres | ||||
| ENV POSTGRES_INIT_PASSWORD=postgres | ||||
| ENV POSTGRES_INIT_OPTIONS= | ||||
| # Parameters (name, user, password, options) of database for | ||||
| # Request/Response/Config logs | ||||
| ENV POSTGRES_ALS_DB=ALS | ||||
| ENV POSTGRES_ALS_USER=postgres | ||||
| ENV POSTGRES_ALS_PASSWORD=postgres | ||||
| ENV POSTGRES_ALS_OPTIONS= | ||||
| # Parameters (name, user, password, options) of database for JSON logs | ||||
| ENV POSTGRES_LOG_DB=AFC_LOGS | ||||
| ENV POSTGRES_LOG_USER=postgres | ||||
| ENV POSTGRES_LOG_PASSWORD=postgres | ||||
| ENV POSTGRES_LOG_OPTIONS= | ||||
| # What to do if database being created already exists: 'skip', 'drop'. Default | ||||
| # is to fail | ||||
| ENV INIT_IF_EXISTS=skip | ||||
| # Port to serve Prometheus metrics on (none/empty is to not serve) | ||||
| ENV SIPHON_PROMETHEUS_PORT=8080 | ||||
|  | ||||
| COPY als_siphon.py als_query.py /usr/app/ | ||||
| RUN chmod a+x /usr/app/*.py | ||||
| COPY ALS.sql /usr/app | ||||
|  | ||||
| ENTRYPOINT /usr/app/als_siphon.py init_siphon \ | ||||
|     --kafka_servers=$KAFKA_SERVERS \ | ||||
|     --kafka_client_id=$KAFKA_CLIENT_ID \ | ||||
|     --kafka_security_protocol=$KAFKA_SECURITY_PROTOCOL \ | ||||
|     --kafka_ssl_keyfile=$KAFKA_SSL_KEYFILE \ | ||||
|     --kafka_ssl_cafile=$KAFKA_SSL_CAFILE \ | ||||
|     --kafka_max_partition_fetch_bytes=$KAFKA_MAX_REQUEST_SIZE \ | ||||
|     --init_postgres=postgresql://${POSTGRES_INIT_USER}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_INIT_DB}${POSTGRES_INIT_OPTIONS} \ | ||||
|     --init_postgres_password=$POSTGRES_INIT_PASSWORD \ | ||||
|     --if_exists=$INIT_IF_EXISTS \ | ||||
|     --als_postgres=postgresql://${POSTGRES_ALS_USER}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_ALS_DB}${POSTGRES_ALS_OPTIONS} \ | ||||
|     --als_postgres_password=$POSTGRES_ALS_PASSWORD \ | ||||
|     --log_postgres=postgresql://${POSTGRES_LOG_USER}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_LOG_DB}${POSTGRES_LOG_OPTIONS} \ | ||||
|     --log_postgres_password=$POSTGRES_LOG_PASSWORD \ | ||||
|     --prometheus_port=$SIPHON_PROMETHEUS_PORT \ | ||||
|     --if_exists=$INIT_IF_EXISTS \ | ||||
|     --als_sql /usr/app/ALS.sql | ||||
als/README.md (new file, 214 lines)
							| @@ -0,0 +1,214 @@ | ||||
| Copyright (C) 2022 Broadcom. All rights reserved.\ | ||||
| The term "Broadcom" refers solely to the Broadcom Inc. corporate affiliate that | ||||
| owns the software below. This work is licensed under the OpenAFC Project | ||||
| License, a copy of which is included with this software program. | ||||
|  | ||||
| # Tools For Working With Log Databases | ||||
|  | ||||
| ## Table of Contents | ||||
| - [Databases ](#databases) | ||||
|   - [*ALS* Database ](#als_database) | ||||
|   - [*AFC\_LOGS* Database ](#afc_logs_database) | ||||
|   - [Initial Database ](#initial_database) | ||||
|   - [Template Databases ](#template_databases) | ||||
| - [`als_siphon.py` - Moving Logs From Kafka To Postgres ](#als_siphon_py) | ||||
| - [`als_query.py` - Querying Logs From Postgres Database ](#als_query_py) | ||||
|   - [Installation](#als_query_install) | ||||
|   - [Addressing PostgreSQL](#als_query_server) | ||||
|   - [`log` Command ](#als_query_log) | ||||
|     - [`log` Command Examples ](#als_query_log_examples) | ||||
|  | ||||
|  | ||||
| ## Databases <a name="databases"> | ||||
|  | ||||
| ALS (AFC Log Storage) functionality revolves around two PostgreSQL databases, used for log storage: **ALS** and **AFC_LOGS**. | ||||
|  | ||||
| ### *ALS* Database <a name="als_database"> | ||||
|  | ||||
| Stores the log of AFC Request/Response/Config data. It has a rather convoluted multitable structure. | ||||
|  | ||||
| The SQL code for creating this database is contained in the *ALS.sql* file. This file should be considered generated and should not be manually edited. | ||||
|  | ||||
| The *als_db_schema* folder contains the source material for *ALS.sql* generation: | ||||
|  | ||||
| - *ALS.dbml*. Source file for the [dbdiagram.io](https://dbdiagram.io) DB diagramming site. Copy/paste the content of this file there, make modifications, then copy/paste it back to this file.   | ||||
|   Also, upon completion, *ALS_raw.sql* and *ALS.png* should be exported (as *Export to PostgreSQL* and *Export to PNG* respectively) - see below. | ||||
|  | ||||
| - *ALS_raw.sql*. Database creation SQL script that should be exported from [dbdiagram.io](https://dbdiagram.io) after changes are made to *ALS.dbml*.   | ||||
|   This file is almost like the final *ALS.sql*, but requires certain tweaks: | ||||
|     * Declaring the used PostgreSQL extensions (PostGIS in this case) | ||||
|     * Removal of many-to-many artifacts. For many-to-many relationships [dbdiagram.io](https://dbdiagram.io) creates artificial tables that, adding insult to injury, violate PostgreSQL syntax. They are not used and should be removed. | ||||
|     * Segmentation. The database is planned with segmentation in mind (by the *month_idx* field), but segmentation itself is not performed. This will need to be done eventually. | ||||
|  | ||||
| - *als_rectifier.awk* AWK script for converting *ALS_raw.sql* to *ALS.sql*. | ||||
|  | ||||
| - *ALS.png*. Pictorial database schema. Should be exported as PNG after changes are made to *ALS.dbml*. | ||||
|  | ||||
| ### *AFC_LOGS* Database <a name="afc_logs_database"> | ||||
|  | ||||
| For each JSON log type (*topic* in Kafka parlance) this database has a separate table with the following columns (a sketch of such a table follows the list): | ||||
|  | ||||
| - *time* Log record timestamp with timezone | ||||
|  | ||||
| - *source* String that uniquely identifies the entity that created the log record | ||||
|  | ||||
| - *log* JSON log record. | ||||
|  | ||||
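| A minimal sketch of the DDL such a per-topic table corresponds to (illustrative only; the actual tables are created by `als_siphon.py`, with the topic name as the table name): | ||||
| ``` | ||||
| CREATE TABLE "my_topic" ( | ||||
|   "time" timestamptz, | ||||
|   "source" text, | ||||
|   "log" json | ||||
| ); | ||||
| ``` | ||||
|  | ||||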
| ### Initial Database <a name="initial_database"> | ||||
|  | ||||
| To create a database, the `als_siphon.py` script should connect to an already existing database. This already existing database is named the *initial database*; by default it is the built-in database named *postgres*. | ||||
|  | ||||
| ### Template Databases <a name="template_databases"> | ||||
|  | ||||
| Template databases are used for the creation of the *ALS* and *AFC_LOGS* databases. Something other than the default might be used (but not yet, as of this writing). | ||||
|  | ||||
|  | ||||
| ## `als_siphon.py` - Moving Logs From Kafka To Postgres <a name="als_siphon_py"> | ||||
|  | ||||
| The main purpose of `als_siphon.py` is to fetch log records from Kafka and move them to the previously described PostgreSQL databases. It can also initialize those databases. | ||||
|  | ||||
| `$ als_siphon.py COMMAND PARAMETERS` | ||||
|  | ||||
| Commands are: | ||||
|  | ||||
| - `init` Create the *ALS* and/or *AFC_LOGS* databases. If they already exist, the databases may be recreated or left intact. | ||||
|  | ||||
| - `siphon` Do the moving from Kafka to PostgreSQL. | ||||
|  | ||||
| - `init_siphon` First create the databases, then do the siphoning. Used for Docker operation. | ||||
|  | ||||
| Parameters are many - see the help messages. | ||||
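|  | ||||
| A sketch of a typical `siphon` invocation, assuming the flags that the Docker entrypoint passes to `init_siphon` (see *Dockerfile.siphon*) apply here as well; hosts and credentials are illustrative: | ||||
| ``` | ||||
| als_siphon.py siphon --kafka_servers=localhost:9092 \ | ||||
|     --als_postgres=postgresql://postgres@localhost:5432/ALS \ | ||||
|     --als_postgres_password=postgres \ | ||||
|     --log_postgres=postgresql://postgres@localhost:5432/AFC_LOGS \ | ||||
|     --log_postgres_password=postgres | ||||
| ``` | ||||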
|  | ||||
|  | ||||
| ## `als_query.py` - Querying Logs From Postgres Database <a name="als_query_py"> | ||||
|  | ||||
| This script queries logs, stored in *ALS* and *AFC_LOGS* databases. | ||||
|  | ||||
| As of this writing, this script only supports the `log` command, which reads JSON logs from *AFC_LOGS*. | ||||
|  | ||||
|  | ||||
| ### Installation <a name="als_query_install"> | ||||
|  | ||||
| `als_query.py` requires Python 3 with reasonably recent *sqlalchemy*, *psycopg2*, *geoalchemy2* modules installed (the latter is optional - not required for, e.g., the `log` command).  | ||||
|  | ||||
| Proper installation of these modules requires too much luck to be described here (as even `venv/virtualenv` does not always help - only sometimes). If you succeed - fine; otherwise there is one more method: running from the container where `als_siphon.py` is installed. In the latter case the invocation looks like this: | ||||
|  | ||||
| `$ docker exec SIPHON_CONTAINER als_query.py CMD ...` | ||||
|  | ||||
| Here `SIPHON_CONTAINER` is either the value from the first column of `docker ps` or from the last column of `docker-compose ps`. | ||||
|  | ||||
| ### Addressing PostgreSQL Server <a name="als_query_server"> | ||||
|  | ||||
| Another important aspect is how to access the PostgreSQL database server where the logs were placed. | ||||
|  | ||||
| #### Explicit specification | ||||
|  | ||||
| Using the `--server` (aka `-s`) and `--password` parameters of the `als_query.py` command line. Here are the most probable cases: | ||||
|  | ||||
| 1. `als_query.py` runs inside the `als_siphon.py` container, PostgreSQL runs inside the container named `bulk_postgres` in *docker-compose.yaml* (that's how it is named as of this writing):   | ||||
|   `$ docker exec SIPHON_CONTAINER als_query.py CMD \ `   | ||||
|   `--server [USER@]bulk_postgres[:PORT][?OPTIONS] [--password PASSWORD] ...`   | ||||
|   Here `USER` or `PORT` might be omitted if they are `postgres` and `5432` respectively. `--password PASSWORD` and `OPTIONS` are optional.    | ||||
|   Actually, in this case `--server` and `--password` may be omitted - see below on the use of environment variables. | ||||
|  | ||||
| 2. User/host/port of PostgreSQL server is known:   | ||||
|   `$ [docker exec SIPHON_CONTAINER] als_query CMD \ `   | ||||
|   `--server [USER@]HOST[:PORT][?OPTIONS] [--password PASSWORD] ...`   | ||||
|  | ||||
| 3. `als_query.py` runs outside a container, PostgreSQL runs inside a container:   | ||||
|   `$ als_query.py CMD \ `   | ||||
|   `--server [USER@]^POSTGRES_CONTAINER[:PORT][?OPTIONS] \ `   | ||||
|   `[--password PASSWORD] ...`   | ||||
|   Note the **`^`** before `POSTGRES_CONTAINER`. Here, again, `POSTGRES_CONTAINER` is either a value from the first column of `docker ps` or from the last column of `docker-compose ps` for the container running PostgreSQL (`als_query.py` resolves the container name to an IP address via `docker inspect`). | ||||
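|  | ||||
| For illustration, here are concrete sketches of cases #2 and #3 (the host name is made up; `sslmode` is a standard libpq option; `bulk_postgres` is the development container name mentioned above): | ||||
|  | ||||
| ``` | ||||
| # Case #2: host/port of the PostgreSQL server known explicitly | ||||
| als_query.py log --server 'postgres@db.example.com:5432?sslmode=require' --topics | ||||
|  | ||||
| # Case #3: script outside containers, PostgreSQL inside the 'bulk_postgres' container | ||||
| als_query.py log --server 'postgres@^bulk_postgres' --topics | ||||
| ``` | ||||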
|  | ||||
| I expect #1 to be the common case for development environments, #2 for deployment environments, and #3 for illustrations (for the sake of brevity) or for some lucky conditions. | ||||
|  | ||||
| #### Environment variables | ||||
|  | ||||
| If the `--server` parameter is not specified, `als_query.py` attempts to use the following environment variables: | ||||
|  | ||||
| - `POSTGRES_LOG_USER`, `POSTGRES_HOST`, `POSTGRES_PORT`, `POSTGRES_LOG_PASSWORD` for accessing *AFC_LOGS* database | ||||
| - `POSTGRES_ALS_USER`, `POSTGRES_HOST`, `POSTGRES_PORT`, `POSTGRES_ALS_PASSWORD` for accessing *ALS* database | ||||
|  | ||||
| These environment variables are passed to the container running `als_siphon.py`, so they are a natural choice when running `als_query.py` from there (case #1 above). | ||||
|  | ||||
| Hence for case #1 the `als_query.py` command line would actually look like this:   | ||||
| `$ docker exec SIPHON_CONTAINER als_query.py CMD ...`   | ||||
| where `...` does not contain `--server`. | ||||
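|  | ||||
| Outside the container the same mechanism may be used directly (a sketch; the values shown are assumptions matching a development setup): | ||||
|  | ||||
| ``` | ||||
| # Point als_query.py at the server without --server on the command line | ||||
| export POSTGRES_HOST=bulk_postgres | ||||
| export POSTGRES_PORT=5432 | ||||
| export POSTGRES_LOG_USER=postgres | ||||
| export POSTGRES_LOG_PASSWORD=secret | ||||
| als_query.py log --topics | ||||
| ``` | ||||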
|  | ||||
|  | ||||
| ### `log` Command <a name="als_query_log"> | ||||
|  | ||||
| The `log` command retrieves JSON logs from the *AFC_LOGS* database. Each JSON log belongs to a certain *topic* (a handy term that originated from Kafka). A topic is a string (***lowercase highly recommended, the 'ALS' name must not be used***) that supposedly corresponds to the format (content) of the JSON data. | ||||
|  | ||||
| A topic specifies the name of a table inside the *AFC_LOGS* database. | ||||
|  | ||||
| Since the JSON content may be anything, and PostgreSQL already provides special `SELECT` syntax for accessing JSON data (see e.g. [here](https://www.postgresqltutorial.com/postgresql-tutorial/postgresql-json/) and [here](https://www.javatpoint.com/postgresql-json); google for further assistance), the `log` command is, in fact, a thin wrapper around the `SELECT` command, plus a couple of additional options. | ||||
|  | ||||
| Each table in *AFC_LOGS* has the following columns (this is important when composing `SELECT` statements): | ||||
|  | ||||
| |Column|Content| | ||||
| |------|-------| | ||||
| |time|Time when the log record was made (includes date, time, and timezone)| | ||||
| |source|Entity (e.g. web server) that made the record| | ||||
| |log|JSON log data| | ||||
|  | ||||
| Command format:   | ||||
| `$ [docker exec SIPHON_CONTAINER] als_query.py log OPTIONS [SELECT_BODY]` | ||||
|  | ||||
| |Parameter|Meaning| | ||||
| |---------|-------| | ||||
| |--server/-s **[USER@][^]HOST_OR_CONTAINER[:PORT][?OPTIONS]**|PostgreSQL server connection parameters. See the discussion in the [Addressing PostgreSQL Server](#als_query_server) section. Mandatory unless connection parameters are provided via environment variables| | ||||
| |--password **PASSWORD**|PostgreSQL connection password (if required)| | ||||
| |--topics|List existing topics (database tables)| | ||||
| |--sources **[TOPIC]**|List sources - all or from specific topic| | ||||
| |--format/-f **{bare\|json\|csv}**|Output format for SELECT-based queries: **bare** - unadorned single-column output, **csv** - output as CSV table (default), **json** - output as a JSON list of row dictionaries| | ||||
| |**SELECT_BODY**|SQL SELECT statement body (without the leading `SELECT` and trailing `;`). May be unquoted, but most likely requires quotes because of special symbols like `*`, `>`, etc.| | ||||
|  | ||||
| #### `log` Command Examples <a name="als_query_log_examples"> | ||||
|  | ||||
| Suppose that: | ||||
|  | ||||
| - There are various topics (tables), among which there is topic *foo* (let me remind again that lowercase topic names are recommended), filled with JSONs with a structure similar to this:   | ||||
| ``` | ||||
| { | ||||
|     "a": 42, | ||||
|     "b": [1, 2, 3], | ||||
|     "c": {"d": 57} | ||||
| } | ||||
| ``` | ||||
|  | ||||
| - `als_query.py` runs in the `regression_als_siphon_1` container (YMMV - see the output of `docker-compose ps`). In this case there is no need to pass the `--server` parameter, as it will be taken from environment variables. | ||||
|  | ||||
| Now, here are some possible actions: | ||||
|  | ||||
| - List all topics:   | ||||
|   `$ docker exec regression_als_siphon_1 als_query.py log --topics`   | ||||
|   Note that there is no `--server` parameter here, as `als_query.py` uses the values passed via environment variables. | ||||
|  | ||||
| - Print the content of the *foo* topic (table) in its entirety, using the default CSV format:   | ||||
|   `$ docker exec regression_als_siphon_1 als_query.py log "* from foo"`   | ||||
|   This invokes `SELECT * from foo;` on the *AFC_LOGS* database of the PostgreSQL server. | ||||
|  | ||||
| - Print key names of JSONs of topic *foo*:   | ||||
|   `$ docker exec regression_als_siphon_1 als_query.py log \ `   | ||||
|   `json_object_keys(log) from foo`   | ||||
|   Note that the quotes may be omitted here, as there are no special symbols in the select statement. | ||||
|  | ||||
| - From topic *foo* print values of *c.d* for all records, using bare (unadorned) format:    | ||||
|   `$ docker exec regression_als_siphon_1 als_query.py log \ `   | ||||
|   `-f bare "log->'c'->'d' from foo"`   | ||||
|   Note the quotes around the field names. | ||||
|  | ||||
| - From topic *foo* print only values of *b[0]* for all records where *a* field equals *179*:   | ||||
|   `$ docker exec regression_als_siphon_1 als_query.py log \ `   | ||||
|   `"log->'b'->0 from foo where log->'a' = 179"`   | ||||
|   Note the way list indexing is performed (`->0`). | ||||
|  | ||||
| - Print the maximum value of JSON field *a* in topic *foo*:   | ||||
|   `$ docker exec regression_als_siphon_1 als_query.py log "MAX(log->'a') from foo"`   | ||||
|  | ||||
| - Print log records in a given time range:   | ||||
|   `$ docker exec regression_als_siphon_1 als_query.py log \ `   | ||||
|   `"* from foo where time > '2023-02-08 23:25:54.484174+00:00'" \ `   | ||||
|   `"and time < '2023-02-08 23:28:54.484174+00:00'"` | ||||
als/als_db_schema/ALS.dbml (new file, 290 lines)
							| @@ -0,0 +1,290 @@ | ||||
| // Copyright (C) 2022 Broadcom. All rights reserved. | ||||
| // The term "Broadcom" refers solely to the Broadcom Inc. corporate affiliate | ||||
| // that owns the software below. | ||||
| // This work is licensed under the OpenAFC Project License, a copy of which is | ||||
| // included with this software program. | ||||
|  | ||||
| // Schema of AFC Request/response/config log database | ||||
| // This file is a DBML source code for visualizing the database schema in dbdiagram.io | ||||
|  | ||||
| // Table containing a record for each request message/response message/config(s) set | ||||
| table afc_message [headercolor: #000000, note: 'AFC Request/Response message pair (contains individual requests/responses)'] { | ||||
|   message_id bigserial | ||||
|   month_idx smallint | ||||
|   afc_server serial | ||||
|   rx_time timestamptz | ||||
|   tx_time timestamptz | ||||
|   rx_envelope_digest uuid [note: 'Envelope of AFC Request message'] | ||||
|   tx_envelope_digest uuid [note: 'Envelope of AFC Response message'] | ||||
|  | ||||
|   indexes { | ||||
|     (message_id, month_idx) [pk] | ||||
|     rx_time | ||||
|     tx_time | ||||
|   } | ||||
| } | ||||
|  | ||||
| Ref afc_message_afc_server_ref: afc_message.(afc_server, month_idx) > afc_server.(afc_server_id, month_idx) | ||||
| Ref afc_message_rx_envelope_digest_ref: afc_message.(rx_envelope_digest, month_idx) > rx_envelope.(rx_envelope_digest, month_idx) | ||||
| Ref afc_message_tx_envelope_digest_ref: afc_message.(tx_envelope_digest, month_idx) > tx_envelope.(tx_envelope_digest, month_idx) | ||||
|  | ||||
| // Outer part of request message | ||||
| table rx_envelope [headercolor: #4B82B0, note: 'Envelope (constant part) of AFC Request Message'] { | ||||
|   rx_envelope_digest uuid [note: 'MD5 of envelope_json field in UTF8 encoding'] | ||||
|   month_idx smallint | ||||
|   envelope_json json [note: 'AFC Request JSON with empty availableSpectrumInquiryRequests field'] | ||||
|  | ||||
|   indexes { | ||||
|     (rx_envelope_digest, month_idx) [pk] | ||||
|     rx_envelope_digest [type: hash] | ||||
|   } | ||||
| } | ||||
|  | ||||
| // Outer part of response message | ||||
| table tx_envelope [headercolor: #4B82B0, note: 'Envelope (constant part) of AFC Response Message'] { | ||||
|   tx_envelope_digest uuid [note: 'MD5 of envelope_json field in UTF8 encoding'] | ||||
|   month_idx smallint | ||||
|   envelope_json json [note: 'AFC Response JSON with empty availableSpectrumInquiryRequests field'] | ||||
|  | ||||
|   indexes { | ||||
|     (tx_envelope_digest, month_idx) [pk] | ||||
|     tx_envelope_digest [type: hash] | ||||
|   } | ||||
| } | ||||
|  | ||||
| // Join table between message table (afc_message) and request/response/config table (request_response) | ||||
| // Implements many-to-many relationship, contains variable part of request/response | ||||
| table request_response_in_message [headercolor: #4B82B0, note: 'Associative table for relationship between AFC Request/Response messages and individual requests/responses. Also encapsulates variable part of requests/responses'] { | ||||
|   message_id bigint [note: 'AFC request/response message pair this request/response belongs to'] | ||||
|   request_id text [note: 'ID of request/response within message'] | ||||
|   month_idx smallint | ||||
|   request_response_digest uuid [note: 'Reference to potentially constant part of request/response'] | ||||
|   expire_time timestamptz [note: 'Response expiration time'] | ||||
|  | ||||
|   indexes { | ||||
|     (message_id, request_id, month_idx) [pk] | ||||
|     request_id | ||||
|     request_response_digest | ||||
|     expire_time | ||||
|   } | ||||
| } | ||||
|  | ||||
| Ref request_response_in_message_message_id_ref: request_response_in_message.(message_id, month_idx) > afc_message.(message_id, month_idx) | ||||
| Ref request_response_in_message_request_response_digest_ref: request_response_in_message.(request_response_digest, month_idx) > request_response.(request_response_digest, month_idx) | ||||
|  | ||||
| // Request/response/config - constant part | ||||
| table request_response [headercolor: #2D6512, note: 'Potentially constant part of request/response'] { | ||||
|   request_response_digest uuid [note: 'MD5 computed over request/response with requestId and availabilityExpireTime fields set to empty'] | ||||
|   month_idx smallint | ||||
|   afc_config_text_digest uuid [note: 'MD5 over used AFC Config text representation'] | ||||
|   customer_id integer [note: 'AP vendor'] | ||||
|   uls_data_version_id integer [note: 'Version of used ULS data'] | ||||
|   geo_data_version_id integer [note: 'Version of used geospatial data'] | ||||
|   request_json_digest uuid [note: 'MD5 of request JSON with empty requestId'] | ||||
|   response_json_digest uuid [note: 'MD5 of response JSON with empty requestId and availabilityExpireTime'] | ||||
|   device_descriptor_digest uuid [note: 'MD5 of device descriptor (AP) related part of request JSON'] | ||||
|   location_digest uuid [note: 'MD5 of location-related part of request JSON'] | ||||
|   response_code int | ||||
|   response_description text [note: 'Optional response code short description. Null for success'] | ||||
|   response_data text [note: 'Optional supplemental failure information. Optional comma-separated list of missing/invalid/unexpected parameters, etc.'] | ||||
|  | ||||
|   indexes { | ||||
|     (request_response_digest, month_idx) [pk] | ||||
|     request_response_digest [type: hash] | ||||
|     afc_config_text_digest | ||||
|     customer_id | ||||
|     device_descriptor_digest | ||||
|     location_digest | ||||
|     response_code | ||||
|     response_description | ||||
|     response_data | ||||
|   } | ||||
| } | ||||
|  | ||||
| Ref request_response_afc_config_text_digest_ref: request_response.(afc_config_text_digest, month_idx) > afc_config.(afc_config_text_digest, month_idx) | ||||
| Ref request_response_customer_id_ref: request_response.(customer_id, month_idx) > customer.(customer_id, month_idx) | ||||
| Ref request_response_uls_data_version_id_ref: request_response.(uls_data_version_id, month_idx) > uls_data_version.(uls_data_version_id, month_idx) | ||||
| Ref request_response_geo_data_version_id_ref: request_response.(geo_data_version_id, month_idx) > geo_data_version.(geo_data_version_id, month_idx) | ||||
| Ref request_response_request_json_digest_ref: request_response.(request_json_digest, month_idx) > compressed_json.(compressed_json_digest, month_idx) | ||||
| Ref request_response_response_json_digest_ref: request_response.(response_json_digest, month_idx) > compressed_json.(compressed_json_digest, month_idx) | ||||
| Ref request_response_device_descriptor_digest_ref: request_response.(device_descriptor_digest, month_idx) > device_descriptor.(device_descriptor_digest, month_idx) | ||||
| Ref request_response_location_digest_ref: request_response.(location_digest, month_idx) > location.(location_digest, month_idx) | ||||
|  | ||||
| // AP device descriptor | ||||
| table device_descriptor [headercolor: #2D6512, note: 'Information about device (e.g. AP)'] { | ||||
|   device_descriptor_digest uuid [note: 'MD5 over parts of request JSON pertinent to AP'] | ||||
|   month_idx smallint | ||||
|   serial_number text [note: 'AP serial number'] | ||||
|   certifications_digest uuid [note: 'Device certifications'] | ||||
|  | ||||
|   indexes { | ||||
|     (device_descriptor_digest, month_idx) [pk] | ||||
|     device_descriptor_digest [type: hash] | ||||
|     serial_number | ||||
|     certifications_digest | ||||
|   } | ||||
| } | ||||
|  | ||||
| Ref device_descriptor_certifications_digest_ref: device_descriptor.(certifications_digest, month_idx) <> certification.(certifications_digest, month_idx) | ||||
|  | ||||
| // Single certification | ||||
| table certification [headercolor: #79AD51, note: 'Element of certifications list'] { | ||||
|   certifications_digest uuid [note: 'MD5 of certification list json'] | ||||
|   certification_index smallint [note: 'Index in certification list'] | ||||
|   month_idx smallint | ||||
|   ruleset_id text [note: 'Name of rules for which AP certified (equivalent of region)'] | ||||
|   certification_id text [note: 'ID of certification (equivalent of manufacturer)'] | ||||
|  | ||||
|   indexes { | ||||
|     (certifications_digest, certification_index, month_idx) [pk] | ||||
|     certifications_digest [type: hash] | ||||
|     ruleset_id | ||||
|     certification_id | ||||
|   } | ||||
| } | ||||
|  | ||||
| // Compressed text of constant part of request or response | ||||
| table compressed_json [headercolor: #2D6512, note: 'Compressed body of request or response'] { | ||||
|   compressed_json_digest uuid [note: 'MD5 hash of compressed data'] | ||||
|   month_idx smallint | ||||
|   compressed_json_data bytea [note: 'Compressed data'] | ||||
|  | ||||
|   indexes { | ||||
|     (compressed_json_digest, month_idx) [pk] | ||||
|     compressed_json_digest [type: hash] | ||||
|   } | ||||
| } | ||||
|  | ||||
| // Customer information | ||||
| table customer [headercolor: #79AD51, note: 'Customer aka vendor aka user'] { | ||||
|   customer_id serial | ||||
|   month_idx smallint | ||||
|   customer_name text [note: 'Its name'] | ||||
|  | ||||
|   indexes { | ||||
|     (customer_id, month_idx) [pk] | ||||
|     customer_name | ||||
|   } | ||||
| } | ||||
|  | ||||
| // AP location information | ||||
| table location [headercolor: #2D6512, note: 'AP location'] { | ||||
|   location_digest uuid [note: 'MD5 computed over location part of request JSON'] | ||||
|   month_idx smallint | ||||
|   location_wgs84 geography(POINT,4326) [note: 'AP area center (WGS84 coordinates)'] | ||||
|   location_uncertainty_m real [note: 'Radius of AP uncertainty area in meters'] | ||||
|   location_type text [note: 'Ellipse/LinearPolygon/RadialPolygon'] | ||||
|   deployment_type int [note: '0/1/2 for unknown/indoor/outdoor'] | ||||
|   height_m real [note: 'AP elevation in meters'] | ||||
|   height_uncertainty_m real [note: 'Elevation uncertainty in meters'] | ||||
|   height_type text [note: 'Elevation type'] | ||||
|  | ||||
|   indexes { | ||||
|     (location_digest, month_idx) [pk] | ||||
|     location_digest [type: hash] | ||||
|     location_wgs84 | ||||
|     location_type | ||||
|     height_m | ||||
|     height_type | ||||
|   } | ||||
| } | ||||
|  | ||||
| // AFC Config | ||||
| table afc_config [headercolor: #79AD51, note: 'AFC Config'] { | ||||
|   afc_config_text_digest uuid [note: 'MD5 computed over text representation'] | ||||
|   month_idx smallint | ||||
|   afc_config_text text [note: 'Text representation of AFC Config'] | ||||
|   afc_config_json json [note: 'JSON representation of AFC Config'] | ||||
|  | ||||
|   indexes { | ||||
|     (afc_config_text_digest, month_idx) [pk] | ||||
|     afc_config_text_digest [type: hash] | ||||
|   } | ||||
| } | ||||
|  | ||||
| // Geodetic data version | ||||
| table geo_data_version [headercolor: #79AD51, note: 'Version of geospatial data'] { | ||||
|   geo_data_version_id serial | ||||
|   month_idx smallint | ||||
|   geo_data_version text | ||||
|  | ||||
|   indexes { | ||||
|     (geo_data_version_id, month_idx) [pk] | ||||
|     (geo_data_version, month_idx) [unique] | ||||
|     geo_data_version | ||||
|   } | ||||
| } | ||||
|  | ||||
| // ULS data version | ||||
| table uls_data_version [headercolor: #79AD51, note: 'Version of ULS data'] { | ||||
|   uls_data_version_id serial | ||||
|   month_idx smallint | ||||
|   uls_data_version text | ||||
|  | ||||
|   indexes { | ||||
|     (uls_data_version_id, month_idx) [pk] | ||||
|     (uls_data_version, month_idx) [unique] | ||||
|     uls_data_version | ||||
|   } | ||||
| } | ||||
|  | ||||
| // PSD result | ||||
| table max_psd [headercolor: #990D0D, note: 'PSD result'] { | ||||
|   request_response_digest uuid [note: 'Request this result belongs to'] | ||||
|   month_idx smallint | ||||
|   low_frequency_mhz smallint | ||||
|   high_frequency_mhz smallint | ||||
|   max_psd_dbm_mhz real | ||||
|  | ||||
|   indexes { | ||||
|     (request_response_digest, month_idx, low_frequency_mhz, high_frequency_mhz) [pk] | ||||
|     request_response_digest [type: hash] | ||||
|     low_frequency_mhz | ||||
|     high_frequency_mhz | ||||
|     max_psd_dbm_mhz | ||||
|   } | ||||
| } | ||||
|  | ||||
| Ref max_psd_request_response_digest_ref: max_psd.(request_response_digest, month_idx) > request_response.(request_response_digest, month_idx) | ||||
|  | ||||
| // EIRP result | ||||
| table max_eirp [headercolor: #990D0D, note: 'EIRP result'] { | ||||
|   request_response_digest uuid [note: 'Request this result belongs to'] | ||||
|   month_idx smallint | ||||
|   op_class smallint | ||||
|   channel smallint | ||||
|   max_eirp_dbm real | ||||
|  | ||||
|   indexes { | ||||
|     (request_response_digest, month_idx, op_class, channel) [pk] | ||||
|     request_response_digest [type: hash] | ||||
|     op_class | ||||
|     channel | ||||
|     max_eirp_dbm | ||||
|   } | ||||
| } | ||||
|  | ||||
| Ref max_eirp_request_response_digest_ref: max_eirp.(request_response_digest, month_idx) > request_response.(request_response_digest, month_idx) | ||||
|  | ||||
| // AFC Server | ||||
| table afc_server [headercolor: #4B82B0] { | ||||
|   afc_server_id serial | ||||
|   month_idx smallint | ||||
|   afc_server_name text | ||||
|  | ||||
|   indexes { | ||||
|     (afc_server_id, month_idx) [pk] | ||||
|     (afc_server_name, month_idx) [unique] | ||||
|     afc_server_name | ||||
|   } | ||||
| } | ||||
|  | ||||
| // Message decoding problems | ||||
| table decode_error { | ||||
|   id bigserial [pk] | ||||
|   time timestamptz | ||||
|   msg text | ||||
|   code_line integer | ||||
|   data text | ||||
|   month_idx smallint | ||||
| } | ||||
als/als_db_schema/ALS.png (new binary file, 235 KiB; not shown)
als/als_db_schema/ALS_raw.sql (new file, 397 lines)
							| @@ -0,0 +1,397 @@ | ||||
| CREATE TABLE "afc_message" ( | ||||
|   "message_id" bigserial, | ||||
|   "month_idx" smallint, | ||||
|   "afc_server" serial, | ||||
|   "rx_time" timestamptz, | ||||
|   "tx_time" timestamptz, | ||||
|   "rx_envelope_digest" uuid, | ||||
|   "tx_envelope_digest" uuid, | ||||
|   PRIMARY KEY ("message_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "rx_envelope" ( | ||||
|   "rx_envelope_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "envelope_json" json, | ||||
|   PRIMARY KEY ("rx_envelope_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "tx_envelope" ( | ||||
|   "tx_envelope_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "envelope_json" json, | ||||
|   PRIMARY KEY ("tx_envelope_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "request_response_in_message" ( | ||||
|   "message_id" bigint, | ||||
|   "request_id" text, | ||||
|   "month_idx" smallint, | ||||
|   "request_response_digest" uuid, | ||||
|   "expire_time" timestamptz, | ||||
|   PRIMARY KEY ("message_id", "request_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "request_response" ( | ||||
|   "request_response_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "afc_config_text_digest" uuid, | ||||
|   "customer_id" integer, | ||||
|   "uls_data_version_id" integer, | ||||
|   "geo_data_version_id" integer, | ||||
|   "request_json_digest" uuid, | ||||
|   "response_json_digest" uuid, | ||||
|   "device_descriptor_digest" uuid, | ||||
|   "location_digest" uuid, | ||||
|   "response_code" int, | ||||
|   "response_description" text, | ||||
|   "response_data" text, | ||||
|   PRIMARY KEY ("request_response_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "device_descriptor" ( | ||||
|   "device_descriptor_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "serial_number" text, | ||||
|   "certifications_digest" uuid, | ||||
|   PRIMARY KEY ("device_descriptor_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "certification" ( | ||||
|   "certifications_digest" uuid, | ||||
|   "certification_index" smallint, | ||||
|   "month_idx" smallint, | ||||
|   "ruleset_id" text, | ||||
|   "certification_id" text, | ||||
|   PRIMARY KEY ("certifications_digest", "certification_index", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "compressed_json" ( | ||||
|   "compressed_json_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "compressed_json_data" bytea, | ||||
|   PRIMARY KEY ("compressed_json_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "customer" ( | ||||
|   "customer_id" serial, | ||||
|   "month_idx" smallint, | ||||
|   "customer_name" text, | ||||
|   PRIMARY KEY ("customer_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "location" ( | ||||
|   "location_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "location_wgs84" geography(POINT,4326), | ||||
|   "location_uncertainty_m" real, | ||||
|   "location_type" text, | ||||
|   "deployment_type" int, | ||||
|   "height_m" real, | ||||
|   "height_uncertainty_m" real, | ||||
|   "height_type" text, | ||||
|   PRIMARY KEY ("location_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "afc_config" ( | ||||
|   "afc_config_text_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "afc_config_text" text, | ||||
|   "afc_config_json" json, | ||||
|   PRIMARY KEY ("afc_config_text_digest", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "geo_data_version" ( | ||||
|   "geo_data_version_id" serial, | ||||
|   "month_idx" smallint, | ||||
|   "geo_data_version" text, | ||||
|   PRIMARY KEY ("geo_data_version_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "uls_data_version" ( | ||||
|   "uls_data_version_id" serial, | ||||
|   "month_idx" smallint, | ||||
|   "uls_data_version" text, | ||||
|   PRIMARY KEY ("uls_data_version_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "max_psd" ( | ||||
|   "request_response_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "low_frequency_mhz" smallint, | ||||
|   "high_frequency_mhz" smallint, | ||||
|   "max_psd_dbm_mhz" real, | ||||
|   PRIMARY KEY ("request_response_digest", "month_idx", "low_frequency_mhz", "high_frequency_mhz") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "max_eirp" ( | ||||
|   "request_response_digest" uuid, | ||||
|   "month_idx" smallint, | ||||
|   "op_class" smallint, | ||||
|   "channel" smallint, | ||||
|   "max_eirp_dbm" real, | ||||
|   PRIMARY KEY ("request_response_digest", "month_idx", "op_class", "channel") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "afc_server" ( | ||||
|   "afc_server_id" serial, | ||||
|   "month_idx" smallint, | ||||
|   "afc_server_name" text, | ||||
|   PRIMARY KEY ("afc_server_id", "month_idx") | ||||
| ); | ||||
|  | ||||
| CREATE TABLE "decode_error" ( | ||||
|   "id" bigserial PRIMARY KEY, | ||||
|   "time" timestamptz, | ||||
|   "msg" text, | ||||
|   "code_line" integer, | ||||
|   "data" text, | ||||
|   "month_idx" smallint | ||||
| ); | ||||
|  | ||||
| CREATE INDEX ON "afc_message" ("rx_time"); | ||||
|  | ||||
| CREATE INDEX ON "afc_message" ("tx_time"); | ||||
|  | ||||
| CREATE INDEX ON "rx_envelope" USING HASH ("rx_envelope_digest"); | ||||
|  | ||||
| CREATE INDEX ON "tx_envelope" USING HASH ("tx_envelope_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response_in_message" ("request_id"); | ||||
|  | ||||
| CREATE INDEX ON "request_response_in_message" ("request_response_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response_in_message" ("expire_time"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" USING HASH ("request_response_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("afc_config_text_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("customer_id"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("device_descriptor_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("location_digest"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("response_code"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("response_description"); | ||||
|  | ||||
| CREATE INDEX ON "request_response" ("response_data"); | ||||
|  | ||||
| CREATE INDEX ON "device_descriptor" USING HASH ("device_descriptor_digest"); | ||||
|  | ||||
| CREATE INDEX ON "device_descriptor" ("serial_number"); | ||||
|  | ||||
| CREATE INDEX ON "device_descriptor" ("certifications_digest"); | ||||
|  | ||||
| CREATE INDEX ON "certification" USING HASH ("certifications_digest"); | ||||
|  | ||||
| CREATE INDEX ON "certification" ("ruleset_id"); | ||||
|  | ||||
| CREATE INDEX ON "certification" ("certification_id"); | ||||
|  | ||||
| CREATE INDEX ON "compressed_json" USING HASH ("compressed_json_digest"); | ||||
|  | ||||
| CREATE INDEX ON "customer" ("customer_name"); | ||||
|  | ||||
| CREATE INDEX ON "location" USING HASH ("location_digest"); | ||||
|  | ||||
| CREATE INDEX ON "location" ("location_wgs84"); | ||||
|  | ||||
| CREATE INDEX ON "location" ("location_type"); | ||||
|  | ||||
| CREATE INDEX ON "location" ("height_m"); | ||||
|  | ||||
| CREATE INDEX ON "location" ("height_type"); | ||||
|  | ||||
| CREATE INDEX ON "afc_config" USING HASH ("afc_config_text_digest"); | ||||
|  | ||||
| CREATE UNIQUE INDEX ON "geo_data_version" ("geo_data_version", "month_idx"); | ||||
|  | ||||
| CREATE INDEX ON "geo_data_version" ("geo_data_version"); | ||||
|  | ||||
| CREATE UNIQUE INDEX ON "uls_data_version" ("uls_data_version", "month_idx"); | ||||
|  | ||||
| CREATE INDEX ON "uls_data_version" ("uls_data_version"); | ||||
|  | ||||
| CREATE INDEX ON "max_psd" USING HASH ("request_response_digest"); | ||||
|  | ||||
| CREATE INDEX ON "max_psd" ("low_frequency_mhz"); | ||||
|  | ||||
| CREATE INDEX ON "max_psd" ("high_frequency_mhz"); | ||||
|  | ||||
| CREATE INDEX ON "max_psd" ("max_psd_dbm_mhz"); | ||||
|  | ||||
| CREATE INDEX ON "max_eirp" USING HASH ("request_response_digest"); | ||||
|  | ||||
| CREATE INDEX ON "max_eirp" ("op_class"); | ||||
|  | ||||
| CREATE INDEX ON "max_eirp" ("channel"); | ||||
|  | ||||
| CREATE INDEX ON "max_eirp" ("max_eirp_dbm"); | ||||
|  | ||||
| CREATE UNIQUE INDEX ON "afc_server" ("afc_server_name", "month_idx"); | ||||
|  | ||||
| CREATE INDEX ON "afc_server" ("afc_server_name"); | ||||
|  | ||||
| COMMENT ON TABLE "afc_message" IS 'AFC Request/Response message pair (contain individual requests/responses)'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_message"."rx_envelope_digest" IS 'Envelope of AFC Request message'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_message"."tx_envelope_digest" IS 'Envelope of AFC Response message'; | ||||
|  | ||||
| COMMENT ON TABLE "rx_envelope" IS 'Envelope (constant part) of AFC Request Message'; | ||||
|  | ||||
| COMMENT ON COLUMN "rx_envelope"."rx_envelope_digest" IS 'MD5 of envelope_json field in UTF8 encoding'; | ||||
|  | ||||
| COMMENT ON COLUMN "rx_envelope"."envelope_json" IS 'AFC Request JSON with empty availableSpectrumInquiryRequests field'; | ||||
|  | ||||
| COMMENT ON TABLE "tx_envelope" IS 'Envelope (constant part) of AFC Response Message'; | ||||
|  | ||||
| COMMENT ON COLUMN "tx_envelope"."tx_envelope_digest" IS 'MD5 of envelope_json field in UTF8 encoding'; | ||||
|  | ||||
| COMMENT ON COLUMN "tx_envelope"."envelope_json" IS 'AFC Response JSON with empty availableSpectrumInquiryRequests field'; | ||||
|  | ||||
| COMMENT ON TABLE "request_response_in_message" IS 'Associative table for relatonship between AFC Request/Response messages and individual requests/responses. Also encapsulates variable part of requests/responses'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response_in_message"."message_id" IS 'AFC request/response message pair this request/response belongs'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response_in_message"."request_id" IS 'ID of request/response within message'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response_in_message"."request_response_digest" IS 'Reference to otentially constant part of request/response'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response_in_message"."expire_time" IS 'Response expiration time'; | ||||
|  | ||||
| COMMENT ON TABLE "request_response" IS 'Potentiially constant part of request/response'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."request_response_digest" IS 'MD5 computed over request/response with requestId and availabilityExpireTime fields set to empty'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."afc_config_text_digest" IS 'MD5 over used AFC Config text represnetation'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."customer_id" IS 'AP vendor'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."uls_data_version_id" IS 'Version of used ULS data'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."geo_data_version_id" IS 'Version of used geospatial data'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."request_json_digest" IS 'MD5 of request JSON with empty requestId'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."response_json_digest" IS 'MD5 of resaponse JSON with empty requesatId and availabilityExpireTime'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."device_descriptor_digest" IS 'MD5 of device descriptor (AP) related part of request JSON'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."location_digest" IS 'MD5 of location-related part of request JSON'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."response_description" IS 'Optional response code short description. Null for success'; | ||||
|  | ||||
| COMMENT ON COLUMN "request_response"."response_data" IS 'Optional supplemental failure information. Optional comma-separated list of missing/invalid/unexpected parameters, etc.'; | ||||
|  | ||||
| COMMENT ON TABLE "device_descriptor" IS 'Information about device (e.g. AP)'; | ||||
|  | ||||
| COMMENT ON COLUMN "device_descriptor"."device_descriptor_digest" IS 'MD5 over parts of requesat JSON pertinent to AP'; | ||||
|  | ||||
| COMMENT ON COLUMN "device_descriptor"."serial_number" IS 'AP serial number'; | ||||
|  | ||||
| COMMENT ON COLUMN "device_descriptor"."certifications_digest" IS 'Device certifications'; | ||||
|  | ||||
| COMMENT ON TABLE "certification" IS 'Element of certifications list'; | ||||
|  | ||||
| COMMENT ON COLUMN "certification"."certifications_digest" IS 'MD5 of certification list json'; | ||||
|  | ||||
| COMMENT ON COLUMN "certification"."certification_index" IS 'Index in certification list'; | ||||
|  | ||||
| COMMENT ON COLUMN "certification"."ruleset_id" IS 'Name of rules for which AP certified (equivalent of region)'; | ||||
|  | ||||
| COMMENT ON COLUMN "certification"."certification_id" IS 'ID of certification (equivalent of manufacturer)'; | ||||
|  | ||||
| COMMENT ON TABLE "compressed_json" IS 'Compressed body of request or response'; | ||||
|  | ||||
| COMMENT ON COLUMN "compressed_json"."compressed_json_digest" IS 'MD5 hash of compressed data'; | ||||
|  | ||||
| COMMENT ON COLUMN "compressed_json"."compressed_json_data" IS 'Compressed data'; | ||||
|  | ||||
| COMMENT ON TABLE "customer" IS 'Customer aka vendor aka user'; | ||||
|  | ||||
| COMMENT ON COLUMN "customer"."customer_name" IS 'Its name'; | ||||
|  | ||||
| COMMENT ON TABLE "location" IS 'AP location'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."location_digest" IS 'MD5 computed over location part of request JSON'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."location_wgs84" IS 'AP area center (WGS84 coordinates)'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."location_uncertainty_m" IS 'Radius of AP uncertainty area in meters'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."location_type" IS 'Ellipse/LinearPolygon/RadialPolygon'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."deployment_type" IS '0/1/2 for unknown/indoor/outdoor'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."height_m" IS 'AP elevation in meters'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."height_uncertainty_m" IS 'Elevation uncertainty in meters'; | ||||
|  | ||||
| COMMENT ON COLUMN "location"."height_type" IS 'Elevation type'; | ||||
|  | ||||
| COMMENT ON TABLE "afc_config" IS 'AFC Config'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_config"."afc_config_text_digest" IS 'MD5 computed over text representation'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_config"."afc_config_text" IS 'Text representation of AFC Config'; | ||||
|  | ||||
| COMMENT ON COLUMN "afc_config"."afc_config_json" IS 'JSON representation of AFC Config'; | ||||
|  | ||||
| COMMENT ON TABLE "geo_data_version" IS 'Version of geospatial data'; | ||||
|  | ||||
| COMMENT ON TABLE "uls_data_version" IS 'Version of ULS data"'; | ||||
|  | ||||
| COMMENT ON TABLE "max_psd" IS 'PSD result'; | ||||
|  | ||||
| COMMENT ON COLUMN "max_psd"."request_response_digest" IS 'Request this result belongs to'; | ||||
|  | ||||
| COMMENT ON TABLE "max_eirp" IS 'EIRP result'; | ||||
|  | ||||
| COMMENT ON COLUMN "max_eirp"."request_response_digest" IS 'Request this result belongs to'; | ||||
|  | ||||
| ALTER TABLE "afc_message" ADD CONSTRAINT "afc_message_afc_server_ref" FOREIGN KEY ("afc_server", "month_idx") REFERENCES "afc_server" ("afc_server_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "afc_message" ADD CONSTRAINT "afc_message_rx_envelope_digest_ref" FOREIGN KEY ("rx_envelope_digest", "month_idx") REFERENCES "rx_envelope" ("rx_envelope_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "afc_message" ADD CONSTRAINT "afc_message_tx_envelope_digest_ref" FOREIGN KEY ("tx_envelope_digest", "month_idx") REFERENCES "tx_envelope" ("tx_envelope_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response_in_message" ADD CONSTRAINT "request_response_in_message_message_id_ref" FOREIGN KEY ("message_id", "month_idx") REFERENCES "afc_message" ("message_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response_in_message" ADD CONSTRAINT "request_response_in_message_request_response_digest_ref" FOREIGN KEY ("request_response_digest", "month_idx") REFERENCES "request_response" ("request_response_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_afc_config_text_digest_ref" FOREIGN KEY ("afc_config_text_digest", "month_idx") REFERENCES "afc_config" ("afc_config_text_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_customer_id_ref" FOREIGN KEY ("customer_id", "month_idx") REFERENCES "customer" ("customer_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_uls_data_version_id_ref" FOREIGN KEY ("uls_data_version_id", "month_idx") REFERENCES "uls_data_version" ("uls_data_version_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_geo_data_version_id_ref" FOREIGN KEY ("geo_data_version_id", "month_idx") REFERENCES "geo_data_version" ("geo_data_version_id", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_request_json_digest_ref" FOREIGN KEY ("request_json_digest", "month_idx") REFERENCES "compressed_json" ("compressed_json_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_response_json_digest_ref" FOREIGN KEY ("response_json_digest", "month_idx") REFERENCES "compressed_json" ("compressed_json_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_device_descriptor_digest_ref" FOREIGN KEY ("device_descriptor_digest", "month_idx") REFERENCES "device_descriptor" ("device_descriptor_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "request_response" ADD CONSTRAINT "request_response_location_digest_ref" FOREIGN KEY ("location_digest", "month_idx") REFERENCES "location" ("location_digest", "month_idx"); | ||||
|  | ||||
| CREATE TABLE "device_descriptor_certification" ( | ||||
|   "device_descriptor_certifications_digest" uuid, | ||||
|   "device_descriptor_month_idx" smallint, | ||||
|   "certification_certifications_digest" uuid, | ||||
|   "certification_month_idx" smallint, | ||||
|   PRIMARY KEY ("device_descriptor_certifications_digest", "device_descriptor_month_idx", "certification_certifications_digest", "certification_month_idx") | ||||
| ); | ||||
|  | ||||
| ALTER TABLE "device_descriptor_certification" ADD FOREIGN KEY ("device_descriptor_certifications_digest", "device_descriptor_month_idx") REFERENCES "device_descriptor" ("certifications_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "device_descriptor_certification" ADD FOREIGN KEY ("certification_certifications_digest", "certification_month_idx") REFERENCES "certification" ("certifications_digest", "month_idx"); | ||||
|  | ||||
|  | ||||
| ALTER TABLE "max_psd" ADD CONSTRAINT "max_psd_request_response_digest_ref" FOREIGN KEY ("request_response_digest", "month_idx") REFERENCES "request_response" ("request_response_digest", "month_idx"); | ||||
|  | ||||
| ALTER TABLE "max_eirp" ADD CONSTRAINT "max_eirp_request_response_digest_ref" FOREIGN KEY ("request_response_digest", "month_idx") REFERENCES "request_response" ("request_response_digest", "month_idx"); | ||||
als/als_db_schema/als_rectifier.awk (new file, 35 lines)
							| @@ -0,0 +1,35 @@ | ||||
| #!/bin/awk -f | ||||
|  | ||||
| # Copyright (C) 2022 Broadcom. All rights reserved. | ||||
| # The term "Broadcom" refers solely to the Broadcom Inc. corporate affiliate | ||||
| # that owns the software below. | ||||
| # This work is licensed under the OpenAFC Project License, a copy of which is | ||||
| # included with this software program. | ||||
|  | ||||
| BEGIN { | ||||
| 	print "/*" | ||||
| 	print " * Copyright (C) 2022 Broadcom. All rights reserved." | ||||
| 	print " * The term \"Broadcom\" refers solely to the Broadcom Inc. corporate affiliate" | ||||
| 	print " * that owns the software below." | ||||
| 	print " * This work is licensed under the OpenAFC Project License, a copy of which is" | ||||
| 	print " * included with this software program." | ||||
| 	print " *" | ||||
| 	print " * This file creates ALS (AFC Request/Response/Config Logging System) database on PostgreSQL+PostGIS server" | ||||
| 	print " * This file is generated, direct editing is not recommended." | ||||
| 	print " * Intended maintenance sequence is as follows:" | ||||
| 	print " *   1. Load (copypaste) als_db_schema/ALS.dbml into dbdiagram.io" | ||||
| 	print " *   2. Modify as needed" | ||||
| 	print " *   3. Save (copypaste) modified sources back to als_db_schema/ALS.dbml" | ||||
| 	print " *   4. Also export schema in PostgreSQL format as als_db_schema/ALS_raw.sql" | ||||
| 	print " *   5. Rectify exported schema with als_rectifier.awk (awk -f als_db_schema/als_rectifier.awk < als_db_schema/ALS_raw.sql > ALS.sql)" | ||||
| 	print " */" | ||||
| 	print "" | ||||
| 	print "CREATE EXTENSION postgis;" | ||||
| 	print "" | ||||
| 	RS=ORS=";" | ||||
| } | ||||
|  | ||||
| /\w+ TABLE \"device_descriptor_certification\"/ {next} | ||||
| /\w+ TABLE \"device_descriptor_regulatory_rule\"/ {next} | ||||
|  | ||||
| { print } | ||||
als/als_query.py (new executable file, 324 lines)
							| @@ -0,0 +1,324 @@ | ||||
| #!/usr/bin/env python3 | ||||
| # Tool for querying ALS database | ||||
|  | ||||
| # Copyright (C) 2022 Broadcom. All rights reserved. | ||||
| # The term "Broadcom" refers solely to the Broadcom Inc. corporate affiliate | ||||
| # that owns the software below. | ||||
| # This work is licensed under the OpenAFC Project License, a copy of which is | ||||
| # included with this software program. | ||||
|  | ||||
| import argparse | ||||
| import csv | ||||
| import datetime | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import psycopg2 | ||||
| import re | ||||
| import sqlalchemy as sa | ||||
| import sqlalchemy.dialects.postgresql as sa_pg | ||||
| import subprocess | ||||
| import sys | ||||
| from typing import Any, List, NamedTuple, Optional, Set | ||||
|  | ||||
| try: | ||||
|     import geoalchemy2 as ga                        # type: ignore | ||||
| except ImportError: | ||||
|     pass | ||||
|  | ||||
| VERSION = "0.1" | ||||
|  | ||||
| DEFAULT_USER = "postgres" | ||||
| DEFAULT_PORT = 5432 | ||||
|  | ||||
| ALS_DB = "ALS" | ||||
| LOG_DB = "AFC_LOGS" | ||||
|  | ||||
| # Environment variables holding parts of database connection string | ||||
| DbEnv = \ | ||||
|     NamedTuple( | ||||
|         "DbEnv", | ||||
|         [ | ||||
|             # Host name | ||||
|             ("host", str), | ||||
|             # Port | ||||
|             ("port", str), | ||||
|             # Username | ||||
|             ("user", str), | ||||
|             # Password | ||||
|             ("password", str), | ||||
|             # Options | ||||
|             ("options", str)]) | ||||
|  | ||||
| # Environment variable names for ALS and JSON log databases' connection strings | ||||
| DB_ENVS = {ALS_DB: | ||||
|            DbEnv(host="POSTGRES_HOST", port="POSTGRES_PORT", | ||||
|                  user="POSTGRES_ALS_USER", password="POSTGRES_ALS_PASSWORD", | ||||
|                  options="POSTGRES_ALS_OPTIONS"), | ||||
|            LOG_DB: | ||||
|            DbEnv(host="POSTGRES_HOST", port="POSTGRES_PORT", | ||||
|                  user="POSTGRES_LOG_USER", password="POSTGRES_LOG_PASSWORD", | ||||
|                  options="POSTGRES_LOG_OPTIONS")} | ||||
|  | ||||
|  | ||||
| def error(msg: str) -> None: | ||||
|     """ Prints given msg as error message and exit abnormally """ | ||||
|     logging.error(msg) | ||||
|     sys.exit(1) | ||||
|  | ||||
|  | ||||
| def error_if(cond: Any, msg: str) -> None: | ||||
|     """ If condition evaluates to true prints given msg as error message and | ||||
|     exits abnormally """ | ||||
|     if cond: | ||||
|         error(msg) | ||||
|  | ||||
|  | ||||
| class DbConn: | ||||
|     """ Database connection encapsulation | ||||
|  | ||||
|     Attributes: | ||||
|     db_name  -- Database name | ||||
|     engine   -- Database engine | ||||
|     metadata -- Database metadata | ||||
|     conn     -- Database connection | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, conn_str: Optional[str], password: Optional[str], | ||||
|                  db_name: str) -> None: | ||||
|         """ Constructor | ||||
|  | ||||
|         Arguments: | ||||
|         conn_str -- Abbreviated connection string, as specified in command | ||||
|                     line. None means take from environment variable | ||||
|         password -- Optional password | ||||
|         db_name  -- Database name | ||||
|         """ | ||||
|         self.db_name = db_name | ||||
|  | ||||
|         if conn_str: | ||||
|             m = re.match( | ||||
|                 r"^(?P<user>[^ :\?]+@)?" | ||||
|                 r"(?P<cont>\^)?(?P<host>[^ :?]+)" | ||||
|                 r"(:(?P<port>\d+))?" | ||||
|                 r"(?P<options>\?.+)?$", | ||||
|                 conn_str) | ||||
|             error_if(not m, f"Server string '{conn_str}' has invalid format") | ||||
|             assert m is not None | ||||
|  | ||||
|             user = m.group("user") or DEFAULT_USER | ||||
|             host = m.group("host") | ||||
|             port = m.group("port") or str(DEFAULT_PORT) | ||||
|             options = m.group("options") or "" | ||||
|             if m.group("cont"): | ||||
|                 try: | ||||
|                     insp_str = \ | ||||
|                         subprocess.check_output(["docker", "inspect", host]) | ||||
|                 except (OSError, subprocess.CalledProcessError) as ex: | ||||
|                     error(f"Failed to inspect container '{host}': {ex}") | ||||
|                 insp = json.loads(insp_str) | ||||
|                 try: | ||||
|                     networks = insp[0]["NetworkSettings"]["Networks"] | ||||
|                     host = networks[list(networks.keys())[0]]["IPAddress"] | ||||
|                 except (LookupError, TypeError, ValueError) as ex: | ||||
|                     error(f"Failed to find server IP address in container " | ||||
|                           f"inspection: {ex}") | ||||
|         else: | ||||
|             db_env = DB_ENVS[db_name] | ||||
|             error_if(db_env.host not in os.environ, | ||||
|                      f"PostgreSQL server neither specified explicitly (via " | ||||
|                      f"--server parameter) nor via environment (via " | ||||
|                      f"'{db_env.host}' variable and related ones)") | ||||
|             host = os.environ[db_env.host] | ||||
|             port = os.environ.get(db_env.port, str(DEFAULT_PORT)) | ||||
|             user = os.environ.get(db_env.user, str(DEFAULT_USER)) | ||||
|             options = os.environ.get(db_env.options, "") | ||||
|             password = password or os.environ.get(db_env.password) | ||||
|         try: | ||||
|             full_conn_str = \ | ||||
|                 f"postgresql+psycopg2://{user}" \ | ||||
|                 f"{(':' + password) if password else ''}@{host}:{port}/" \ | ||||
|                 f"{db_name}{options}" | ||||
|             self.engine = sa.create_engine(full_conn_str) | ||||
|             self.metadata = sa.MetaData() | ||||
|             self.metadata.reflect(bind=self.engine) | ||||
|             self.conn = self.engine.connect() | ||||
|         except sa.exc.SQLAlchemyError as ex: | ||||
|             error(f"Failed to connect to '{db_name}' at '{conn_str}' " | ||||
|                   f"('{full_conn_str}'): {ex}") | ||||
|  | ||||
|  | ||||
| class JsonEncoder(json.JSONEncoder): | ||||
|     """ JSON encoder that handles unusual types """ | ||||
|  | ||||
|     def default(self, o: Any) -> Any: | ||||
|         """ Handles unusual data types """ | ||||
|         if isinstance(o, datetime.datetime): | ||||
|             return o.isoformat() | ||||
|         return super().default(o) | ||||
|  | ||||
|  | ||||
| def do_log(args: Any) -> None: | ||||
|     """Execute "log" command. | ||||
|  | ||||
|     Arguments: | ||||
|     args -- Parsed command line arguments | ||||
|     """ | ||||
|     db_conn = \ | ||||
|         DbConn(conn_str=args.server, password=args.password, db_name=LOG_DB) | ||||
|     work_done = False | ||||
|     if args.topics: | ||||
|         work_done = True | ||||
|         for topic in sorted(db_conn.metadata.tables.keys()): | ||||
|             print(topic) | ||||
|     if args.sources is not None: | ||||
|         work_done = True | ||||
|         sources: Set[str] = set() | ||||
|         error_if( | ||||
|             args.sources and (args.sources not in db_conn.metadata.tables), | ||||
|             f"Topic '{args.sources}' not found") | ||||
|         for topic in db_conn.metadata.tables.keys(): | ||||
|             if "source" not in db_conn.metadata.tables[topic].c: | ||||
|                 continue | ||||
|             if args.sources and (args.sources != topic): | ||||
|                 continue | ||||
|             table_sources = \ | ||||
|                 db_conn.conn.execute( | ||||
|                     sa.text(f'SELECT DISTINCT source FROM "{topic}"')).\ | ||||
|                 fetchall() | ||||
|             sources |= {s[0] for s in table_sources} | ||||
|         for source in sorted(sources): | ||||
|             print(source) | ||||
|     if args.SELECT: | ||||
|         work_done = True | ||||
|         try: | ||||
|             rp = db_conn.conn.execute( | ||||
|                 sa.text("SELECT " + " ".join(args.SELECT))) | ||||
|             if args.format == "bare": | ||||
|                 for record in rp: | ||||
|                     error_if( | ||||
|                         len(record) != 1, | ||||
|                         f"Bare format assumes one field per result row " | ||||
|                         f"(this query has {len(record)} fields per record)") | ||||
|                     print(record[0]) | ||||
|             elif args.format == "json": | ||||
|                 print("[") | ||||
|                 for record in rp: | ||||
|                     print("    " + json.dumps(record._asdict(), | ||||
|                                               cls=JsonEncoder)) | ||||
|                 print("]") | ||||
|             elif args.format == "csv": | ||||
|                 csv_writer = csv.writer(sys.stdout) | ||||
|                 csv_writer.writerow(rp.keys()) | ||||
|                 for record in rp: | ||||
|                     csv_writer.writerow(record) | ||||
|             else: | ||||
|                 error(f"Internal error: unsupported output format " | ||||
|                       f"'{args.format}'") | ||||
|         except sa.exc.SQLAlchemyError as ex: | ||||
|             error(f"Database acces error: {ex}") | ||||
|     error_if(not work_done, "Nothing to do!") | ||||
|  | ||||
|  | ||||
| def do_help(args: Any) -> None: | ||||
|     """Execute "help" command. | ||||
|  | ||||
|     Arguments: | ||||
|     args -- Parsed command line arguments (also contains 'argument_parser' and | ||||
|             'subparsers' fields) | ||||
|     """ | ||||
|     if args.subcommand is None: | ||||
|         args.argument_parser.print_help() | ||||
|     else: | ||||
|         args.subparsers.choices[args.subcommand].print_help() | ||||
|  | ||||
|  | ||||
| def main(argv: List[str]) -> None: | ||||
|     """Do the job. | ||||
|  | ||||
|     Arguments: | ||||
|     argv -- Program arguments | ||||
|     """ | ||||
|     # Database connection switches | ||||
|     switches_server = argparse.ArgumentParser(add_help=False) | ||||
|     switches_server.add_argument( | ||||
|         "--server", "-s", metavar="[user@]{host|^container}[:port][?options]", | ||||
|         help=f"PostgreSQL server connection information. Host part may be a " | ||||
|         f"hostname, IP address, or container name or ID, preceded by '^' " | ||||
|         f"(specifying container name would not work if script runs inside the " | ||||
|         f"container). Default username is '{DEFAULT_USER}', default port is " | ||||
|         f"{DEFAULT_PORT}. Options may specify various (e.g. SSL-related) " | ||||
|         f"parameters (see " | ||||
|         f"https://www.postgresql.org/docs/current/libpq-connect.html" | ||||
|         f"#LIBPQ-CONNSTRING for details). If omitted, script tries to use " | ||||
|         f"data from POSTGRES_HOST, POSTGRES_PORT, POSTGRES_LOG_USER, " | ||||
|         f"POSTGRES_LOG_OPTIONS, POSTGRES_ALS_USER, POSTGRES_ALS_OPTIONS " | ||||
|         f"environment variables") | ||||
|     switches_server.add_argument( | ||||
|         "--password", metavar="PASSWORD", | ||||
|         help="Postgres connection password (if required). If omitted and " | ||||
|         "--server not specified then values from POSTGRES_LOG_PASSWORD and " | ||||
|         "POSTGRES_ALS_PASSWORD environment variables are used") | ||||
|  | ||||
|     # Top level parser | ||||
|     argument_parser = argparse.ArgumentParser( | ||||
|         description=f"Tool for querying ALS database. V{VERSION}") | ||||
|     subparsers = argument_parser.add_subparsers(dest="subcommand", | ||||
|                                                 metavar="SUBCOMMAND") | ||||
|  | ||||
|     # Subparser for "log" command | ||||
|     parser_log = subparsers.add_parser( | ||||
|         "log", parents=[switches_server], | ||||
|         help="Read JSON log messages") | ||||
|     parser_log.add_argument( | ||||
|         "--topics", action="store_true", | ||||
|         help="Print list of topics, stored in database") | ||||
|     parser_log.add_argument( | ||||
|         "--sources", metavar="[TOPIC]", nargs="?", const="", | ||||
|         help="Print list of log sources - for all topics or for specific " | ||||
|         "topic") | ||||
|     parser_log.add_argument( | ||||
|         "--format", "-f", choices=["bare", "json", "csv"], default="csv", | ||||
|         help="Output format for 'SELECT' result. 'bare' is unadorned " | ||||
|         "value-per-line output (must be just one field per result row), " | ||||
|         "'csv' - CSV format (default), 'json' - JSON format") | ||||
|  | ||||
|     parser_log.add_argument( | ||||
|         "SELECT", nargs="*", | ||||
|         help="SELECT command body (without 'SELECT'). 'FROM' clause should " | ||||
|         "use topic name, column names are 'time' (timetag), 'source' (AFC or " | ||||
|         "whatever server ID) and 'log' (JSON log record). Surrounding quotes " | ||||
|         "are optional") | ||||
|     parser_log.set_defaults(func=do_log) | ||||
|  | ||||
|     # Subparser for 'help' command | ||||
|     parser_help = subparsers.add_parser( | ||||
|         "help", add_help=False, usage="%(prog)s subcommand", | ||||
|         help="Prints help on given subcommand") | ||||
|     parser_help.add_argument( | ||||
|         "subcommand", metavar="SUBCOMMAND", nargs="?", | ||||
|         choices=subparsers.choices, | ||||
|         help="Name of subcommand to print help about (use " + | ||||
|         "\"%(prog)s --help\" to get list of all subcommands)") | ||||
|     parser_help.set_defaults(func=do_help, subparsers=subparsers, | ||||
|                              argument_parser=argument_parser) | ||||
|  | ||||
|     if not argv: | ||||
|         argument_parser.print_help() | ||||
|         sys.exit(1) | ||||
|     args = argument_parser.parse_args(argv) | ||||
|  | ||||
|     # Set up logging | ||||
|     console_handler = logging.StreamHandler() | ||||
|     console_handler.setFormatter( | ||||
|         logging.Formatter( | ||||
|             f"{os.path.basename(__file__)}. %(levelname)s: %(message)s")) | ||||
|     logging.getLogger().addHandler(console_handler) | ||||
|     logging.getLogger().setLevel(logging.INFO) | ||||
|  | ||||
|     # Dispatch to the selected subcommand handler | ||||
|     args.func(args) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main(sys.argv[1:]) | ||||
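
For reference, the `[user@]{host|^container}[:port][?options]` server spec accepted by `--server` above can be decomposed with a short regular expression. The sketch below is illustrative only; the helper name and exact pattern are assumptions, not code from the tool:

```py
import re
from typing import Dict, Optional


def parse_server_spec(spec: str) -> Dict[str, Optional[str]]:
    """Split "[user@]{host|^container}[:port][?options]" into its parts.

    Hypothetical helper for illustration. A leading '^' on the host marks
    a container name/ID rather than a hostname.
    """
    m = re.match(
        r"^(?:(?P<user>[^@]+)@)?"    # optional user@
        r"(?P<host>\^?[^:?]+)"       # host; container marked with '^'
        r"(?::(?P<port>\d+))?"       # optional :port
        r"(?:\?(?P<options>.*))?$",  # optional ?options (libpq keywords)
        spec)
    if m is None:
        raise ValueError(f"Invalid server spec: {spec}")
    return m.groupdict()


# parse_server_spec("alice@^als_postgres:5433?sslmode=require") ->
# {'user': 'alice', 'host': '^als_postgres', 'port': '5433',
#  'options': 'sslmode=require'}
```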
							
								
								
									
3808  als/als_siphon.py  (Executable file; diff suppressed because it is too large)

12  als/requirements.txt  (Normal file)
							| @@ -0,0 +1,12 @@ | ||||
| # | ||||
| # Copyright (C) 2021 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns  | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| GeoAlchemy==0.7.2 | ||||
| GeoAlchemy2==0.12.5 | ||||
| postgis==1.0.4 | ||||
| prometheus-client==0.17.1 | ||||
| pyparsing==3.0.9 | ||||
| python-dateutil==2.8.2 | ||||
							
								
								
									
32  build-rpm.sh  (Executable file)
							| @@ -0,0 +1,32 @@ | ||||
| #!/bin/bash | ||||
| # Build the RPM packages for CPO. | ||||
| # Any arguments to this script are passed along to each of the "rpmbuild" commands. | ||||
| set -e | ||||
|  | ||||
| N_THREADS=$(nproc --all --ignore=2) | ||||
| ROOTDIR=$(readlink -f "$(dirname "${BASH_SOURCE[0]}")") | ||||
| CMAKE="cmake3 -G Ninja -DSVN_LAST_REVISION=$BUILDREV" | ||||
| NINJA="ninja-build -j$N_THREADS" | ||||
| RPMBUILD="rpmbuild -ba --without apidoc $@" | ||||
| RPMLINT="rpmlint" | ||||
|  | ||||
| source /opt/rh/$(scl -l)/enable | ||||
| export CC=$(which gcc) | ||||
| export CXX=$(which g++) | ||||
|  | ||||
| if [[ -d "/usr/include/boost169" ]]; then | ||||
|  CMAKE="${CMAKE} -DBOOST_INCLUDEDIR=/usr/include/boost169 -DBOOST_LIBRARYDIR=/usr/lib64/boost169" | ||||
| fi | ||||
|  | ||||
| BUILDDIR=${ROOTDIR}/build | ||||
| mkdir -p "$BUILDDIR" | ||||
| pushd "$BUILDDIR" | ||||
|  | ||||
| ${CMAKE} .. | ||||
| rm -rf dist | ||||
| ${NINJA} rpm-prep | ||||
| # Run rpmbuild directly to get unbuffered output | ||||
| ${RPMBUILD} --define "_topdir ${PWD}/dist" dist/SPECS/*.spec | ||||
|  | ||||
| popd | ||||
| cd / && ${RPMLINT} --file fbrat.rpmlintrc build/dist/SRPMS build/dist/RPMS | ||||
							
								
								
									
5  build@tmp/durable-760e39dc/jenkins-log.txt  (Normal file)
							| @@ -0,0 +1,5 @@ | ||||
| + runcmd.py build.log -- ninja-build -v rpm-prep | ||||
| [0/2] cd /home/jenkins/slave/workspace/RAT-release-RPM/build && /usr/bin/cpack3 --config ./CPackSourceConfig.cmake /home/jenkins/slave/workspace/RAT-release-RPM/build/CPackSourceConfig.cmake | ||||
| CPack3: Create package using TBZ2 | ||||
| CPack3: Install projects | ||||
| CPack3: - Install directory: /home/jenkins/slave/workspace/RAT-release-RPM | ||||
							
								
								
									
17  bulk_postgres/Dockerfile  (Normal file)
							| @@ -0,0 +1,17 @@ | ||||
| # | ||||
| # Copyright (C) 2021 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
|  | ||||
| # Dockerfile for PostgreSQL+PostGIS server used for ALS log storage | ||||
|  | ||||
| FROM postgis/postgis:14-3.3 | ||||
|  | ||||
| ENV POSTGRES_PASSWORD=postgres | ||||
| ENV POSTGRES_HOST_AUTH_METHOD=trust | ||||
| ENV AFC_BULKDB_CONNS=${AFC_BULKDB_CONNS:-1000} | ||||
| ENTRYPOINT docker-entrypoint.sh postgres -c max_connections=$AFC_BULKDB_CONNS | ||||
| HEALTHCHECK --start-period=20s --interval=10s --timeout=5s \ | ||||
|     CMD pg_isready -U postgres || exit 1 | ||||
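
A sketch of building and running this image (the image name is an assumption). The runtime `-e` override takes effect because the shell-form ENTRYPOINT expands `$AFC_BULKDB_CONNS` when the container starts:

```sh
docker build -t openafc/bulk-postgres bulk_postgres/
docker run -d -p 5432:5432 -e AFC_BULKDB_CONNS=500 openafc/bulk-postgres
```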
							
								
								
									
3828  ca-bundle.crt  (Normal file; diff suppressed because it is too large)

69  cert_db/Dockerfile  (Normal file)
							| @@ -0,0 +1,69 @@ | ||||
| # | ||||
| # Copyright 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| # Install required packages | ||||
| # | ||||
| FROM alpine:3.18 as cert_db.preinstall | ||||
| ENV PYTHONUNBUFFERED=1 | ||||
| RUN apk add --update --no-cache python3 && ln -sf python3 /usr/bin/python && \ | ||||
| apk add --update --no-cache py3-sqlalchemy py3-cryptography py3-numpy \ | ||||
| py3-requests py3-flask py3-psycopg2 py3-pydantic=~1.10 && python3 -m ensurepip && \ | ||||
| apk add --repository=http://dl-cdn.alpinelinux.org/alpine/edge/testing/ \ | ||||
| py3-confluent-kafka && \ | ||||
| pip3 install --no-cache --upgrade pip setuptools | ||||
|  | ||||
| COPY cert_db/requirements.txt /wd/ | ||||
| RUN pip3 install -r /wd/requirements.txt && mkdir -p /etc/xdg/fbrat | ||||
| COPY config/ratapi.conf /etc/xdg/fbrat/ | ||||
| RUN echo "AFC_APP_TYPE = 'cert_db'" >> /etc/xdg/fbrat/ratapi.conf | ||||
| # | ||||
| # Build the cert_db application | ||||
| # | ||||
| FROM alpine:3.18 as cert_db.build | ||||
| COPY --from=cert_db.preinstall / / | ||||
| # Development env | ||||
| RUN apk add --update --no-cache cmake ninja | ||||
| # | ||||
| COPY CMakeLists.txt LICENSE.txt version.txt Doxyfile.in /wd/ | ||||
| COPY cmake /wd/cmake/ | ||||
| COPY pkg /wd/pkg/ | ||||
| COPY src /wd/src/ | ||||
| RUN mkdir -p -m 777 /wd/build | ||||
| ARG BUILDREV=localbuild | ||||
| RUN cd /wd/build && \ | ||||
| cmake -DCMAKE_INSTALL_PREFIX=/wd/__install -DCMAKE_PREFIX_PATH=/usr -DCMAKE_BUILD_TYPE=RatapiRelease -DSVN_LAST_REVISION=$BUILDREV -G Ninja /wd && \ | ||||
| ninja -j$(nproc) install | ||||
| # | ||||
| # Install FCCID_DB application | ||||
| # | ||||
| FROM alpine:3.18 as cert_db.install | ||||
| COPY --from=cert_db.preinstall / / | ||||
| COPY --from=cert_db.build /wd/__install /usr/ | ||||
| COPY src/afc-packages /wd/afc-packages | ||||
| RUN pip3 install --use-pep517 --root-user-action=ignore \ | ||||
|         -r /wd/afc-packages/pkgs.cert_db \ | ||||
|     && rm -rf /wd/afc-packages | ||||
| RUN mkdir -m 755 -p /var/lib/fbrat | ||||
| RUN mkdir -m 755 -p /var/spool/fbrat  | ||||
|  | ||||
| # Add user and group | ||||
| RUN addgroup -g 1003 fbrat && \ | ||||
| adduser -g '' -D -u 1003 -G fbrat -h /var/lib/fbrat -s /sbin/nologin fbrat && \ | ||||
| chown fbrat:fbrat /var/lib/fbrat | ||||
| # | ||||
| LABEL revision="afc-cert_db" | ||||
| WORKDIR /wd | ||||
| COPY cert_db/entrypoint.sh / | ||||
| # Add debugging env if configured | ||||
| ARG AFC_DEVEL_ENV=${AFC_DEVEL_ENV:-production} | ||||
| COPY cert_db/devel.sh /wd/ | ||||
| RUN chmod +x /wd/devel.sh | ||||
| RUN /wd/devel.sh | ||||
| # | ||||
| ADD cert_db/sweep.sh /etc/periodic/daily/ | ||||
| RUN chmod 744 /etc/periodic/daily/sweep.sh | ||||
| RUN chmod +x /entrypoint.sh | ||||
| CMD ["/entrypoint.sh"] | ||||
							
								
								
									
23  cert_db/devel.sh  (Normal file)
							| @@ -0,0 +1,23 @@ | ||||
| #!/bin/sh | ||||
| # | ||||
| # Copyright 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| AFC_DEVEL_ENV=${AFC_DEVEL_ENV:-production} | ||||
| case "$AFC_DEVEL_ENV" in | ||||
|   "devel") | ||||
|     echo "Debug profile"  | ||||
|     export NODE_OPTIONS='--openssl-legacy-provider' | ||||
|     apk add --update --no-cache cmake bash | ||||
|     ;; | ||||
|   "production") | ||||
|     echo "Production profile" | ||||
|     ;; | ||||
|   *) | ||||
|     echo "Uknown profile" | ||||
|     ;; | ||||
| esac | ||||
|  | ||||
| exit $? | ||||
							
								
								
									
15  cert_db/entrypoint.sh  (Normal file)
							| @@ -0,0 +1,15 @@ | ||||
| #!/bin/sh | ||||
| # | ||||
| # Copyright 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
|  | ||||
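| # With K8S_CRON unset, run as a long-lived container: start crond in the | ||||
| # background (it picks up /etc/periodic/daily/sweep.sh installed by the | ||||
| # Dockerfile) and block forever. With K8S_CRON set (presumably by a | ||||
| # Kubernetes CronJob), run the sweeps once and exit. | ||||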
| if [ -z "${K8S_CRON}" ]; then | ||||
|   crond -b | ||||
|   sleep infinity | ||||
| else | ||||
|   /usr/bin/rat-manage-api cert_id sweep --country US | ||||
|   /usr/bin/rat-manage-api cert_id sweep --country CA | ||||
| fi | ||||
							
								
								
									
23  cert_db/requirements.txt  (Normal file)
							| @@ -0,0 +1,23 @@ | ||||
| alembic==1.8.1 | ||||
| Flask==2.3.2 | ||||
| Flask-JSONRPC==1.0.2 | ||||
| Flask-Login==0.6.2 | ||||
| Flask-SQLAlchemy==2.5.1 | ||||
| Flask-User==1.0.2.1 | ||||
| Flask-WTF==1.1.1 | ||||
| Flask-Migrate==2.6.0 | ||||
| Flask-Script==2.0.5 | ||||
| Jinja2==3.1.2 | ||||
| json5==0.9.10 | ||||
| prettytable==3.5.0 | ||||
| python_dateutil==2.8.2 | ||||
| pyxdg==0.28 | ||||
| email-validator==1.3.0 | ||||
| jwt==1.3.1 | ||||
| WsgiDAV==4.1.0 | ||||
| typeguard==2.13.3 | ||||
| celery==5.2.7 | ||||
| Werkzeug==2.3.3 | ||||
| gevent==23.9.1 | ||||
| pika==1.3.2 | ||||
|  | ||||
							
								
								
									
10  cert_db/sweep.sh  (Executable file)
							| @@ -0,0 +1,10 @@ | ||||
| #!/bin/sh | ||||
| # | ||||
| # Copyright 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
|  | ||||
| /usr/bin/rat-manage-api cert_id sweep --country US | ||||
| /usr/bin/rat-manage-api cert_id sweep --country CA | ||||
							
								
								
									
41  cmake/CheckCoverage.cmake  (Normal file)
							| @@ -0,0 +1,41 @@ | ||||
| # Add a target to allow test coverage analysis (all tests together). | ||||
| # This option fails on non-unix systems | ||||
| option(BUILD_WITH_COVERAGE "Run unit tests with code coverage analysis" OFF) | ||||
|  | ||||
| if(BUILD_WITH_COVERAGE) | ||||
|     if(NOT UNIX) | ||||
|         message(FATAL_ERROR "Unable to coverage-check non-unix") | ||||
|     endif(NOT UNIX) | ||||
|      | ||||
|     find_program(LCOV_PATH lcov) | ||||
|     if(NOT LCOV_PATH) | ||||
|         message(FATAL_ERROR "lcov not found") | ||||
|     endif(NOT LCOV_PATH) | ||||
|  | ||||
|     find_program(GENHTML_PATH genhtml) | ||||
|     if(NOT GENHTML_PATH) | ||||
|         message(FATAL_ERROR "genhtml not found") | ||||
|     endif(NOT GENHTML_PATH) | ||||
|  | ||||
|     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} --coverage") | ||||
|     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage") | ||||
|  | ||||
|     # Target to check with pre- and post-steps to capture coverage results | ||||
|     set(OUT_RAW "${CMAKE_BINARY_DIR}/coverage-raw.lcov") | ||||
|     set(OUT_CLEAN "${CMAKE_BINARY_DIR}/coverage-clean.lcov") | ||||
|     add_custom_target(check-coverage | ||||
|         # Before any tests | ||||
|         COMMAND ${LCOV_PATH} --directory . --zerocounters | ||||
|          | ||||
|         # Actually run the tests, ignoring exit code | ||||
|         COMMAND ${CMAKE_CTEST_COMMAND} --verbose || : | ||||
|          | ||||
|         # Pull together results, ignoring system files and auto-built files | ||||
|         COMMAND ${LCOV_PATH} --directory . --capture --output-file ${OUT_RAW} | ||||
|         COMMAND ${LCOV_PATH} --remove ${OUT_RAW} '*/test/*' '${CMAKE_BINARY_DIR}/*' '/usr/*' --output-file ${OUT_CLEAN} | ||||
|         COMMAND ${GENHTML_PATH} -o coverage ${OUT_CLEAN} | ||||
|         COMMAND ${CMAKE_COMMAND} -E remove ${OUT_RAW} ${OUT_CLEAN} | ||||
|          | ||||
|         WORKING_DIRECTORY ${CMAKE_BINARY_DIR} | ||||
|     ) | ||||
| endif(BUILD_WITH_COVERAGE) | ||||
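
Typical use of this module might be (illustrative commands; the HTML report is written to `coverage/` inside the build directory):

```sh
cmake -DBUILD_WITH_COVERAGE=ON ..
cmake --build . --target check-coverage
```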
							
								
								
									
36  cmake/Findminizip.cmake  (Normal file)
							| @@ -0,0 +1,36 @@ | ||||
|  | ||||
| IF(UNIX) | ||||
|     find_package(PkgConfig) | ||||
|     pkg_search_module(MINIZIP REQUIRED minizip) | ||||
|     if(NOT MINIZIP_FOUND) | ||||
|         message(FATAL_ERROR "minizip is missing") | ||||
|     endif()  | ||||
| ENDIF(UNIX) | ||||
| IF(WIN32) | ||||
|     # Verify headers present | ||||
|     find_path(MINIZIP_MAIN_INCLUDE minizip/zip.h PATHS ${MINIZIP_INCLUDE_DIRS} ${CONAN_INCLUDE_DIRS}) | ||||
|  | ||||
|     # Verify link and dynamic library present | ||||
|     find_library(MINIZIP_MAIN_LIB NAMES minizip minizipd PATHS ${MINIZIP_LIBDIR} ${CONAN_LIB_DIRS}) | ||||
|     find_file(MINIZIP_MAIN_DLL NAMES minizip.dll minizipd.dll PATHS ${MINIZIP_BINDIR} ${CONAN_BIN_DIRS}) | ||||
|     message("-- Found minizip at ${MINIZIP_MAIN_LIB} ${MINIZIP_MAIN_DLL}") | ||||
|      | ||||
|     add_library(minizip SHARED IMPORTED) | ||||
|     set_target_properties(minizip PROPERTIES | ||||
|         INTERFACE_INCLUDE_DIRECTORIES ${MINIZIP_MAIN_INCLUDE} | ||||
|     ) | ||||
|     set_target_properties(minizip PROPERTIES | ||||
|         IMPORTED_LOCATION "${MINIZIP_MAIN_DLL}" | ||||
|         IMPORTED_IMPLIB "${MINIZIP_MAIN_LIB}" | ||||
|     ) | ||||
|  | ||||
|     # handle the QUIETLY and REQUIRED arguments and set MINIZIP_FOUND to | ||||
|     # TRUE if all listed variables are TRUE | ||||
|     include(FindPackageHandleStandardArgs) | ||||
|     FIND_PACKAGE_HANDLE_STANDARD_ARGS(MINIZIP DEFAULT_MSG MINIZIP_MAIN_LIB MINIZIP_MAIN_INCLUDE) | ||||
|  | ||||
|     if(MINIZIP_FOUND) | ||||
|         set(MINIZIP_INCLUDE_DIRS ${MINIZIP_MAIN_INCLUDE}) | ||||
|         set(MINIZIP_LIBRARIES minizip) | ||||
|     endif(MINIZIP_FOUND) | ||||
| ENDIF(WIN32) | ||||
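
Consumption of this find module could look like the following sketch (the target name is a placeholder):

```cmake
find_package(minizip REQUIRED)
# On UNIX the variables come from pkg-config; on WIN32 from the imported target
target_include_directories(mytarget PRIVATE ${MINIZIP_INCLUDE_DIRS})
target_link_libraries(mytarget PRIVATE ${MINIZIP_LIBRARIES})
```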
							
								
								
									
497  cmake/srcfunctions.cmake  (Normal file)
							| @@ -0,0 +1,497 @@ | ||||
| # Redirect add_... functions to accumulate target names | ||||
|  | ||||
| # | ||||
| # Define a library from sources with its headers. | ||||
| # This relies on the pre-existing values: | ||||
| #  - "VERSION" to set the library PROJECT_VERSION property | ||||
| #  - "SOVERSION" to set the library SOVERSION property | ||||
| # | ||||
| function(add_dist_library) | ||||
|     set(PARSE_OPTS ) | ||||
|     set(PARSE_ARGS_SINGLE TARGET EXPORTNAME) | ||||
|     set(PARSE_ARGS_MULTI SOURCES HEADERS) | ||||
|     cmake_parse_arguments(ADD_DIST_LIB "${PARSE_OPTS}" "${PARSE_ARGS_SINGLE}" "${PARSE_ARGS_MULTI}" ${ARGN}) | ||||
|     if("${ADD_DIST_LIB_TARGET}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_library missing TARGET parameter") | ||||
|     endif() | ||||
|     if("${ADD_DIST_LIB_EXPORTNAME}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_library missing EXPORTNAME parameter") | ||||
|     endif() | ||||
|     if("${ADD_DIST_LIB_SOURCES}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_library missing SOURCES parameter") | ||||
|     endif() | ||||
|      | ||||
|     if(WIN32 AND BUILD_SHARED_LIBS) | ||||
|         # Give the DLL version markings | ||||
|         set(WINRES_COMPANY_NAME_STR "OpenAFC") | ||||
|         set(WINRES_PRODUCT_NAME_STR ${PROJECT_NAME}) | ||||
|         set(WINRES_PRODUCT_VERSION_RES "${PROJECT_VERSION_MAJOR},${PROJECT_VERSION_MINOR},${PROJECT_VERSION_PATCH},0") | ||||
|         set(WINRES_PRODUCT_VERSION_STR "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}.${PROJECT_VERSION_PATCH}-${SVN_LAST_REVISION}") | ||||
|         set(WINRES_INTERNAL_NAME_STR ${ADD_DIST_LIB_TARGET}) | ||||
|         set(WINRES_ORIG_FILENAME "${CMAKE_SHARED_LIBRARY_PREFIX}${ADD_DIST_LIB_TARGET}${CMAKE_SHARED_LIBRARY_SUFFIX}") | ||||
|         set(WINRES_FILE_DESCRIPTION_STR "Runtime for ${ADD_DIST_LIB_TARGET}") | ||||
|         set(WINRES_FILE_VERSION_RES ${WINRES_PRODUCT_VERSION_RES}) | ||||
|         set(WINRES_FILE_VERSION_STR ${WINRES_PRODUCT_VERSION_STR}) | ||||
|         set(WINRES_COMMENTS_STR "") | ||||
|         configure_file("${CMAKE_SOURCE_DIR}/src/libinfo.rc.in" "${CMAKE_CURRENT_BINARY_DIR}/${ADD_DIST_LIB_TARGET}-libinfo.rc" @ONLY) | ||||
|         list(APPEND ADD_DIST_LIB_SOURCES "${CMAKE_CURRENT_BINARY_DIR}/${ADD_DIST_LIB_TARGET}-libinfo.rc") | ||||
|     endif(WIN32 AND BUILD_SHARED_LIBS) | ||||
|      | ||||
|     add_library(${ADD_DIST_LIB_TARGET} ${ADD_DIST_LIB_SOURCES}) | ||||
|     set_target_properties( | ||||
|         ${ADD_DIST_LIB_TARGET} PROPERTIES | ||||
|         VERSION ${PROJECT_VERSION} | ||||
|         SOVERSION ${SOVERSION} | ||||
|     ) | ||||
|      | ||||
|     include(GenerateExportHeader) | ||||
|     generate_export_header(${ADD_DIST_LIB_TARGET}) | ||||
|     target_include_directories(${ADD_DIST_LIB_TARGET} PUBLIC | ||||
|         $<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}> | ||||
|         $<INSTALL_INTERFACE:$<INSTALL_PREFIX>/${PKG_INSTALL_INCLUDEDIR}> | ||||
|     ) | ||||
|     list(APPEND ADD_DIST_LIB_HEADERS "${CMAKE_CURRENT_BINARY_DIR}/${ADD_DIST_LIB_TARGET}_export.h") | ||||
|      | ||||
|     # Source-directory relative path | ||||
|     get_filename_component(SOURCE_DIRNAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) | ||||
|      | ||||
|     # Include headers, with original directory name | ||||
|     install( | ||||
|         FILES ${ADD_DIST_LIB_HEADERS} | ||||
|         DESTINATION ${PKG_INSTALL_INCLUDEDIR}/${SOURCE_DIRNAME} | ||||
|         COMPONENT development | ||||
|     ) | ||||
|  | ||||
|     if(WIN32) | ||||
|         # PDB for symbol mapping | ||||
|         install( | ||||
|             FILES "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${ADD_DIST_LIB_TARGET}.pdb" | ||||
|             DESTINATION ${PKG_INSTALL_DEBUGDIR} | ||||
|             COMPONENT debuginfo | ||||
|         ) | ||||
|         # Sources for debugger (directory name is target name) | ||||
|         install( | ||||
|             FILES ${ADD_DIST_LIB_HEADERS} ${ADD_DIST_LIB_SOURCES} | ||||
|             DESTINATION ${PKG_INSTALL_DEBUGDIR}/${SOURCE_DIRNAME} | ||||
|             COMPONENT debuginfo | ||||
|         ) | ||||
|     endif(WIN32) | ||||
|      | ||||
|     install( | ||||
|         TARGETS ${ADD_DIST_LIB_TARGET} | ||||
|         EXPORT ${ADD_DIST_LIB_EXPORTNAME} | ||||
|         # For Win32 | ||||
|         RUNTIME | ||||
|             DESTINATION ${PKG_INSTALL_BINDIR} | ||||
|             COMPONENT runtime | ||||
|         ARCHIVE | ||||
|             DESTINATION ${PKG_INSTALL_LIBDIR} | ||||
|             COMPONENT development | ||||
|         # For unix | ||||
|         LIBRARY  | ||||
|             DESTINATION ${PKG_INSTALL_LIBDIR} | ||||
|             COMPONENT runtime | ||||
|     ) | ||||
| endfunction(add_dist_library) | ||||
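|  | ||||
| # Illustrative call of the function above (target and file names are | ||||
| # placeholders, not targets from this repository): | ||||
| # | ||||
| #   add_dist_library( | ||||
| #       TARGET mylib | ||||
| #       EXPORTNAME ${PROJECT_NAME}-targets | ||||
| #       SOURCES mylib.cpp | ||||
| #       HEADERS mylib.h | ||||
| #   ) | ||||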
|  | ||||
| function(add_dist_module) | ||||
|     set(PARSE_OPTS ) | ||||
|     set(PARSE_ARGS_SINGLE TARGET EXPORTNAME COMPONENT) | ||||
|     set(PARSE_ARGS_MULTI SOURCES HEADERS) | ||||
|     cmake_parse_arguments(ADD_DIST_LIB "${PARSE_OPTS}" "${PARSE_ARGS_SINGLE}" "${PARSE_ARGS_MULTI}" ${ARGN}) | ||||
|     if("${ADD_DIST_LIB_TARGET}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_library missing TARGET parameter") | ||||
|     endif() | ||||
|     if("${ADD_DIST_LIB_COMPONENT}" STREQUAL "") | ||||
|         set(ADD_DIST_LIB_COMPONENT runtime) | ||||
|     endif() | ||||
|     if("${ADD_DIST_LIB_SOURCES}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_library missing SOURCES parameter") | ||||
|     endif() | ||||
|     if(NOT PKG_MODULE_LIBDIR) | ||||
|         message(FATAL_ERROR "Must define PKG_MODULE_LIBDIR for installation") | ||||
|     endif() | ||||
|  | ||||
|     if(WIN32 AND BUILD_SHARED_LIBS) | ||||
|         # Give the DLL version markings | ||||
|         set(WINRES_COMPANY_NAME_STR "OpenAFC") | ||||
|         set(WINRES_PRODUCT_NAME_STR ${PROJECT_NAME}) | ||||
|         set(WINRES_PRODUCT_VERSION_RES "${PROJECT_VERSION_MAJOR},${PROJECT_VERSION_MINOR},${PROJECT_VERSION_PATCH},0") | ||||
|         set(WINRES_PRODUCT_VERSION_STR "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}.${PROJECT_VERSION_PATCH}-${SVN_LAST_REVISION}") | ||||
|         set(WINRES_INTERNAL_NAME_STR ${ADD_DIST_LIB_TARGET}) | ||||
|         set(WINRES_ORIG_FILENAME "${CMAKE_SHARED_LIBRARY_PREFIX}${ADD_DIST_LIB_TARGET}${CMAKE_SHARED_LIBRARY_SUFFIX}") | ||||
|         set(WINRES_FILE_DESCRIPTION_STR "Runtime for ${ADD_DIST_LIB_TARGET}") | ||||
|         set(WINRES_FILE_VERSION_RES ${WINRES_PRODUCT_VERSION_RES}) | ||||
|         set(WINRES_FILE_VERSION_STR ${WINRES_PRODUCT_VERSION_STR}) | ||||
|         set(WINRES_COMMENTS_STR "") | ||||
|         configure_file("${CMAKE_SOURCE_DIR}/src/libinfo.rc.in" "${CMAKE_CURRENT_BINARY_DIR}/${ADD_DIST_LIB_TARGET}-libinfo.rc" @ONLY) | ||||
|         list(APPEND ADD_DIST_LIB_SOURCES "${CMAKE_CURRENT_BINARY_DIR}/${ADD_DIST_LIB_TARGET}-libinfo.rc") | ||||
|     endif(WIN32 AND BUILD_SHARED_LIBS) | ||||
|      | ||||
|     add_library(${ADD_DIST_LIB_TARGET} MODULE ${ADD_DIST_LIB_SOURCES}) | ||||
|     set_target_properties( | ||||
|         ${ADD_DIST_LIB_TARGET} PROPERTIES | ||||
|         VERSION ${PROJECT_VERSION} | ||||
|         SOVERSION ${SOVERSION} | ||||
|         # no "lib" prefix on unix | ||||
|         PREFIX "" | ||||
|     ) | ||||
|      | ||||
|     include(GenerateExportHeader) | ||||
|     generate_export_header(${ADD_DIST_LIB_TARGET}) | ||||
|     list(APPEND ADD_DIST_LIB_HEADERS "${CMAKE_CURRENT_BINARY_DIR}/${ADD_DIST_LIB_TARGET}_export.h") | ||||
|      | ||||
|     # Source-directory relative path | ||||
|     get_filename_component(SOURCE_DIRNAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) | ||||
|      | ||||
|     if(WIN32) | ||||
|         # PDB for symbol mapping | ||||
|         install( | ||||
|             FILES "${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}/${ADD_DIST_LIB_TARGET}.pdb" | ||||
|             DESTINATION ${PKG_INSTALL_DEBUGDIR} | ||||
|             COMPONENT debuginfo | ||||
|         ) | ||||
|         # Sources for debugger (directory name is target name) | ||||
|         install( | ||||
|             FILES ${ADD_DIST_LIB_HEADERS} ${ADD_DIST_LIB_SOURCES} | ||||
|             DESTINATION ${PKG_INSTALL_DEBUGDIR}/${SOURCE_DIRNAME} | ||||
|             COMPONENT debuginfo | ||||
|         ) | ||||
|     endif(WIN32) | ||||
|      | ||||
|     install( | ||||
|         TARGETS ${ADD_DIST_LIB_TARGET} | ||||
|         EXPORT ${ADD_DIST_LIB_EXPORTNAME} | ||||
|         # For Win32 | ||||
|         RUNTIME | ||||
|             DESTINATION ${PKG_MODULE_LIBDIR} | ||||
|             COMPONENT ${ADD_DIST_LIB_COMPONENT} | ||||
|         ARCHIVE | ||||
|             DESTINATION ${PKG_INSTALL_LIBDIR} | ||||
|             COMPONENT development | ||||
|         # For unix | ||||
|         LIBRARY  | ||||
|             DESTINATION ${PKG_MODULE_LIBDIR} | ||||
|             COMPONENT ${ADD_DIST_LIB_COMPONENT} | ||||
|     ) | ||||
| endfunction(add_dist_module) | ||||
|  | ||||
| function(add_dist_executable) | ||||
|     set(PARSE_OPTS SYSTEMEXEC) | ||||
|     set(PARSE_ARGS_SINGLE TARGET EXPORTNAME) | ||||
|     set(PARSE_ARGS_MULTI SOURCES HEADERS) | ||||
|     cmake_parse_arguments(ADD_DIST_BIN "${PARSE_OPTS}" "${PARSE_ARGS_SINGLE}" "${PARSE_ARGS_MULTI}" ${ARGN}) | ||||
|     if("${ADD_DIST_BIN_TARGET}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_executable missing TARGET parameter") | ||||
|     endif() | ||||
|     if("${ADD_DIST_BIN_SOURCES}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_executable missing SOURCES parameter") | ||||
|     endif() | ||||
|  | ||||
|     if(WIN32) | ||||
|         # Give the DLL version markings | ||||
|         set(WINRES_COMPANY_NAME_STR "OpenAFC") | ||||
|         set(WINRES_PRODUCT_NAME_STR ${PROJECT_NAME}) | ||||
|         set(WINRES_PRODUCT_VERSION_RES "${PROJECT_VERSION_MAJOR},${PROJECT_VERSION_MINOR},${PROJECT_VERSION_PATCH},0") | ||||
|         set(WINRES_PRODUCT_VERSION_STR "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}.${PROJECT_VERSION_PATCH}-${SVN_LAST_REVISION}") | ||||
|         set(WINRES_INTERNAL_NAME_STR ${ADD_DIST_BIN_TARGET}) | ||||
|         set(WINRES_ORIG_FILENAME "${ADD_DIST_BIN_TARGET}${CMAKE_EXECUTABLE_SUFFIX}") | ||||
|         set(WINRES_FILE_DESCRIPTION_STR "Runtime for ${ADD_DIST_BIN_TARGET}") | ||||
|         set(WINRES_FILE_VERSION_RES ${WINRES_PRODUCT_VERSION_RES}) | ||||
|         set(WINRES_FILE_VERSION_STR ${WINRES_PRODUCT_VERSION_STR}) | ||||
|         set(WINRES_COMMENTS_STR "") | ||||
|         configure_file("${CMAKE_SOURCE_DIR}/src/libinfo.rc.in" "${CMAKE_CURRENT_BINARY_DIR}/${ADD_DIST_BIN_TARGET}-libinfo.rc" @ONLY) | ||||
|         list(APPEND ADD_DIST_BIN_SOURCES "${CMAKE_CURRENT_BINARY_DIR}/${ADD_DIST_BIN_TARGET}-libinfo.rc") | ||||
|     endif(WIN32) | ||||
|      | ||||
|     add_executable(${ADD_DIST_BIN_TARGET} ${ADD_DIST_BIN_SOURCES}) | ||||
|      | ||||
|     if(TARGET Threads::Threads) | ||||
|         target_link_libraries(${ADD_DIST_BIN_TARGET} PRIVATE Threads::Threads) | ||||
|     endif() | ||||
|  | ||||
|     if(${ADD_DIST_BIN_SYSTEMEXEC}) | ||||
|         set(ADD_DIST_BIN_DEST ${PKG_INSTALL_SBINDIR}) | ||||
|     else() | ||||
|         set(ADD_DIST_BIN_DEST ${PKG_INSTALL_BINDIR}) | ||||
|     endif() | ||||
|     install( | ||||
|         TARGETS ${ADD_DIST_BIN_TARGET} | ||||
|         EXPORT ${ADD_DIST_BIN_EXPORTNAME} | ||||
|         DESTINATION ${ADD_DIST_BIN_DEST} | ||||
|         COMPONENT runtime | ||||
|     ) | ||||
|     if(WIN32) | ||||
|         get_filename_component(SOURCE_DIRNAME ${CMAKE_CURRENT_SOURCE_DIR} NAME) | ||||
|         # PDB for symbol mapping | ||||
|         install( | ||||
|             FILES "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${ADD_DIST_BIN_TARGET}.pdb" | ||||
|             DESTINATION ${PKG_INSTALL_DEBUGDIR} | ||||
|             COMPONENT debuginfo | ||||
|         ) | ||||
|         # Sources for debugger (directory name is target name) | ||||
|         install( | ||||
|             FILES ${ADD_DIST_BIN_HEADERS} ${ADD_DIST_BIN_SOURCES} | ||||
|             DESTINATION ${PKG_INSTALL_DEBUGDIR}/${SOURCE_DIRNAME} | ||||
|             COMPONENT debuginfo | ||||
|         ) | ||||
|     endif(WIN32) | ||||
| endfunction(add_dist_executable) | ||||
|  | ||||
| # | ||||
| # Define a python library from sources. | ||||
| # The named function arguments are: | ||||
| #  TARGET: The cmake target name to create. | ||||
| #  SETUP_TEMPLATE: A file to be used as template for setup.py. | ||||
| #  COMPONENT: The cmake "install" component to install the library as. | ||||
| #  SOURCEDIR: The root directory of all sources for the target, python files or otherwise. | ||||
| # | ||||
| # When processing the setup template, a variable is created for a windows-safe | ||||
| # escaped file path to the source directory named  | ||||
| # DIST_LIB_PACKAGE_DIR_ESCAPED. | ||||
| # | ||||
| function(add_dist_pythonlibrary) | ||||
|     set(PARSE_OPTS ) | ||||
|     set(PARSE_ARGS_SINGLE TARGET SETUP_TEMPLATE SOURCEDIR COMPONENT) | ||||
|     set(PARSE_ARGS_MULTI ) | ||||
|     cmake_parse_arguments(ADD_DIST_LIB "${PARSE_OPTS}" "${PARSE_ARGS_SINGLE}" "${PARSE_ARGS_MULTI}" ${ARGN}) | ||||
|     if("${ADD_DIST_LIB_TARGET}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_pythonlibrary missing TARGET parameter") | ||||
|     endif() | ||||
|     if("${ADD_DIST_LIB_SETUP_TEMPLATE}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_pythonlibrary missing SETUP_TEMPLATE parameter") | ||||
|     endif() | ||||
|     if("${ADD_DIST_LIB_SOURCEDIR}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_pythonlibrary missing SOURCEDIR parameter") | ||||
|     endif() | ||||
|  | ||||
|     find_program(PYTHON_BIN "python") | ||||
|     if(NOT PYTHON_BIN) | ||||
|         message(FATAL_ERROR "Missing executable for 'python'") | ||||
|     endif() | ||||
|  | ||||
|     # Setuptools runs on copy of source in the build path | ||||
|     set(ADD_DIST_LIB_SOURCECOPY "${CMAKE_CURRENT_BINARY_DIR}/pkg") | ||||
|     # Need to escape the path for windows | ||||
|     if(WIN32) | ||||
|         string(REPLACE "/" "\\\\" DIST_LIB_PACKAGE_DIR_ESCAPED ${ADD_DIST_LIB_SOURCECOPY}) | ||||
|     else(WIN32) | ||||
|         set(DIST_LIB_PACKAGE_DIR_ESCAPED ${ADD_DIST_LIB_SOURCECOPY}) | ||||
|     endif(WIN32) | ||||
|  | ||||
|     # Assemble the actual setup.py input | ||||
|     configure_file(${ADD_DIST_LIB_SETUP_TEMPLATE} setup.py @ONLY) | ||||
|  | ||||
|     # Command depends on all source files, package-included or not | ||||
|     # Record an explicit sentinel file for the build | ||||
|     file(GLOB_RECURSE ALL_PACKAGE_FILES "${ADD_DIST_LIB_SOURCEDIR}/*") | ||||
|     add_custom_command( | ||||
|         DEPENDS ${ALL_PACKAGE_FILES} | ||||
|         OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/timestamp" | ||||
|         COMMAND ${CMAKE_COMMAND} -E remove_directory ${ADD_DIST_LIB_SOURCECOPY} | ||||
|         COMMAND ${CMAKE_COMMAND} -E copy_directory ${ADD_DIST_LIB_SOURCEDIR} ${ADD_DIST_LIB_SOURCECOPY} | ||||
|         COMMAND ${PYTHON_BIN} "${CMAKE_CURRENT_BINARY_DIR}/setup.py" build --quiet | ||||
|         COMMAND ${CMAKE_COMMAND} -E touch "${CMAKE_CURRENT_BINARY_DIR}/timestamp" | ||||
|     ) | ||||
|     add_custom_target(${ADD_DIST_LIB_TARGET} ALL  | ||||
|         DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/timestamp" | ||||
|     ) | ||||
|      | ||||
|     # Use DESTDIR from actual install environment | ||||
|     set(ADD_DIST_LIB_INSTALL_CMD "${PYTHON_BIN} \"${CMAKE_CURRENT_BINARY_DIR}/setup.py\" install --root=\$DESTDIR/${CMAKE_INSTALL_PREFIX} --prefix=") | ||||
|     if(PKG_INSTALL_PYTHONSITEDIR) | ||||
|         set(ADD_DIST_LIB_INSTALL_CMD "${ADD_DIST_LIB_INSTALL_CMD} --install-lib=${PKG_INSTALL_PYTHONSITEDIR}") | ||||
|     endif() | ||||
|     install( | ||||
|         CODE "execute_process(COMMAND ${ADD_DIST_LIB_INSTALL_CMD})" | ||||
|         COMPONENT ${ADD_DIST_LIB_COMPONENT} | ||||
|     ) | ||||
|  | ||||
| endfunction(add_dist_pythonlibrary) | ||||
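|  | ||||
| # Illustrative call (names are placeholders): | ||||
| # | ||||
| #   add_dist_pythonlibrary( | ||||
| #       TARGET mypylib | ||||
| #       SETUP_TEMPLATE setup.py.in | ||||
| #       SOURCEDIR ${CMAKE_CURRENT_SOURCE_DIR}/mypkg | ||||
| #       COMPONENT runtime | ||||
| #   ) | ||||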
|  | ||||
| # Use qt "lrelease" to generate a translation binary from a source file. | ||||
| # The named function arguments are: | ||||
| #  TARGET: The output QM file to create. | ||||
| #  SOURCE: The input TS file to read. | ||||
| function(add_qt_translation) | ||||
|     set(PARSE_OPTS ) | ||||
|     set(PARSE_ARGS_SINGLE TARGET SOURCE) | ||||
|     set(PARSE_ARGS_MULTI ) | ||||
|     cmake_parse_arguments(ADD_TRANSLATION "${PARSE_OPTS}" "${PARSE_ARGS_SINGLE}" "${PARSE_ARGS_MULTI}" ${ARGN}) | ||||
|     if(NOT ADD_TRANSLATION_TARGET) | ||||
|         message(FATAL_ERROR "add_qt_translation missing TARGET parameter") | ||||
|     endif() | ||||
|     if(NOT ADD_TRANSLATION_SOURCE) | ||||
|         message(FATAL_ERROR "add_qt_translation missing SOURCE parameter") | ||||
|     endif() | ||||
|  | ||||
|     find_package(Qt5LinguistTools) | ||||
|     add_custom_command( | ||||
|         OUTPUT ${ADD_TRANSLATION_TARGET} | ||||
|         DEPENDS ${ADD_TRANSLATION_SOURCE} | ||||
|         COMMAND Qt5::lrelease -qm "${ADD_TRANSLATION_TARGET}" "${ADD_TRANSLATION_SOURCE}" | ||||
|     ) | ||||
| endfunction(add_qt_translation) | ||||
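|  | ||||
| # Illustrative call (file names are placeholders); note that TARGET here is | ||||
| # the output .qm file path, not a cmake target: | ||||
| # | ||||
| #   add_qt_translation( | ||||
| #       TARGET ${CMAKE_CURRENT_BINARY_DIR}/app_fr.qm | ||||
| #       SOURCE ${CMAKE_CURRENT_SOURCE_DIR}/app_fr.ts | ||||
| #   ) | ||||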
|  | ||||
| # Common run-time test behavior | ||||
| set(GTEST_RUN_ARGS "--gtest_output=xml:test-detail.junit.xml") | ||||
| function(add_gtest_executable TARGET_NAME ...) | ||||
|     add_executable(${ARGV}) | ||||
|     set_target_properties(${TARGET_NAME} PROPERTIES  | ||||
|         COMPILE_FLAGS "-DGTEST_LINKED_AS_SHARED_LIBRARY=1"  | ||||
|     ) | ||||
|     target_include_directories(${TARGET_NAME} PRIVATE ${GTEST_INCLUDE_DIRS}) | ||||
|     target_link_libraries(${TARGET_NAME} PRIVATE ${GTEST_BOTH_LIBRARIES}) | ||||
|      | ||||
|     find_package(Threads QUIET) | ||||
|     if(TARGET Threads::Threads) | ||||
|         target_link_libraries(${TARGET_NAME} PRIVATE Threads::Threads) | ||||
|     endif() | ||||
|          | ||||
|     add_test( | ||||
|         NAME ${TARGET_NAME} | ||||
|         COMMAND ${TARGET_NAME} ${GTEST_RUN_ARGS} | ||||
|     ) | ||||
|     set_property( | ||||
|         TEST ${TARGET_NAME} | ||||
|         APPEND PROPERTY | ||||
|             ENVIRONMENT | ||||
|                 "TEST_SOURCE_DIR=${CMAKE_CURRENT_SOURCE_DIR}" | ||||
|                 "TEST_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR}" | ||||
|     ) | ||||
|     if(UNIX) | ||||
|         set_property( | ||||
|             TEST ${TARGET_NAME} | ||||
|             APPEND PROPERTY | ||||
|                 ENVIRONMENT | ||||
|                     "XDG_DATA_DIRS=${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_DATADIR}:/usr/share" | ||||
|         ) | ||||
|     elseif(WIN32) | ||||
|         set_property( | ||||
|             TEST ${TARGET_NAME} | ||||
|             APPEND PROPERTY | ||||
|                 ENVIRONMENT | ||||
|                     "LOCALAPPDATA=${CMAKE_INSTALL_PREFIX}\\${CMAKE_INSTALL_DATADIR}" | ||||
|         ) | ||||
|  | ||||
|         set(PATH_WIN "${CMAKE_INSTALL_PREFIX}\\bin\;${GTEST_PATH}\;$ENV{PATH}") | ||||
|         # escape for ctest string processing | ||||
|         string(REPLACE ";" "\\;" PATH_WIN "${PATH_WIN}") | ||||
|         string(REPLACE "/" "\\" PATH_WIN "${PATH_WIN}") | ||||
|         set_property( | ||||
|             TEST ${TARGET_NAME} | ||||
|             APPEND PROPERTY | ||||
|                 ENVIRONMENT | ||||
|                     "PATH=${PATH_WIN}" | ||||
|                     "QT_PLUGIN_PATH=${CMAKE_INSTALL_PREFIX}\\bin" | ||||
|         ) | ||||
|     endif() | ||||
| endfunction(add_gtest_executable) | ||||
|  | ||||
| function(add_nosetest_run TEST_NAME) | ||||
|     find_program(NOSETEST_BIN "nosetests") | ||||
|     if(NOT NOSETEST_BIN) | ||||
|         message(FATAL_ERROR "Missing executable for 'nosetests'") | ||||
|     endif() | ||||
|     set(NOSETEST_RUN_ARGS "-v" "--with-xunit" "--xunit-file=test-detail.xunit.xml") | ||||
|     add_test( | ||||
|         NAME ${TEST_NAME} | ||||
|         COMMAND ${NOSETEST_BIN} ${CMAKE_CURRENT_SOURCE_DIR} ${NOSETEST_RUN_ARGS} | ||||
|     ) | ||||
|  | ||||
|     set_property( | ||||
|         TEST ${TEST_NAME} | ||||
|         APPEND PROPERTY | ||||
|             ENVIRONMENT | ||||
|                 "TEST_SOURCE_DIR=${CMAKE_CURRENT_SOURCE_DIR}" | ||||
|                 "TEST_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR}" | ||||
|     ) | ||||
|     if(UNIX) | ||||
|         set_property( | ||||
|             TEST ${TEST_NAME} | ||||
|             APPEND PROPERTY | ||||
|                 ENVIRONMENT | ||||
|                     "XDG_DATA_DIRS=${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_DATADIR}:/usr/share" | ||||
|         ) | ||||
|     elseif(WIN32) | ||||
|         set_property( | ||||
|             TEST ${TEST_NAME} | ||||
|             APPEND PROPERTY | ||||
|                 ENVIRONMENT | ||||
|                     "LOCALAPPDATA=${CMAKE_INSTALL_PREFIX}\\${CMAKE_INSTALL_DATADIR}" | ||||
|         ) | ||||
|     endif() | ||||
| endfunction(add_nosetest_run) | ||||
|  | ||||
| # | ||||
| # Define a web site library from sources. | ||||
| # The yarn build should output to the /www directory relative to the build directory. | ||||
| # Function arguments: | ||||
| #   TARGET: The cmake target name to create. | ||||
| #   SETUP_TEMPLATES: Files to be used as templates for webpack.*.js. | ||||
| #   COMPONENT: The cmake "install" component to install the library as. | ||||
| #   SOURCES: All of the dependencies of the target. | ||||
| # | ||||
| function(add_dist_yarnlibrary) | ||||
|  | ||||
|     set(PARSE_OPTS) | ||||
|     set(PARSE_ARGS_SINGLE TARGET COMPONENT) | ||||
|     set(PARSE_ARGS_MULTI SOURCES SETUP_TEMPLATES) | ||||
|     cmake_parse_arguments(ADD_DIST_LIB "${PARSE_OPTS}" "${PARSE_ARGS_SINGLE}" "${PARSE_ARGS_MULTI}" ${ARGN}) | ||||
|     if("${ADD_DIST_LIB_TARGET}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_yarnlibrary missing TARGET parameter") | ||||
|     endif() | ||||
|  | ||||
|     if("${ADD_DIST_LIB_SETUP_TEMPLATES}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_yarnlibrary missing SETUP_TEMPLATES parameter") | ||||
|     endif() | ||||
|  | ||||
|     if("${ADD_DIST_LIB_SOURCES}" STREQUAL "") | ||||
|         message(FATAL_ERROR "add_dist_yarnlibrary missing SOURCES parameter") | ||||
|     endif() | ||||
|  | ||||
|     # TODO: the only build mode that currently works is build-dev | ||||
|     #if ("${CMAKE_BUILD_TYPE}" STREQUAL "Debug") | ||||
|         set(YARN_BUILD_TYPE "build-dev") | ||||
|         message(STATUS "will build yarn in DEV mode.") | ||||
|     #else() | ||||
|     #    set(YARN_BUILD_TYPE "build") | ||||
|     #endif() | ||||
|  | ||||
|     find_program(YARN "yarn") | ||||
|      | ||||
|     # Yarn runs on a copy of the source in the build path | ||||
|     set(ADD_DIST_LIB_SOURCECOPY "${CMAKE_CURRENT_BINARY_DIR}/pkg") | ||||
|  | ||||
|     foreach(SETUP_TEMPLATE ${ADD_DIST_LIB_SETUP_TEMPLATES}) | ||||
| 	    message(STATUS "${ADD_DIST_LIB_SOURCECOPY}/${SETUP_TEMPLATE}") | ||||
|         string(REPLACE ".in" ".js" CONFIG_NAME ${SETUP_TEMPLATE})  | ||||
|         configure_file("${CMAKE_CURRENT_SOURCE_DIR}/${SETUP_TEMPLATE}" "${ADD_DIST_LIB_SOURCECOPY}/${CONFIG_NAME}" @ONLY) | ||||
|     endforeach(SETUP_TEMPLATE) | ||||
|      | ||||
|  | ||||
|     # Record an explicit sentinel file for the build | ||||
|     add_custom_command( | ||||
|         DEPENDS ${ADD_DIST_LIB_SOURCES} | ||||
|         OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/timestamp" | ||||
|         COMMENT "Building YARN" | ||||
|         COMMAND ${CMAKE_COMMAND} -E copy_if_different "${CMAKE_CURRENT_SOURCE_DIR}/*" "${ADD_DIST_LIB_SOURCECOPY}" | ||||
|         COMMAND ${CMAKE_COMMAND} -E copy_directory "${CMAKE_CURRENT_SOURCE_DIR}/src" "${ADD_DIST_LIB_SOURCECOPY}/src" | ||||
|         COMMAND ${YARN} --cwd ${ADD_DIST_LIB_SOURCECOPY} | ||||
|         COMMAND ${YARN} --cwd ${ADD_DIST_LIB_SOURCECOPY} version --no-git-tag-version --new-version "${PROJECT_VERSION}-${SVN_LAST_REVISION}" | ||||
|         COMMAND ${YARN} --cwd ${ADD_DIST_LIB_SOURCECOPY} ${YARN_BUILD_TYPE} | ||||
|         COMMAND ${CMAKE_COMMAND} -E touch "${CMAKE_CURRENT_BINARY_DIR}/timestamp" | ||||
|     ) | ||||
|  | ||||
|     add_custom_target(${ADD_DIST_LIB_TARGET} ALL  | ||||
|         # COMMAND ${CMAKE_COMMAND} -E r ${CMAKE_CURRENT_BINARY_DIR}/timestamp | ||||
|         DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/timestamp" | ||||
|     ) | ||||
|  | ||||
|     install( | ||||
|         DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/www" | ||||
|         DESTINATION "${PKG_INSTALL_DATADIR}" | ||||
|         COMPONENT ${ADD_DIST_LIB_COMPONENT} | ||||
|     ) | ||||
|  | ||||
| endfunction(add_dist_yarnlibrary) | ||||
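|  | ||||
| # Illustrative call (names are placeholders); each "*.in" template becomes | ||||
| # the corresponding webpack "*.js" config in the build copy: | ||||
| # | ||||
| #   add_dist_yarnlibrary( | ||||
| #       TARGET webui | ||||
| #       COMPONENT runtime | ||||
| #       SETUP_TEMPLATES webpack.common.in webpack.prod.in | ||||
| #       SOURCES ${WEB_SOURCES} | ||||
| #   ) | ||||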
							
								
								
									
17  config/ratapi.conf  (Normal file)
							| @@ -0,0 +1,17 @@ | ||||
| # Flask settings | ||||
| PROPAGATE_EXCEPTIONS = False | ||||
| SECRET_KEY = '9XXc5Lw+DZwXINyOmKcY5c41AMhLabqn4jFLXJntsVutrZCauB5W/AOv7tDbp63ge2SS2Ujz/OnfeQboJOrbsQ' | ||||
|  | ||||
| # Flask-SQLAlchemy settings | ||||
| SQLALCHEMY_DATABASE_URI = 'postgresql://postgres:N3SF0LVKJx1RAhFGx4fcw@ratdb/fbrat' | ||||
| SQLALCHEMY_POOL_SIZE = 30 | ||||
|  | ||||
| # Flask-User settings | ||||
| USER_EMAIL_SENDER_EMAIL = 'fbrat@a9556f3227ba.ihl.broadcom.net' | ||||
| SESSION_COOKIE_SECURE = True | ||||
| SESSION_COOKIE_SAMESITE = 'Lax' | ||||
|  | ||||
| # RAT settings | ||||
| GOOGLE_APIKEY = 'AIzaSyAjcMamfS5LhIRzQ6Qapi0uKX151himkmQ' | ||||
| HISTORY_DIR = '/mnt/nfs/rat_transfer/history' | ||||
| DEFAULT_ULS_DIR = '/mnt/nfs/rat_transfer/ULS_Database' | ||||
							
								
								
									
400  database_readme.md  (Normal file)
							| @@ -0,0 +1,400 @@ | ||||
| # Database Readme | ||||
|  | ||||
| ## **Database Description** | ||||
|  | ||||
| ### **Details of Databases** | ||||
| #### **FS_Database:** | ||||
| contains parameters defining the FS links for interference analysis. | ||||
| For the ULS database (for the US) these are: | ||||
| * FS Tx/Rx CallSign | ||||
| * FS Tx/Rx Lat/Long and Height above ground level, | ||||
| * FS Diversity Height above ground level, | ||||
| * FS Primary and Diversity Rx Gain, Antenna Model, Antenna Diameter, | ||||
| * FS Start/End Frequencies and Bandwidth, | ||||
| * FS passive repeater Lat/Long, Height above ground level, dimensions, antenna model, antenna diameter and gain | ||||
| * FS Rx near-field adjustment factor parameters | ||||
|  | ||||
| It also contains parameters defining the exclusion zone(s) around each RAS antenna that needs to be protected. | ||||
|  | ||||
| It also contains parameters defining the FS Rx actual antenna pattern (angle-off-boresight vs. discrimination gain). | ||||
|  | ||||
| #### **proc_lidar_2019:** | ||||
| contains JSON files that allow showing the boundaries of RAS exclusion zones and LiDAR coverage in the GUI. | ||||
| * RAS_ExclusionZone.json | ||||
| * LiDAR_Bounds.json.gz | ||||
|  | ||||
| This directory also contains all LiDAR tiles: each city has a subdirectory of tiles plus a .csv tile index that sits outside the city subdirectory. The LiDAR zip file contains all of this. | ||||
|  | ||||
| #### **Multiband-BDesign3D:**  | ||||
| contains building database over Manhattan. | ||||
| #### **globe:**  | ||||
| contains NOAA GLOBE (1km resolution) terrain database. | ||||
| #### **srtm3arcsecondv003:**  | ||||
| contains 3arcsec (=90m) SRTM terrain database files. In regions where 1arcsec 3DEP is the primary terrain source, these are used as a fallback when a 3DEP tile is missing. | ||||
| #### **srtm1arcsecond:**  | ||||
| contains 1arcsec (=30m) SRTM terrain database files. This is used in regions where 1arcsec 3DEP is not available. | ||||
| #### **3dep:**  | ||||
| The 1_arcsec subdirectory (the one currently used) contains 1arcsec (=30m) 3DEP terrain database files over the US, Canada and Mexico. | ||||
| #### **cdsm:**  | ||||
| contains the Natural Resources Canada Canadian Digital Surface Model (CDSM), 2000 at the highest available resolution. | ||||
|  | ||||
| #### **nlcd:**  | ||||
| contains nlcd_2019_land_cover_I48_20210604_resample.zip (referred to as "Production NLCD" in AFC Config UI) and federated_nlcd.zip (referred to as "WFA Test NLCD" in AFC Config UI) files. This is used to determine RLAN/FS morphology (i.e. Urban, Suburban or Rural) to pick the appropriate path/clutter loss model. In addition, it is used to determine the appropriate P.452 Rural clutter category. | ||||
| #### **landcover-2020-classification:**  | ||||
| The 2020 Land Cover of Canada produced by Natural Resources Canada. | ||||
| #### **clc:**  | ||||
| contains Corine Land Cover, a land categorization over the EU, used to determine RLAN/FS morphology. | ||||
| #### **population:**  | ||||
| contains the Gridded Population of the World (GPW), v4.11, population density database. GPW is used for determination of RLAN morphology only in the absence of a land cover database. | ||||
|  | ||||
| #### **US.kml:**  | ||||
| specifies United States' country boundary where AP access is allowed for that region.  | ||||
| #### **CA.kml:**  | ||||
| specifies Canada's country boundary where AP access is allowed for that region.  | ||||
| #### **GB.kml:**  | ||||
| specifies the Great Britain country boundary where AP access is allowed for that region.  | ||||
| #### **BRA.kml:**  | ||||
| specifies Brazil's country boundary where AP access is allowed for that region.   | ||||
|  | ||||
| #### **itudata:**  | ||||
| contains two ITU maps that are used by the ITM path loss model: 1) the Radio Climate map (TropoClim.txt) and 2) the Surface Refractivity map (N050.txt). | ||||
|  | ||||
| #### **winnforum databases:**  | ||||
| these are WinnForum databases used by the FS Parser (antenna_model_diameter_gain.csv, billboard_reflector.csv, category_b1_antennas.csv, high_performance_antennas.csv, fcc_fixed_service_channelization.csv, transmit_radio_unit_architecture.csv). They provide the data to validate/fix/fill-in the corresponding ULS parameters. Two other WinnForum databases (nfa_table_data.csv and rat_transfer/pr/WINNF-TS-1014-V1.2.0-App02.csv) are used by the AFC Engine for near-field adjustment factor calculation for primary/diversity receivers and passive sites respectively. Note that nfa_table_data.csv is generated manually as a simplified version of WINNF-TS-1014-V1.2.0-App01.csv. The use of these databases is described in the WINNF-TS-1014 and WINNF-TS-5008 documents. | ||||
|  | ||||
| ### **Location or procedure to download/acquire these databases** | ||||
| * **FS_Database:** Created using the FS Script Parser from ULS raw data on the FCC website (see details in the ULS Script documentation), the RAS database from FCC 47 CFR Part 15.407, and ISED's 6GHz DataExtract database at https://ised-isde.canada.ca/site/spectrum-management-system/en/spectrum-management-system-data | ||||
|  | ||||
| * **proc_lidar_2019:** raw data obtained from https://rockyweb.usgs.gov/vdelivery/Datasets/Staged/Elevation/Non_Standard_Contributed/NGA_US_Cities/ | ||||
| * **Multiband-BDesign3D:** this was purchased https://www.b-design3d.com/ | ||||
|  | ||||
| * **globe:** https://ngdc.noaa.gov/mgg/topo/globe.html | ||||
| * **srtm3arcsecondv003:** https://www2.jpl.nasa.gov/srtm/ | ||||
| * **srtm1arcsecond:** https://search.earthdata.nasa.gov/search/granules?p=C1000000240-LPDAAC_ECS&pg[0][v]=f&pg[0][gsk]=-start_date&q=srtm&tl=1702926101.019!3!! | ||||
| * **3dep:** https://data.globalchange.gov/dataset/usgs-national-elevation-dataset-ned-1-arc-second | ||||
| * **cdsm:"** https://open.canada.ca/data/en/dataset/768570f8-5761-498a-bd6a-315eb6cc023d | ||||
|  | ||||
| * **nlcd:** original file nlcd_2019_land_cover_I48_20210604 was downloaded from [link](https://www.mrlc.gov/data?f%5B0%5D=category%3Aland%20cover) (download NLCD 2019 Land Cover (CONUS)). Using gdal utilities this file was translated to nlcd_2019_land_cover_I48_20210604_resample.zip so that the 1-arcsec tiles match up with 1-arcsec 3DEP tiles (a sketch of such a resampling command follows this list). The federated_nlcd.zip file was obtained by using other gdal utilities to convert federated's many files to one file covering CONUS. | ||||
| * **landcover-2020-classification:** original file was downloaded from [link](https://open.canada.ca/data/en/dataset/ee1580ab-a23d-4f86-a09b-79763677eb47). Using gdal utilities this file was translated to landcover-2020-classification_resampled.tif so that the 1-arcsec tiles match up with 1-arcsec 3DEP tiles and the Canada land cover classifications are mapped to the equivalent NLCD codes. | ||||
| * **clc:** original file was downloaded from the [Copernicus](https://land.copernicus.eu/pan-european/corine-land-cover/clc2018) website (download the GeoTIFF data). Login is required. Using gdal utilities this file was similarly resampled so that the 1-arcsec tiles match up with 1-arcsec 3DEP tiles and the CLC classifications are mapped to the equivalent NLCD codes. | ||||
| * **population:** https://sedac.ciesin.columbia.edu/data/set/gpw-v4-population-density-rev11 | ||||
|  | ||||
| * **US.kml:** https://public.opendatasoft.com/explore/dataset/world-administrative-boundaries/export/?flg=en-us | ||||
| * **CA.kml:** https://www12.statcan.gc.ca/census-recensement/2021/geo/sip-pis/boundary-limites/index2021-eng.cfm?year=21 (Cartographic Boundary files, selecting 'Provinces/territories' of Administrative boundaries) | ||||
| * **GB.kml:** https://public.opendatasoft.com/explore/dataset/world-administrative-boundaries/export/?flg=en-us | ||||
| * **BRA.kml:** https://public.opendatasoft.com/explore/dataset/world-administrative-boundaries/export/?flg=en-us | ||||
|   | ||||
| * **itudata:** Radio Climate map from ITU-R Rec, P.617-3 (https://www.itu.int/rec/R-REC-P.617-3-201309-S/en) and Surface Refractivity map from ITU-R Rec, P.452-17 (https://www.itu.int/rec/R-REC-P.452-17-202109-I/en) | ||||
|  | ||||
| * **winnforum databases:** The WinnForum databases used by the FS parser can be viewed in a browser at https://github.com/Wireless-Innovation-Forum/6-GHz-AFC/tree/main/data/common_data. The scripts download from https://raw.githubusercontent.com/Wireless-Innovation-Forum/6-GHz-AFC/main/data/common_data/. The near-field adjustment factor databases can be downloaded from https://6ghz.wirelessinnovation.org/baseline-standards. | ||||
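|  | ||||
| For illustration, the kind of resampling described for **nlcd** above might be done with a GDAL command along these lines (filenames and exact options are assumptions, not the commands actually used; 1 arcsec = 1/3600 degree): | ||||
|  | ||||
| ```sh | ||||
| # Warp onto a 1-arcsec lat/long grid; nearest-neighbor keeps class codes intact | ||||
| gdalwarp -t_srs EPSG:4326 \ | ||||
|     -tr 0.000277777777778 0.000277777777778 \ | ||||
|     -r near \ | ||||
|     nlcd_2019_land_cover_I48_20210604.img \ | ||||
|     nlcd_2019_land_cover_I48_20210604_resample.tif | ||||
| ``` | ||||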
|  | ||||
| ### **Licenses and Source Citations** | ||||
|  | ||||
| #### **proc_lidar_2019** | ||||
| Available for public use with no restrictions | ||||
|  | ||||
| Disclaimer and quality information is at https://rockyweb.usgs.gov/vdelivery/Datasets/Staged/Elevation/Non_Standard_Contributed/NGA_US_Cities/00_NGA%20133%20US%20Cities%20Data%20Disclaimer%20and%20Explanation%20Readme.pdf  | ||||
|  | ||||
| #### **Globe** | ||||
| Public domain | ||||
|  | ||||
| NOAA National Geophysical Data Center. 1999: Global Land One-kilometer Base Elevation (GLOBE) v.1. NOAA National Centers for Environmental Information. https://doi.org/10.7289/V52R3PMS. Accessed TBD | ||||
|  | ||||
| #### **3DEP** | ||||
| Public domain | ||||
|  | ||||
| Data available from U.S. Geological Survey, National Geospatial Program. | ||||
|  | ||||
| #### **srtm1arcsecond** | ||||
| Public domain | ||||
|  | ||||
| NASA JPL (2013). NASA Shuttle Radar Topography Mission Global 1 arc second [Data set]. NASA EOSDIS Land Processes Distributed Active Archive Center.  | ||||
|  | ||||
| #### **CDSM** | ||||
| Natural Resources Canada. (2015). Canadian Digital Surface Model [Data set]. https://open.canada.ca/data/en/dataset/768570f8-5761-498a-bd6a-315eb6cc023d. Contains information licensed under the Open Government Licence – Canada. | ||||
|  | ||||
| #### **NLCD** | ||||
| Public domain | ||||
|  | ||||
| References: | ||||
|  | ||||
| Dewitz, J., and U.S. Geological Survey, 2021, National Land Cover Database (NLCD) 2019 Products (ver. 2.0, June 2021): U.S. Geological Survey data release, https://doi.org/10.5066/P9KZCM54 | ||||
|  | ||||
| Wickham, J., Stehman, S.V., Sorenson, D.G., Gass, L., and Dewitz, J.A., 2021, Thematic accuracy assessment of the NLCD 2016 land cover for the conterminous United States: Remote Sensing of Environment, v. 257, art. no. 112357, at https://doi.org/10.1016/j.rse.2021.112357  | ||||
|  | ||||
| Homer, Collin G., Dewitz, Jon A., Jin, Suming, Xian, George, Costello, C., Danielson, Patrick, Gass, L., Funk, M., Wickham, J., Stehman, S., Auch, Roger F., Riitters, K. H., Conterminous United States land cover change patterns 2001–2016 from the 2016 National Land Cover Database: ISPRS Journal of Photogrammetry and Remote Sensing, v. 162, p. 184–199, at https://doi.org/10.1016/j.isprsjprs.2020.02.019 | ||||
|  | ||||
| Jin, Suming, Homer, Collin, Yang, Limin, Danielson, Patrick, Dewitz, Jon, Li, Congcong, Zhu, Z., Xian, George, Howard, Danny, Overall methodology design for the United States National Land Cover Database 2016 products: Remote Sensing, v. 11, no. 24, at https://doi.org/10.3390/rs11242971 | ||||
|  | ||||
| Yang, L., Jin, S., Danielson, P., Homer, C., Gass, L., Case, A., Costello, C., Dewitz, J., Fry, J., Funk, M., Grannemann, B., Rigge, M. and G. Xian. 2018. A New Generation of the United States National Land Cover Database: Requirements, Research Priorities, Design, and Implementation Strategies, ISPRS Journal of Photogrammetry and Remote Sensing, 146, pp.108-123. | ||||
|  | ||||
| #### **2020 Canada Land Cover** | ||||
| Natural Resources Canada. (2022). 2020 Land Cover of Canada [Data set]. https://open.canada.ca/data/en/dataset/ee1580ab-a23d-4f86-a09b-79763677eb47. Contains information licensed under the Open Government Licence – Canada. | ||||
|  | ||||
| #### **Corine Land Cover** | ||||
|  | ||||
| Access to data is based on a principle of full, open and free access as established by the Copernicus data and information policy Regulation (EU) No 1159/2013 of 12 July 2013. This regulation establishes registration and licensing conditions for GMES/Copernicus users and can be found [here](http://eur-lex.europa.eu/legal-content/EN/TXT/?uri=CELEX%3A32013R1159) | ||||
|  | ||||
| Free, full and open access to this data set is made on the conditions that: | ||||
|  | ||||
| 1. When distributing or communicating Copernicus dedicated data and Copernicus service information to the public, users shall inform the public of the source of that data and information. | ||||
| 2. Users shall make sure not to convey the impression to the public that the user's activities are officially endorsed by the Union. | ||||
| 3. Where that data or information has been adapted or modified, the user shall clearly state this. | ||||
| 4. The data remain the sole property of the European Union. Any information and data produced in the framework of the action shall be the sole property of the European Union. Any communication and publication by the beneficiary shall acknowledge that the data were produced “with funding by the European Union”. | ||||
|  | ||||
| Reference: | ||||
|  | ||||
| ©European Union, Copernicus Land Monitoring Service 2018, European Environment Agency (EEA) | ||||
|  | ||||
| #### **population** | ||||
| Creative Commons Attribution 4.0 International (CC BY) License (https://creativecommons.org/licenses/by/4.0) | ||||
|  | ||||
| Center for International Earth Science Information Network - CIESIN - Columbia University. 2018. Gridded Population of the World, Version 4 (GPWv4): Population Density, Revision 11. Palisades, New York: NASA Socioeconomic Data and Applications Center (SEDAC). https://doi.org/10.7927/H49C6VHW  | ||||
|  | ||||
| #### **Canada country boundary** | ||||
| Statistics Canada (2022). Boundary Files, Census Year 2021 [Data set]. https://www12.statcan.gc.ca/census-recensement/2021/geo/sip-pis/boundary-limites/index2021-eng.cfm?year=21. Reproduced and distributed on an "as is" basis with the permission of Statistics Canada. | ||||
|  | ||||
|  | ||||
| #### **winnforum databases** | ||||
| Available for public use under the copyright of The Software Defined Radio Forum, Inc. doing business as the Wireless Innovation Forum. | ||||
|  | ||||
| THIS DOCUMENT (OR WORK PRODUCT) IS BEING OFFERED WITHOUT ANY WARRANTY WHATSOEVER, AND IN PARTICULAR, ANY WARRANTY OF NON-INFRINGEMENT IS EXPRESSLY DISCLAIMED.  ANY USE OF THIS SPECIFICATION (OR WORK PRODUCT) SHALL BE MADE ENTIRELY AT THE IMPLEMENTER'S OWN RISK, AND NEITHER THE FORUM, NOR ANY OF ITS MEMBERS OR SUBMITTERS, SHALL HAVE ANY LIABILITY WHATSOEVER TO ANY IMPLEMENTER OR THIRD PARTY FOR ANY DAMAGES OF ANY NATURE WHATSOEVER, DIRECTLY OR INDIRECTLY, ARISING FROM THE USE OF THIS DOCUMENT (OR WORK PRODUCT). | ||||
|  | ||||
| ## **Database Update** | ||||
|  | ||||
| ### **Expected update frequency of each database file** | ||||
| * **FS_Database:** daily (per FCC and ISED requirements). Note that the RAS database portion is expected to be updated rarely. | ||||
| * **proc_lidar_2019:** every few years (whenever a new database is available) | ||||
| * **Multiband-BDesign3D:** no change (unless a newer building database for Manhattan is needed) | ||||
| * **globe:** every few years (whenever a new database is available) | ||||
| * **srtm3arcsecondv003:** every few years (whenever a new database is available) | ||||
| * **srtm1arcsecond:** every few years (whenever a new database is available) | ||||
| * **3dep:** every few years (whenever a new database is available) | ||||
| * **cdsm:** every few years (whenever a new database is available) | ||||
| * **nlcd:** every few years (whenever a new database is available) | ||||
| * **2020canadalandcover:** every few years (whenever a new database is available) | ||||
| * **clc:** every few years (whenever a new database is available) | ||||
| * **population:** every few years (whenever a new database is available) | ||||
| * **US.kml:** every few years (whenever an updated country boundary is available) | ||||
| * **CA.kml:** every few years (whenever an updated country boundary is available) | ||||
| * **GB.kml:** every few years (whenever an updated country boundary is available) | ||||
| * **BRA.kml:** every few years (whenever an updated country boundary is available) | ||||
| * **itudata:** these haven't been updated for a long time but can be updated if new maps are generated. | ||||
| * **winnforum databases:** updates may come as often as every 6 months (at the discretion of WinnForum) | ||||
|  | ||||
| ### **Database update procedure** | ||||
| * **FS_Database:** FS Script Parser automatically updates this daily (see next section) | ||||
|  | ||||
| * **proc_lidar_2019:** download lidar database and post-process | ||||
|  | ||||
| * **globe:** download the database. Convert to WGS84 using the open-afc/tools/geo_converters/to_wgs84.py script. | ||||
| * **srtm3arcsecondv003:** download the database. Convert to WGS84 using the open-afc/tools/geo_converters/to_wgs84.py script. | ||||
| * **srtm1arcsecond:** download the database. Convert to WGS84 using the open-afc/tools/geo_converters/to_wgs84.py script. | ||||
| * **3dep:** download the database. Convert to WGS84 using the open-afc/tools/geo_converters/to_wgs84.py script. | ||||
| * **cdsm:** download the database. Follow the procedure described in open-afc/tools/geo_converters for the Canada CDSM surface model. | ||||
| * **nlcd:** download the database, run the proper gdal utilities to orient the tiles to match the 3DEP 1-arcsec tiles, and put the result in the proper directory | ||||
| * **clc:** download the database, run the gdal scripts (open-afc/tools/geo_converters) to convert the data categorization mapping and coordinate system, and put the result in the proper directory | ||||
| * **2020canadalandcover:** download the database, run the gdal scripts (open-afc/tools/geo_converters) to convert the data categorization mapping and coordinate system, and put the result in the proper directory | ||||
| * **population:** download the database and put it in the proper directory | ||||
|  | ||||
|  | ||||
| ## **Database Processing/Format** | ||||
| ### **Processing done (if any) on the original database to convert it to a format usable by afc-engine** | ||||
| * **FS_Database:** generated by the FS Script Parser. | ||||
| * **LiDAR_Database:** generated by significant post-processing. For each city: | ||||
|    * (1) Identify the available bare-earth and building files. | ||||
|    * (2) Identify pairs of files, where a pair consists of a bare-earth file and a building polygon file that cover the same region. | ||||
|    * (3) Convert the bare-earth file into a tif raster file, and convert the building polygon file into a tif raster file, with both files on the same lon/lat grid. | ||||
|    * (4) Combine the bare-earth and building tif files into a single tif file with bare earth on Band 1 and building height on Band 2; both bands hold AMSL values (a gdal sketch of this step follows this list). | ||||
|    * (5) Under the target directory, create a directory for each city; under each city, create the directory structure containing the combined tif files. | ||||
|    * (6) For each city, create an info file listing the tif files and the min/max lon/lat of each file. | ||||
| * **srtm3arcsecondv003:** the two SRTM tiles over Manhattan are removed since they erroneously contain building height | ||||
| * **country.KML:** some post-processing is done that is not documented here, as we are moving to a different processing flow. | ||||
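|  | ||||
| A minimal sketch of step (4) of the LiDAR post-processing above, assuming the bare-earth and building rasters of a pair are already on the same lon/lat grid; the file names and the use of the gdal Python bindings are illustrative: | ||||
|  | ||||
| ``` | ||||
| from osgeo import gdal | ||||
|  | ||||
| def combine_pair(bare_path, bldg_path, out_path): | ||||
|     """Write a 2-band GeoTIFF: Band 1 bare earth, Band 2 building height.""" | ||||
|     bare = gdal.Open(bare_path) | ||||
|     bldg = gdal.Open(bldg_path) | ||||
|     drv = gdal.GetDriverByName("GTiff") | ||||
|     out = drv.Create(out_path, bare.RasterXSize, bare.RasterYSize, | ||||
|                      2, gdal.GDT_Float32) | ||||
|     out.SetGeoTransform(bare.GetGeoTransform())  # same grid as bare earth | ||||
|     out.SetProjection(bare.GetProjection()) | ||||
|     out.GetRasterBand(1).WriteArray(bare.GetRasterBand(1).ReadAsArray()) | ||||
|     out.GetRasterBand(2).WriteArray(bldg.GetRasterBand(1).ReadAsArray()) | ||||
|     out.FlushCache() | ||||
|  | ||||
| combine_pair("city_bare_earth.tif", "city_buildings.tif", "city_combined.tif") | ||||
| ``` | ||||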
|  | ||||
| * **Near Field Adjustment Factor File:** "nfa_table_data.csv" is created as follows. | ||||
|  | ||||
| 1. Download "WINNF-TS-1014-V1.2.0-App01 6GHz Functional Requirements - Appendix A.xlsx" from [https://6ghz.wirelessinnovation.org/baseline-standards](https://6ghz.wirelessinnovation.org/baseline-standards) | ||||
|  | ||||
|  | ||||
| 2. Note that this .xlsx file has 18 tabs labeled "xdB = 0 dB", "xdB = -1 dB", "xdB = -2 dB", ..., "xdB = -17 dB".  Also note that in each of these 18 tabs, there is data for efficiency values ($\eta$) ranging from 0.4 to 0.7 in steps of 0.05.  Further note the following:  | ||||
|     - For each xdB and efficiency, there is a two column dataset with columns labeled u and dB  | ||||
|     - The dB value is the adjustment factor. | ||||
|     - For each of these 2 column datasets, the last value of adjustment factor is 0 | ||||
|     - For each of these 2 column datasets, u begins at 0 and increases monotonically to a max value for which adjustment value is 0.  | ||||
|     - For each xdB value, the max value of u for efficiency = 0.4 is >= the max value of u for any of the other efficiency values shown. | ||||
|  | ||||
| 3. Interpolate the data.  For each 2 column dataset, use linear interpolation to compute the adjustment factor for 0.05 increments in u. | ||||
|  | ||||
| 4. Pad the data.  For each 2 column dataset, append values of u in 0.05 increments up to the max u value for efficiency = 0.4.  For these appended values append 0 for the adjustment factor value. | ||||
|  | ||||
| 5. Assemble all of this data into a single four-column CSV file named nfa_table_data.csv. The file header is "xdB,u,efficiency,NFA"; subsequent data lines list the values of xdB, u, efficiency, and NFA for each of the interpolated/padded 2-column datasets (a sketch of steps 3-5 follows). | ||||
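|  | ||||
| A minimal sketch of steps 3-5 for a single (xdB, efficiency) dataset, assuming the two-column data has already been extracted from the .xlsx into arrays; numpy is assumed and the sample values are invented: | ||||
|  | ||||
| ``` | ||||
| import csv | ||||
| import numpy as np | ||||
|  | ||||
| def resample(u, dB, u_max): | ||||
|     """Steps 3-4: interpolate dB onto a 0.05 grid in u, padding with 0.""" | ||||
|     grid = np.round(np.arange(0.0, u_max + 1e-9, 0.05), 2) | ||||
|     return grid, np.interp(grid, u, dB, right=0.0) | ||||
|  | ||||
| # Invented stand-in for one dataset: (xdB, efficiency, u[], dB[]) | ||||
| datasets = [(-1, 0.4, [0.0, 0.5, 1.0], [-3.0, -1.2, 0.0])] | ||||
| u_max_eta04 = 1.0  # max u of the efficiency = 0.4 dataset for this xdB | ||||
|  | ||||
| with open("nfa_table_data.csv", "w", newline="") as f:  # step 5 | ||||
|     w = csv.writer(f) | ||||
|     w.writerow(["xdB", "u", "efficiency", "NFA"]) | ||||
|     for xdB, eff, u, dB in datasets: | ||||
|         grid, nfa = resample(u, dB, u_max_eta04) | ||||
|         for ui, ni in zip(grid, nfa): | ||||
|             w.writerow([xdB, ui, eff, ni]) | ||||
| ``` | ||||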
|  | ||||
| * **Near Field Adjustment Factor for Passive Repeaters File:** "WINNF-TS-1014-V1.2.0-App02.csv" is created as follows. | ||||
|  | ||||
| 1. Download "WINNF-TS-1014-V1.2.0-App02 6GHz Functional Requirements - Appendix B.xlsx" from [https://6ghz.wirelessinnovation.org/baseline-standards](https://6ghz.wirelessinnovation.org/baseline-standards) | ||||
|  | ||||
| 2. Note that the "Data" tab of this .xlsx file contains tabular gain data, where each column corresponds to a different Q and each row corresponds to a different value of 1/KS. | ||||
|  | ||||
| 3. The algorithm implemented in afc-engine only uses this table for values of KS > 0.4.  This means 1/KS <= 2.5.  The values of 1/KS in the .xlsx file go up to 7.5.  For the purpose of interpolation, keep the first row in the file with 1/KS > 2.5 (2.512241), and delete all rows after this row with larger values of 1/KS. | ||||
|  | ||||
| 4. Count the number of Q values in the table (NQ). | ||||
|  | ||||
| 5. Count the number of 1/KS values in the file (NK). | ||||
|  | ||||
| 6. Replace the upper-left cell of the table, which contains "1/KS", with "NQ:NK", where NQ and NK are the counts from steps 4 and 5 above. | ||||
|  | ||||
| 7. Save the Data tab in .csv format as WINNF-TS-1014-V1.2.0-App02.csv (a sketch of steps 3-7 follows). | ||||
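|  | ||||
| A minimal sketch of steps 3-7, assuming the Data tab has first been exported as-is to a file named data_tab.csv (a hypothetical intermediate name) with "1/KS" in its upper-left cell: | ||||
|  | ||||
| ``` | ||||
| import csv | ||||
|  | ||||
| with open("data_tab.csv") as f: | ||||
|     rows = list(csv.reader(f)) | ||||
|  | ||||
| header, body = rows[0], rows[1:] | ||||
| # Step 3: keep the first row with 1/KS > 2.5 and drop all later rows | ||||
| cut = next(i for i, r in enumerate(body) if float(r[0]) > 2.5) | ||||
| body = body[:cut + 1] | ||||
|  | ||||
| nq = len(header) - 1       # step 4: number of Q columns | ||||
| nk = len(body)             # step 5: number of 1/KS rows | ||||
| header[0] = f"{nq}:{nk}"   # step 6: replace the "1/KS" cell | ||||
|  | ||||
| with open("WINNF-TS-1014-V1.2.0-App02.csv", "w", newline="") as f: | ||||
|     csv.writer(f).writerows([header] + body)  # step 7 | ||||
| ``` | ||||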
|  | ||||
| ### **Scripts to be used and procedure to invoke these scripts** | ||||
| ##### FS_Database: | ||||
| FS Script Parser. The AFC Administrator can run the parser manually or set the time for the daily update. The parser fetches the raw daily and weekly ULS data from the FCC website. | ||||
|  | ||||
| ##### NLCD creation: | ||||
| ###### Step 1. | ||||
| Ensure that the gdal utilities are installed on your machine (gdal version 3.3.3 is currently used): | ||||
| ``` | ||||
| dnf install gdal | ||||
| ``` | ||||
|  | ||||
| ###### Step 2. | ||||
| Get the extents of the original file by executing the command below: | ||||
| ``` | ||||
| gdalinfo -norat -nomd -noct   nlcd_2019_land_cover_l48_20210604.img | ||||
| ``` | ||||
| ###### Corner Coordinates: | ||||
| ``` | ||||
| Upper Left  (-2493045.000, 3310005.000) (130d13'58.18"W, 48d42'26.63"N) | ||||
| Lower Left  (-2493045.000,  177285.000) (119d47' 9.98"W, 21d44'32.31"N) | ||||
| Upper Right ( 2342655.000, 3310005.000) ( 63d40'19.89"W, 49d10'37.43"N) | ||||
| Lower Right ( 2342655.000,  177285.000) ( 73d35'40.55"W, 22d 4'36.23"N) | ||||
| Center      (  -75195.000, 1743645.000) ( 96d52'22.83"W, 38d43' 4.71"N) | ||||
| ``` | ||||
| ###### Step 3. | ||||
| Define the minimum/maximum latitude and longitude coordinates that contain the entire region covered by the file. To line up with the 3DEP database, each of these values must be an integer multiple of 1 arcsec. From the extents above, the min longitude is 130d13'58.18"W; this is rounded down to an integer multiple of 1 arcsec as -(130 + 15/60). Similarly, the maximum values are rounded up to integer multiples of 1 arcsec. Finally, the resolution is defined as 1 arcsec, which equals 1/3600 degree. The commands below can be typed directly into a bash shell. | ||||
| ``` | ||||
| minLon=`bc <<< 'scale = 6; -(130 + 15/60)'` | ||||
| maxLon=`bc <<< 'scale = 6; -(63 + 37.5/60)'` | ||||
| minLat=`bc <<< 'scale = 6; (21 + 41.25/60)'` | ||||
| maxLat=`bc <<< 'scale = 6; (49 + 11.25/60)'` | ||||
| lonlatRes=`bc <<< 'scale = 20; (1/3600)'` | ||||
|  | ||||
| echo minLon = $minLon | ||||
| echo maxLon = $maxLon | ||||
| echo minLat = $minLat | ||||
| echo maxLat = $maxLat | ||||
| ``` | ||||
| ###### Step 4. | ||||
| Define the input and output files for the conversion using the commands below: | ||||
| ``` | ||||
| fin=nlcd_2019_land_cover_l48_20210604.img | ||||
| fout=nlcd_2019_land_cover_l48_20210604_resample.tif | ||||
| ``` | ||||
|  | ||||
| ###### Step 5. | ||||
| Use the gdal utility gdalwarp to convert the file to the desired output: | ||||
| ``` | ||||
| gdalwarp -t_srs '+proj=longlat +datum=WGS84' -tr $lonlatRes $lonlatRes -te $minLon $minLat $maxLon $maxLat $fin $fout | ||||
| ``` | ||||
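|  | ||||
| The same conversion can also be scripted through the gdal Python bindings; a sketch with the Step 3 extent values hard-coded for illustration: | ||||
|  | ||||
| ``` | ||||
| from osgeo import gdal | ||||
|  | ||||
| res = 1.0 / 3600.0  # 1 arc-second in degrees | ||||
| gdal.Warp("nlcd_2019_land_cover_l48_20210604_resample.tif", | ||||
|           "nlcd_2019_land_cover_l48_20210604.img", | ||||
|           dstSRS="+proj=longlat +datum=WGS84", | ||||
|           xRes=res, yRes=res, | ||||
|           outputBounds=(-130.25, 21.6875, -63.625, 49.1875)) | ||||
| ``` | ||||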
|  | ||||
| ###### Step 6: | ||||
| Combine 900+ federated .int files into a single gdal .vrt file. | ||||
| ``` | ||||
| gdalbuildvrt federated_nlcd.vrt output/*.int | ||||
| ``` | ||||
|  | ||||
| ###### Step 7: | ||||
| Define the input and output files for the Federated file conversion | ||||
| ``` | ||||
| fin=federated_nlcd.vrt | ||||
| fout=federated_nlcd.tif | ||||
| ``` | ||||
|  | ||||
| ###### Step 8: | ||||
| Run the gdal utility gdalwarp to convert the federated file using the exact same file extents as for the nlcd_2019_land_cover_l48_20210604_resample.tif file: | ||||
| ``` | ||||
| gdalwarp -te $minLon $minLat $maxLon $maxLat $fin $fout | ||||
| ``` | ||||
|  | ||||
|  | ||||
| ## **Database Usage** | ||||
|  | ||||
| ### **Expected location of post-processed database on the AFC server** | ||||
| There are three categories of databases: Dynamic, Static, and ULS. | ||||
| 1. **Dynamic:** | ||||
| * These are assets that are subject to change more frequently than Static ones, either by a user interacting with the GUI (AFC Config) or uploading files (AntennaPatterns), or other assets that may change in the future | ||||
| * These are stored via the Object Storage component by default | ||||
|  | ||||
| 2. **Static:** | ||||
| * These are the assets that are not expected to change for at least a year (and some for many years) | ||||
| * They appear in the containers under /mnt/nfs/rat_transfer | ||||
| * Examples are: Terrain (3DEP, SRTM, Globe), Building (LiDAR, Multiband-BDesign3D), NLCD, Population Density | ||||
| * Below are the database directories under /mnt/nfs/rat_transfer | ||||
|   * **ULS_Database:** Fallback (static) ULS_Database in case an active ULS_Database under fbrat is missing | ||||
|   * **srtm3arcsecondv003** | ||||
|   * **RAS_Database** | ||||
|   * **proc_lidar_2019** | ||||
|   * **population** | ||||
|   * **Multiband-BDesign3D** | ||||
|   * **globe** | ||||
|   * **3dep** | ||||
|   * **nlcd** | ||||
|   * **itudata** | ||||
|  | ||||
| 3. **ULS (note: WIP):** | ||||
| * These are the supporting files for the ULS Script Parser, which downloads, processes, and creates the new ULS files | ||||
| * They live under /mnt/nfs/rat_transfer/daily_uls_parse/data_files. | ||||
|   * **WIP:** Functionality built into the API | ||||
|   * Data for yesterdaysDB (used to retain FSIDs from day to day) and the highest known FSID (to avoid collisions; FSIDs are currently not reused) are stored here (see the sketch below). | ||||
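|  | ||||
| A hypothetical illustration of the FSID retention rule described above; the names and data structures are invented for the example and the real parser may differ: | ||||
|  | ||||
| ``` | ||||
| def assign_fsid(key, yesterdays_db, state): | ||||
|     """Reuse yesterday's FSID for a known link; otherwise allocate a new one.""" | ||||
|     if key in yesterdays_db:      # link existed yesterday: keep its FSID | ||||
|         return yesterdays_db[key] | ||||
|     state["max_fsid"] += 1        # new link: next never-used FSID | ||||
|     return state["max_fsid"] | ||||
|  | ||||
| yesterdays_db = {("WQAB123", 1): 4001}  # (callsign, path) -> FSID | ||||
| state = {"max_fsid": 4001}              # highest FSID ever assigned | ||||
| assert assign_fsid(("WQAB123", 1), yesterdays_db, state) == 4001 | ||||
| assert assign_fsid(("WQXY999", 2), yesterdays_db, state) == 4002 | ||||
| ``` | ||||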
|  | ||||
| ## Mappings for use in OpenAFC | ||||
| OpenAFC containers need several mappings to work properly. Assuming that you are using /var/databases on your host to store the databases, you can either select option 1 below (which is assumed in the docker compose shown in the main README) or set the mappings individually as shown in options 2-6. | ||||
|  | ||||
| 1) All databases in one folder - map to /mnt/nfs/rat_transfer | ||||
|       ``` | ||||
|       /var/databases:/mnt/nfs/rat_transfer | ||||
|       ``` | ||||
|       Those databases are: | ||||
|       - 3dep | ||||
|       - daily_uls_parse | ||||
|       - databases | ||||
|       - globe | ||||
|       - itudata | ||||
|       - nlcd | ||||
|       - population | ||||
|       - proc_gdal | ||||
|       - proc_lidar_2019 | ||||
|       - RAS_Database | ||||
|       - srtm3arcsecondv003 | ||||
|       - ULS_Database | ||||
|       - nfa | ||||
|       - pr | ||||
|  | ||||
|  | ||||
| 2) LiDAR Databases to /mnt/nfs/rat_transfer/proc_lidar_2019 | ||||
|       ``` | ||||
|       /var/databases/proc_lidar_2019:/mnt/nfs/rat_transfer/proc_lidar_2019 | ||||
|       ``` | ||||
| 3) RAS database to /mnt/nfs/rat_transfer/RAS_Database | ||||
|       ``` | ||||
|       /var/databases/RAS_Database:/mnt/nfs/rat_transfer/RAS_Database | ||||
|       ``` | ||||
| 4) Actual ULS Databases to /mnt/nfs/rat_transfer/ULS_Database | ||||
|       ``` | ||||
|       /var/databases/ULS_Database:/mnt/nfs/rat_transfer/ULS_Database | ||||
|       ``` | ||||
| 5) Folder with daily ULS Parse data /mnt/nfs/rat_transfer/daily_uls_parse | ||||
|       ``` | ||||
|       /var/databases/daily_uls_parse:/mnt/nfs/rat_transfer/daily_uls_parse | ||||
|       ``` | ||||
| 6) Folder with AFC Config data /mnt/nfs/afc_config (can now be moved to Object Storage by default) | ||||
|       ``` | ||||
|       /var/afc_config:/mnt/nfs/afc_config | ||||
|       ``` | ||||
| **NB: All or almost all files and folders should be owned by user and group 1003 (currently fbrat)** | ||||
|  | ||||
| This can be applied via the following command (mind the real location of these folders on your host system): | ||||
|  | ||||
| ``` | ||||
| chown -R 1003:1003 /var/databases /var/afc_config | ||||
| ``` | ||||
|  | ||||
|  | ||||
							
								
								
									
62  dispatcher/Dockerfile  Normal file
							| @@ -0,0 +1,62 @@ | ||||
| # | ||||
| # Copyright (C) 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| FROM nginx:1.24.0-alpine | ||||
| ENV PYTHONUNBUFFERED=1 | ||||
| RUN apk add --update --no-cache python3 \ | ||||
|     && ln -sf python3 /usr/bin/python \ | ||||
|     && python3 -m ensurepip | ||||
| RUN pip3 install --no-cache --upgrade \ | ||||
|         --root-user-action=ignore pip setuptools | ||||
| # | ||||
| # Install required external and internal python packages | ||||
| # | ||||
| WORKDIR /wd | ||||
| # copy list of external packages | ||||
| COPY dispatcher/requirements.txt /wd | ||||
| # copy internal packages | ||||
| COPY src/afc-packages ./afc-packages | ||||
| # install | ||||
| RUN set -x \ | ||||
|     && pip3 install --no-cache --root-user-action=ignore \ | ||||
|      -r /wd/requirements.txt \ | ||||
|     && pip3 install --use-pep517 --root-user-action=ignore \ | ||||
|         -r /wd/afc-packages/pkgs.dispatcher \ | ||||
|     && rm -rf /wd/afc-packages \ | ||||
|     && pip3 uninstall -y setuptools pip \ | ||||
| # create certificate directories | ||||
|     && mkdir -p \ | ||||
|     /certificates/servers \ | ||||
|     /etc/nginx/templates | ||||
|  | ||||
| # Server side certificates | ||||
| COPY dispatcher/certs/servers/server.cert.pem \ | ||||
| dispatcher/certs/servers/server.key.pem /certificates/servers/ | ||||
| # Default client side CA certificates as a placeholder | ||||
| COPY dispatcher/certs/clients/client.bundle.pem \ | ||||
| /etc/nginx/certs/ | ||||
|  | ||||
| # Copy nginx configuration files | ||||
| COPY dispatcher/nginx.conf /etc/nginx/ | ||||
| COPY dispatcher/nginx.conf.template /etc/nginx/templates/ | ||||
|  | ||||
| COPY dispatcher/acceptor.py /wd/ | ||||
|  | ||||
| ENV AFC_MSGHND_NAME=${AFC_MSGHND_NAME:-msghnd} | ||||
| ENV AFC_MSGHND_PORT=${AFC_MSGHND_PORT:-8000} | ||||
| ENV AFC_WEBUI_NAME=${AFC_WEBUI_NAME:-rat_server} | ||||
| ENV AFC_WEBUI_PORT=${AFC_WEBUI_PORT:-80} | ||||
| ENV AFC_ENFORCE_HTTPS=${AFC_ENFORCE_HTTPS:-TRUE} | ||||
| ENV AFC_SERVER_NAME=${AFC_SERVER_NAME:-"_"} | ||||
| ENV AFC_ENFORCE_MTLS=${AFC_ENFORCE_MTLS:-false} | ||||
| ENV AFC_PROXY_CONN_TOUT=${AFC_PROXY_CONN_TOUT:-720} | ||||
| # | ||||
| ENV AFC_DEVEL_ENV=${AFC_DEVEL_ENV:-production} | ||||
| COPY dispatcher/entrypoint.sh / | ||||
| RUN chmod +x /entrypoint.sh | ||||
| CMD ["/entrypoint.sh"] | ||||
| HEALTHCHECK --start-period=60s --interval=30s --retries=1 \ | ||||
|     CMD curl --fail http://localhost/fbrat/ap-afc/healthy || exit 1 | ||||
							
								
								
									
237  dispatcher/acceptor.py  Executable file
							| @@ -0,0 +1,237 @@ | ||||
| #!/usr/bin/env python3 | ||||
| # | ||||
| # Copyright (C) 2021 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
|  | ||||
| """ | ||||
| Description | ||||
|  | ||||
| The acceptor client (aka consumer) registers its own queue with the broker | ||||
| application (aka rabbitmq). The queue is used as a channel for control commands. | ||||
| """ | ||||
|  | ||||
| from appcfg import BrokerConfigurator, ObjstConfig | ||||
| import os | ||||
| import sys | ||||
| from sys import stdout | ||||
| import logging | ||||
| from logging.config import dictConfig | ||||
| import argparse | ||||
| import inspect | ||||
| import gevent | ||||
| import subprocess | ||||
| import shutil | ||||
| from ncli import MsgAcceptor | ||||
| from hchecks import MsghndHealthcheck, ObjstHealthcheck | ||||
| from fst import DataIf | ||||
|  | ||||
| dictConfig({ | ||||
|     'version': 1, | ||||
|     'disable_existing_loggers': False, | ||||
|     'formatters': { | ||||
|         'standard': { | ||||
|             'format': '%(asctime)s - [%(levelname)s] %(name)s [%(module)s.%(funcName)s:%(lineno)d]: %(message)s', | ||||
|             'datefmt': '%Y-%m-%d %H:%M:%S', | ||||
|         } | ||||
|     }, | ||||
|     'handlers': { | ||||
|         'console': { | ||||
|             'level': 'DEBUG', | ||||
|             'class': 'logging.StreamHandler', | ||||
|             'formatter': 'standard', | ||||
|         } | ||||
|     }, | ||||
|     'root': { | ||||
|         'level': 'INFO', | ||||
|         'handlers': ['console'] | ||||
|     }, | ||||
| }) | ||||
| app_log = logging.getLogger() | ||||
|  | ||||
|  | ||||
| class Configurator(dict): | ||||
|     __instance = None | ||||
|  | ||||
|     def __new__(cls): | ||||
|         if cls.__instance is None: | ||||
|             cls.__instance = dict.__new__(cls) | ||||
|         return cls.__instance | ||||
|  | ||||
|     def __init__(self): | ||||
|         dict.__init__(self) | ||||
|         self.update(BrokerConfigurator().__dict__.items()) | ||||
|         self.update(ObjstConfig().__dict__.items()) | ||||
|         self['OBJST_CERT_CLI_BUNDLE'] = \ | ||||
|             'certificate/client.bundle.pem' | ||||
|         self['DISPAT_CERT_CLI_BUNDLE'] = \ | ||||
|             '/etc/nginx/certs/client.bundle.pem' | ||||
|         self['DISPAT_CERT_CLI_BUNDLE_DFLT'] = \ | ||||
|             '/etc/nginx/certs/client.bundle.pem_dflt' | ||||
|  | ||||
|  | ||||
| log_level_map = { | ||||
|     'debug': logging.DEBUG,    # 10 | ||||
|     'info': logging.INFO,      # 20 | ||||
|     'warn': logging.WARNING,   # 30 | ||||
|     'err': logging.ERROR,      # 40 | ||||
|     'crit': logging.CRITICAL,  # 50 | ||||
| } | ||||
|  | ||||
|  | ||||
| def set_log_level(opt) -> int: | ||||
|     app_log.info(f"({os.getpid()}) {inspect.stack()[0][3]}() " | ||||
|                  f"{app_log.getEffectiveLevel()}") | ||||
|     app_log.setLevel(log_level_map[opt]) | ||||
|     return log_level_map[opt] | ||||
|  | ||||
|  | ||||
| def readiness_check(cfg): | ||||
|     """Provide a readiness check by querying a preconfigured | ||||
|        list of subjects (containers) for a response | ||||
|     """ | ||||
|     app_log.debug(f"({os.getpid()}) {inspect.stack()[0][3]}()") | ||||
|     objst_chk = ObjstHealthcheck(cfg) | ||||
|     msghnd_chk = MsghndHealthcheck.from_hcheck_if() | ||||
|     checks = [gevent.spawn(objst_chk.healthcheck), | ||||
|               gevent.spawn(msghnd_chk.healthcheck)] | ||||
|     gevent.joinall(checks) | ||||
|     for i in checks: | ||||
|         if i.value != 0: | ||||
|             return i.value | ||||
|     return 0 | ||||
|  | ||||
|  | ||||
| def run_restart(cfg): | ||||
|     """Fetch the client certificate bundle from object storage and reload nginx""" | ||||
|     app_log.debug(f"({os.getpid()}) {inspect.stack()[0][3]}()") | ||||
|     with DataIf().open(cfg['OBJST_CERT_CLI_BUNDLE']) as hfile: | ||||
|         if hfile.head(): | ||||
|             app_log.debug(f"Found cert bundle file.") | ||||
|             with open(cfg['DISPAT_CERT_CLI_BUNDLE'], 'w') as ofile: | ||||
|                 ofile.write(hfile.read().decode('utf-8')) | ||||
|             app_log.info(f"{os.path.getctime(cfg['DISPAT_CERT_CLI_BUNDLE'])}, " | ||||
|                          f"{os.path.getsize(cfg['DISPAT_CERT_CLI_BUNDLE'])}") | ||||
|         else: | ||||
|             app_log.debug(f"({os.getpid()}) {inspect.stack()[0][3]}()") | ||||
|             # use default certificate (placeholder) | ||||
|             # in any case of missing file, no more certificates included | ||||
|             app_log.info(f"Missing certificate file " | ||||
|                          f"{cfg['OBJST_CERT_CLI_BUNDLE']}, back to default " | ||||
|                          f"{cfg['DISPAT_CERT_CLI_BUNDLE_DFLT']}") | ||||
|             shutil.copy2(cfg['DISPAT_CERT_CLI_BUNDLE_DFLT'], | ||||
|                          cfg['DISPAT_CERT_CLI_BUNDLE']) | ||||
|         p = subprocess.Popen("nginx -s reload", | ||||
|                              stdout=subprocess.PIPE, shell=True) | ||||
|         app_log.info(f"{p.communicate()}") | ||||
|  | ||||
|  | ||||
| def run_remove(cfg): | ||||
|     """Remove the installed certificate bundle, restore the default one and reload nginx""" | ||||
|     app_log.debug(f"({os.getpid()}) {inspect.stack()[0][3]}() " | ||||
|                   f"{cfg['DISPAT_CERT_CLI_BUNDLE']}") | ||||
|     os.unlink(cfg['DISPAT_CERT_CLI_BUNDLE']) | ||||
|     # restore builtin certificate from backup | ||||
|     app_log.debug(f"({os.getpid()}) {inspect.stack()[0][3]}() " | ||||
|                   f"restore default certificate " | ||||
|                   f"{cfg['DISPAT_CERT_CLI_BUNDLE_DFLT']}") | ||||
|     shutil.copy2(cfg['DISPAT_CERT_CLI_BUNDLE_DFLT'], | ||||
|                  cfg['DISPAT_CERT_CLI_BUNDLE']) | ||||
|     p = subprocess.Popen("nginx -s reload", | ||||
|                          stdout=subprocess.PIPE, shell=True) | ||||
|     app_log.info(f"{p.communicate()}") | ||||
|  | ||||
|  | ||||
| commands_map = { | ||||
|     'cmd_restart': run_restart, | ||||
|     'cmd_remove': run_remove, | ||||
| } | ||||
|  | ||||
|  | ||||
| def get_commands(cfg, msg): | ||||
|     """Dispatch an incoming control message to its handler""" | ||||
|     app_log.debug(f"({os.getpid()}) {inspect.stack()[0][3]}()") | ||||
|     commands_map[msg](cfg) | ||||
|  | ||||
|  | ||||
| def run_it(cfg): | ||||
|     """Execute command line run command""" | ||||
|     app_log.debug(f"({os.getpid()}) {inspect.stack()[0][3]}()") | ||||
|  | ||||
|     # backup builtin certificate as a default one | ||||
|     shutil.copy2(cfg['DISPAT_CERT_CLI_BUNDLE'], | ||||
|                  cfg['DISPAT_CERT_CLI_BUNDLE_DFLT']) | ||||
|     # check if lucky to find new certificate bundle already | ||||
|     run_restart(cfg) | ||||
|  | ||||
|     maker = MsgAcceptor(cfg['BROKER_URL'], cfg['BROKER_EXCH_DISPAT'], | ||||
|                         msg_handler=get_commands, handler_params=cfg) | ||||
|     app_log.info(f"({os.getpid()}) Connected to {cfg['BROKER_URL']}") | ||||
|     maker.run() | ||||
|  | ||||
|  | ||||
| # available commands to execute in alphabetical order | ||||
| execution_map = { | ||||
|     'run': run_it, | ||||
|     'check': readiness_check, | ||||
| } | ||||
|  | ||||
|  | ||||
| def make_arg_parser(): | ||||
|     """Define command line options""" | ||||
|     args_parser = argparse.ArgumentParser( | ||||
|         epilog=__doc__.strip(), | ||||
|         formatter_class=argparse.RawTextHelpFormatter) | ||||
|     args_parser.add_argument('--log', type=set_log_level, | ||||
|                              default='info', dest='log_level', | ||||
|                              help="<info|debug|warn|err|crit> - set " | ||||
|                              "logging level (default=info).\n") | ||||
|     args_parser.add_argument('--cmd', choices=execution_map.keys(), | ||||
|                              nargs='?', | ||||
|                              help="run - start accepting commands.\n" | ||||
|                              "check - run readiness check.\n") | ||||
|  | ||||
|     return args_parser | ||||
|  | ||||
|  | ||||
| def prepare_args(parser, cfg): | ||||
|     """Prepare required parameters""" | ||||
|     app_log.debug(f"{inspect.stack()[0][3]}() {parser.parse_args()}") | ||||
|     cfg.update(vars(parser.parse_args())) | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     """Main function of the utility""" | ||||
|     parser = make_arg_parser() | ||||
|     config = Configurator() | ||||
|     prepare_args(parser, config) | ||||
|  | ||||
|     if config['cmd'] is None: | ||||
|         # no command given - print usage and exit | ||||
|         parser.print_help() | ||||
|         sys.exit(0) | ||||
|  | ||||
|     app_log.debug(f"{inspect.stack()[0][3]}() {config}") | ||||
|     sys.exit(execution_map[config['cmd']](config)) | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     try: | ||||
|         main() | ||||
|     except KeyboardInterrupt: | ||||
|         sys.exit(1) | ||||
|  | ||||
|  | ||||
| # Local Variables: | ||||
| # mode: Python | ||||
| # indent-tabs-mode: nil | ||||
| # python-indent: 4 | ||||
| # End: | ||||
| # | ||||
| # vim: sw=4:et:tw=80:cc=+1 | ||||
							
								
								
									
32  dispatcher/certs/clients/client.bundle.pem  Normal file
							| @@ -0,0 +1,32 @@ | ||||
| -----BEGIN CERTIFICATE----- | ||||
| MIIFhzCCA2+gAwIBAgIUY5uGP2XsWuXYbKzpAgKjjBRQJU0wDQYJKoZIhvcNAQEL | ||||
| BQAwWjELMAkGA1UEBhMCSUwxDzANBgNVBAgMBklzcmFlbDERMA8GA1UEBwwIVGVs | ||||
| IEF2aXYxETAPBgNVBAoMCEJyb2FkY29tMRQwEgYDVQQDDAtBRkMgVGVzdGluZzAe | ||||
| Fw0yMzA1MzEwOTUzMjFaFw0zMzA1MjgwOTUzMjFaMFoxCzAJBgNVBAYTAklMMQ8w | ||||
| DQYDVQQIDAZJc3JhZWwxETAPBgNVBAcMCFRlbCBBdml2MREwDwYDVQQKDAhCcm9h | ||||
| ZGNvbTEUMBIGA1UEAwwLQUZDIFRlc3RpbmcwggIiMA0GCSqGSIb3DQEBAQUAA4IC | ||||
| DwAwggIKAoICAQCegQz88G0AH1xQuRMaITEltVinWHEnxODjE9+gaT4qoCfz7keJ | ||||
| qq0ZRyu1y8oWlV8AGU1w/eWR7MXc+qI1+BzRgecJCStDr/NJhgrlPgGMj8TBO3AM | ||||
| 8M9TJk+1/pBAVZwwOIw8eWBNQVe4Ws+xhh88V8j/mG0beFQMwj5qzlQnmhkqWHIP | ||||
| 0btmZ2KxRUFIzVS7daf8dOk9fKAqFFtmDIYPnq7vAsVYSl+xQVcTsn8wDcE7Nv9O | ||||
| 4Ctd/xy7Jo51zJKC93kspiobP3ca/CNCVMpq5FpVRKopMe/4Zb39t+owL5L2O20j | ||||
| ++TE1CeugoGImR1VhIjnI98Qo9r3M2SkaYD+R5A0oFfiJOB9MWJb2JYKXcSbDE+S | ||||
| EuqiXB5J4rKINAG6EeXcLhtztQ+StN275mOKnaFqH6Sj/v15AUT2HFYNITTgEXkX | ||||
| LirR2tKhmiXSY3j9LH7EM8M6COeGjDensCw+zUbAUWWZb/AFRlWwzoq+fkvZ+BBO | ||||
| yW57wOLV3TSHwz2H92vC6V5jjSdLhmUTO6cWhoJfl7A7/zUZAaudPexRstrYvVJQ | ||||
| IYgt/qm5u3NmbZMCuCMdTWByK79tekUgS579+SjHh4sCCefQURCT8pFwthovhgEZ | ||||
| 6A3orWhgfHiXjSsZ9DZSydKMWFt86OlO4Kq3p7jdZuuFgh+PmzP4cP6yXQIDAQAB | ||||
| o0UwQzASBgNVHRMBAf8ECDAGAQH/AgEBMA4GA1UdDwEB/wQEAwIBRjAdBgNVHQ4E | ||||
| FgQURn6ApnN3kwblVs7owTr3PQqfBnwwDQYJKoZIhvcNAQELBQADggIBAIpDebfL | ||||
| VO1CgXMvmEnMv+96pQd2GfWQj6+G/FFZm9l0dN3Hek328+4zKfxRLSHW1PF3XEPK | ||||
| WbUvukZYp+748L8X//TMXYH5xEQR3igWyjEDVpUXSJEqKkktgJ3SUeQjvq20nVA8 | ||||
| jiVQ8UjMGCsIiWFmz45kH3CkaN4CX/HYnEk2XGzob8sGc7nss0oLGVEKtcN4lFOb | ||||
| tD1R4taiYuoz71sCuOVx5m7c0ja/D0/FhXleZ3CR8qXPKTr6FiYPbwvlKAAg2pC2 | ||||
| ZtOb6UQ8rwad17HTCIv3/mEfRANVcod8GzZaiJJo7XJAwbdcB7xkJ6rRWe2PPpcK | ||||
| MCmvUErENGdDmSLULAHhbylGRDOa/BFQCBI7F+rRzrNdBbC2X5EQJP4HfMrjrR3X | ||||
| +5RAk+eyambTRvetRP8TNbUkjRJshUx/DVeFnHsyA3jvLsVZmZQF8ynFEEMsK1Ba | ||||
| wnBbBXoeZlK6bo8R/YSRhzewv+XegS62vDGb6rUe7aj6BRUR8BTnc3PF+opyUlQz | ||||
| +WfHJyFaAljE675GV0xBo3dAAMcF+0IESAcd68UHhVfebAPfLQ0D/9ksVXzm5A6J | ||||
| 51tt1dMntf4YAl+qGkAPJ5WaZmYPILrfwtuA3jA4LmrQD23wUlQqOyFYA3n/s9wo | ||||
| N1ek3w2xwY3/v24M2si/8OSEWpgtZzr5iw7q | ||||
| -----END CERTIFICATE----- | ||||
							
								
								
									
32  dispatcher/certs/clients/test_ca_crt.pem  Normal file
							| @@ -0,0 +1,32 @@ | ||||
| -----BEGIN CERTIFICATE----- | ||||
| MIIFhzCCA2+gAwIBAgIUY5uGP2XsWuXYbKzpAgKjjBRQJU0wDQYJKoZIhvcNAQEL | ||||
| BQAwWjELMAkGA1UEBhMCSUwxDzANBgNVBAgMBklzcmFlbDERMA8GA1UEBwwIVGVs | ||||
| IEF2aXYxETAPBgNVBAoMCEJyb2FkY29tMRQwEgYDVQQDDAtBRkMgVGVzdGluZzAe | ||||
| Fw0yMzA1MzEwOTUzMjFaFw0zMzA1MjgwOTUzMjFaMFoxCzAJBgNVBAYTAklMMQ8w | ||||
| DQYDVQQIDAZJc3JhZWwxETAPBgNVBAcMCFRlbCBBdml2MREwDwYDVQQKDAhCcm9h | ||||
| ZGNvbTEUMBIGA1UEAwwLQUZDIFRlc3RpbmcwggIiMA0GCSqGSIb3DQEBAQUAA4IC | ||||
| DwAwggIKAoICAQCegQz88G0AH1xQuRMaITEltVinWHEnxODjE9+gaT4qoCfz7keJ | ||||
| qq0ZRyu1y8oWlV8AGU1w/eWR7MXc+qI1+BzRgecJCStDr/NJhgrlPgGMj8TBO3AM | ||||
| 8M9TJk+1/pBAVZwwOIw8eWBNQVe4Ws+xhh88V8j/mG0beFQMwj5qzlQnmhkqWHIP | ||||
| 0btmZ2KxRUFIzVS7daf8dOk9fKAqFFtmDIYPnq7vAsVYSl+xQVcTsn8wDcE7Nv9O | ||||
| 4Ctd/xy7Jo51zJKC93kspiobP3ca/CNCVMpq5FpVRKopMe/4Zb39t+owL5L2O20j | ||||
| ++TE1CeugoGImR1VhIjnI98Qo9r3M2SkaYD+R5A0oFfiJOB9MWJb2JYKXcSbDE+S | ||||
| EuqiXB5J4rKINAG6EeXcLhtztQ+StN275mOKnaFqH6Sj/v15AUT2HFYNITTgEXkX | ||||
| LirR2tKhmiXSY3j9LH7EM8M6COeGjDensCw+zUbAUWWZb/AFRlWwzoq+fkvZ+BBO | ||||
| yW57wOLV3TSHwz2H92vC6V5jjSdLhmUTO6cWhoJfl7A7/zUZAaudPexRstrYvVJQ | ||||
| IYgt/qm5u3NmbZMCuCMdTWByK79tekUgS579+SjHh4sCCefQURCT8pFwthovhgEZ | ||||
| 6A3orWhgfHiXjSsZ9DZSydKMWFt86OlO4Kq3p7jdZuuFgh+PmzP4cP6yXQIDAQAB | ||||
| o0UwQzASBgNVHRMBAf8ECDAGAQH/AgEBMA4GA1UdDwEB/wQEAwIBRjAdBgNVHQ4E | ||||
| FgQURn6ApnN3kwblVs7owTr3PQqfBnwwDQYJKoZIhvcNAQELBQADggIBAIpDebfL | ||||
| VO1CgXMvmEnMv+96pQd2GfWQj6+G/FFZm9l0dN3Hek328+4zKfxRLSHW1PF3XEPK | ||||
| WbUvukZYp+748L8X//TMXYH5xEQR3igWyjEDVpUXSJEqKkktgJ3SUeQjvq20nVA8 | ||||
| jiVQ8UjMGCsIiWFmz45kH3CkaN4CX/HYnEk2XGzob8sGc7nss0oLGVEKtcN4lFOb | ||||
| tD1R4taiYuoz71sCuOVx5m7c0ja/D0/FhXleZ3CR8qXPKTr6FiYPbwvlKAAg2pC2 | ||||
| ZtOb6UQ8rwad17HTCIv3/mEfRANVcod8GzZaiJJo7XJAwbdcB7xkJ6rRWe2PPpcK | ||||
| MCmvUErENGdDmSLULAHhbylGRDOa/BFQCBI7F+rRzrNdBbC2X5EQJP4HfMrjrR3X | ||||
| +5RAk+eyambTRvetRP8TNbUkjRJshUx/DVeFnHsyA3jvLsVZmZQF8ynFEEMsK1Ba | ||||
| wnBbBXoeZlK6bo8R/YSRhzewv+XegS62vDGb6rUe7aj6BRUR8BTnc3PF+opyUlQz | ||||
| +WfHJyFaAljE675GV0xBo3dAAMcF+0IESAcd68UHhVfebAPfLQ0D/9ksVXzm5A6J | ||||
| 51tt1dMntf4YAl+qGkAPJ5WaZmYPILrfwtuA3jA4LmrQD23wUlQqOyFYA3n/s9wo | ||||
| N1ek3w2xwY3/v24M2si/8OSEWpgtZzr5iw7q | ||||
| -----END CERTIFICATE----- | ||||
							
								
								
									
51  dispatcher/certs/clients/test_ca_key.pem  Normal file
							| @@ -0,0 +1,51 @@ | ||||
| -----BEGIN RSA PRIVATE KEY----- | ||||
| MIIJKQIBAAKCAgEAnoEM/PBtAB9cULkTGiExJbVYp1hxJ8Tg4xPfoGk+KqAn8+5H | ||||
| iaqtGUcrtcvKFpVfABlNcP3lkezF3PqiNfgc0YHnCQkrQ6/zSYYK5T4BjI/EwTtw | ||||
| DPDPUyZPtf6QQFWcMDiMPHlgTUFXuFrPsYYfPFfI/5htG3hUDMI+as5UJ5oZKlhy | ||||
| D9G7ZmdisUVBSM1Uu3Wn/HTpPXygKhRbZgyGD56u7wLFWEpfsUFXE7J/MA3BOzb/ | ||||
| TuArXf8cuyaOdcySgvd5LKYqGz93GvwjQlTKauRaVUSqKTHv+GW9/bfqMC+S9jtt | ||||
| I/vkxNQnroKBiJkdVYSI5yPfEKPa9zNkpGmA/keQNKBX4iTgfTFiW9iWCl3EmwxP | ||||
| khLqolweSeKyiDQBuhHl3C4bc7UPkrTdu+Zjip2hah+ko/79eQFE9hxWDSE04BF5 | ||||
| Fy4q0drSoZol0mN4/Sx+xDPDOgjnhow3p7AsPs1GwFFlmW/wBUZVsM6Kvn5L2fgQ | ||||
| Tslue8Di1d00h8M9h/drwuleY40nS4ZlEzunFoaCX5ewO/81GQGrnT3sUbLa2L1S | ||||
| UCGILf6pubtzZm2TArgjHU1gciu/bXpFIEue/fkox4eLAgnn0FEQk/KRcLYaL4YB | ||||
| GegN6K1oYHx4l40rGfQ2UsnSjFhbfOjpTuCqt6e43WbrhYIfj5sz+HD+sl0CAwEA | ||||
| AQKCAgEAlduZdgOyaq070K5KmyfKmcZNHVsHEPyZSthdVAJs3kwxufUM+eG+aunZ | ||||
| L7aPSK7M9QD3MN88D612nSjx+Giuhn50+xwKJaLTOC5dWsQ3HrhG5BLYK8P5oLnW | ||||
| H1Gg/NJ0Kzsri3mOTTx8PTbOqx8NpTWyOcXZUmF/xdhYvw54jkCpjlm07bPzpCwX | ||||
| KVc7FCPd+qaQvqWiZ5nOrDo299LbZSU4a42JG6Kluqb2Nw9KJRq8GMo0tFRJbENo | ||||
| 3KDljAZwdxaXIFmx8bUdXQwKIgqcnldr+LZE01H9ejJnYNxjtE8meVtSIvVjI50a | ||||
| L2oAIi/xhgsajL5jhg4FVjbm7nM5mrMSp6Sf/YNCsvVB/LtyD3XsdtTgNbUPI9P3 | ||||
| 9ZI16X/4XlyMJsnVieQQ27gZiJmyz36r+cMF7yvZ2e14DHX+k2aRT7sNFapWGzuC | ||||
| YheFmPbApya1ZZHpLlUC3H7HRcyBufRLbJ3aseFhw/Jq5Ha0rv5fNUIVDGudiPR9 | ||||
| qbRYV2xodpElqtCYRynsiIqNSglRClw0iFQlwuS+LLFxU/wNyuGFI9jlDHWwgYd3 | ||||
| iz43oOi2hG53N+QkAZAzuQeAcQlE5q0L/UW0+4tnblxrIU/euuOc6XEb2di5kTMo | ||||
| 0RJxJ5Vm8KMRwOMUKDRpg8VMug+lAU7/E8inbakKD7GI69Z1DcECggEBAMnuVLSI | ||||
| +as1ucXXTY1izeqkskZ3Hs54fwzeEM0yoCt4l+RcMI6wPczr3gmMP/5hxglgiI6v | ||||
| AD6FTKv/17zNJ6yPREFImD6ylSXGlCV6QagynQYim+AHWkHnds5iwmXRHBtbhNrl | ||||
| nZdCMq5gvE7NDFGkHt2v8Ut6HVNeB8+9ZGUIsZuOQJbDZUK+zRR0Urx23h2KukNK | ||||
| vRsdUleWLShPn5kXuYpipxnRtAbZWaX6rbb3S4OZGpqI2HYw3r2WoA7tMl03r7F4 | ||||
| nlOhK9QudpZnGz7dYFM9vOglBym4A4KnTk45njfJ+Z9+ye8PqOdEsDDr94+N8Fd8 | ||||
| /yMbt2zoX9gjkRkCggEBAMjx9T4zj4rUJOq7zu82MufmRVSz017eKpnxMkIIIi50 | ||||
| qhPpZKrULaD3Tw/TaRpB5r/Rn+MDfwGvot7XyhsJkn9UBGxGzacC4LDLDFUU3mDz | ||||
| xPzJMlYrHIKzVyLulVZbJFKfbkrZQnzRuowQEBECpRCThVI9/yyFiHyt1rqWBLeK | ||||
| IBWHTuqq05PkuEpvY+XP3YboxR14mIEtxloa1gVcBC5ez2Kgk5evgW4DetdDXKLH | ||||
| dLdN+Bugmi3pi8pRaALDEMWRlApieHenZ50te9pgUFEBPo92OzWW4t3W7zx95P7L | ||||
| YcPVagbmGjaFOznfIB5eE3pGMwq1XyTmzgbxmL+W/+UCggEAKd0qXH9lW5R43kZj | ||||
| wqMwU6wvdaJulZmvpWhIjXIoeLq4qtReEMcDGP/xeYNFJST6HKmRxhsL7upN2f7h | ||||
| qDfUONc+7FXzklQNzdYckqSFBizwFvyH2mtL0Av/uowJB3jR3e4cXhFqmZhUz4go | ||||
| oiGqoyZma8l1OhOoDseY0P3P2Y5y2/Ai/d6mmK7b75iqKn5uUCuZsCfHit6KWrQ8 | ||||
| ynWvfdrIUaNgR18NdroK9vlAmIUud6r/M/iY/+/jzeRzbITKgz7vQtjh4i6w2n2D | ||||
| gmz/3gmhVcCf8HT0xjZrK+QpvNf/MEvEX8e2b8SMXN4FtS7GlVF0+X5lms69OWv3 | ||||
| quS8yQKCAQEAs8RyR/FMq0BavnzCBhMQu5Gv2LT/ldFff1TeR2CLY9APD70ux/0F | ||||
| zQkye9AAKPNoAZcBWNAZ4WJFbOLT9hg8LRj20/8LoCvc44zjPYJr54RtaFB+bcQn | ||||
| v229uIyEoTrsZdYnj2KKLqxaHU+JcA5TqV6wWQEJtcTIc/H5NfdbxO8XAOuJ2Dp3 | ||||
| CcoGbOD2F/Q8FKNNJK5skLRozNdRPH4zZ2B+W5eYMo1aVxdZ4BZtW1rgudRQ8DZf | ||||
| eE+FNbxaNo5YBMfWDuxFJZZoBZ9ZO+YKNE3Or/1vvuN7lkbgw9dE9ATzM4VLU1yr | ||||
| erb6Yy2PyFsVRcBjjWLw+UxaaK/enRfzWQKCAQAeS1VQnrGGwzl59LndYkuNas5V | ||||
| 7DQvTu7kXFsNKgujaF+NE3Ks0zDLx6C2zbL2JGf2LbjF+baXUbKatVU8qpvplO3f | ||||
| 15uLPq6BEI/pCKc8JThyWaE/0pnVv2l92fjNvN2EzDGsX8uj9TYaFnkE0mV7YQPh | ||||
| eliJAlT4ou2PIDrgwxqd6fiw/bkA9NSDy/tVdbAIz3p4gyf+4KvvNbUL9Q5sLtEO | ||||
| LPIurhavsWrpZIOvVxVnTxOoIe4iVE+iSy4KBgqaBMqhULJZqQnDoHBoFDzE06e9 | ||||
| jSKYCCCeYi6k4mkMHD3KvKjcLseiEmHE1w8EEEQgh5NmDnpCSMOaUEG65oqY | ||||
| -----END RSA PRIVATE KEY----- | ||||
							
								
								
									
32  dispatcher/certs/servers/server.bundle.pem  Normal file
							| @@ -0,0 +1,32 @@ | ||||
| -----BEGIN CERTIFICATE----- | ||||
| MIIFhzCCA2+gAwIBAgIUHZCstmrQ1m9wz/EjyUzmQxFpgQ0wDQYJKoZIhvcNAQEL | ||||
| BQAwWjELMAkGA1UEBhMCSUwxDzANBgNVBAgMBklzcmFlbDERMA8GA1UEBwwIVGVs | ||||
| IEF2aXYxETAPBgNVBAoMCEJyb2FkY29tMRQwEgYDVQQDDAtBRkMgVGVzdGluZzAe | ||||
| Fw0yMzA1MjkxMjQyMjFaFw0zMzA1MjYxMjQyMjFaMFoxCzAJBgNVBAYTAklMMQ8w | ||||
| DQYDVQQIDAZJc3JhZWwxETAPBgNVBAcMCFRlbCBBdml2MREwDwYDVQQKDAhCcm9h | ||||
| ZGNvbTEUMBIGA1UEAwwLQUZDIFRlc3RpbmcwggIiMA0GCSqGSIb3DQEBAQUAA4IC | ||||
| DwAwggIKAoICAQCuvs3GQi4x0wzi6uN7MiuwMl0rboqMzIn7B6bsD3IBhIqKVAxs | ||||
| D2n/j72EcezYZkG1dMNIZU1GWRdBl8dBhB2gdkrR6ODiTV4TPAgjTfmZ+J6IZvOY | ||||
| IwQImTBmaBhIuyC/56lxMBFQxOkfmFtTRsOgtN+rWT5Dgibkc2pUup/V+i8tveFX | ||||
| 954+QhCnFPxSQlkE6l94zgKlB5kkPlW2hvMiZu34tgnqbBuu8Zhk0a/kdMjmmWNT | ||||
| jZt4v8i8cZkgH+D5Qx6Ai8ndsIj9a7C80sOUZ68jRmDBnLh6bpX4Af7opnWux8Pc | ||||
| 32KXzzBls9cenFevv9Ue1z0M9YfYLixGeg1Detk5Qmh4y/KPEweMXw+9ZRckBpAw | ||||
| BA5czH6PlF+jnX4tjpuiddfKsx4ALqHr4Qw5rH87cepa2ia/VTWBe7eCpxdqy1yj | ||||
| IoZYjWsfn1VSpZkIXF3TzRZdJzq9ggZ9A4UjmYYr8O02DbdpFdsgryMcCCRVdgJ2 | ||||
| qsithswC7V2A7fHGOoA+Xr3/gnmBdqC2Zwc5nq8DHqVX6xl5aFCdbNtPsUtVq8CO | ||||
| wwQYh4EdqBMYos6xhRKoG/pil1e/FNNsnPg7ibZW1XWdUbUo21B33UpI41gRBPtc | ||||
| xxd1XTCx+jspXPnBrfgvJswBMPVzF+8RsZQy7VIRhUW1vYmvPlxxzXQLXwIDAQAB | ||||
| o0UwQzASBgNVHRMBAf8ECDAGAQH/AgEBMA4GA1UdDwEB/wQEAwIBRjAdBgNVHQ4E | ||||
| FgQUmqnURyaeNvKe5/Z5hEdHx6Rk6bMwDQYJKoZIhvcNAQELBQADggIBABQdPhYf | ||||
| otoudC/BYOlBXYmxDCpLYyKp5JBnP6+Uc4gNlndmHMy7Q5qPfRQNfUs6USbxpUyD | ||||
| e725AtBjkfa65fZ00C9bnzbXiiew5dKJz9HrfoRXZ4wS+X89eTsbw4LYGG/QgJRa | ||||
| aFDRt5BEM9Jh/k2AWeAOaRHz1dcQRyo2n8ZFNHdltey9z5AStGccwBSx3kSgefB/ | ||||
| 8+zxJ5Z2+C4I/6DevPD+2i0vCA7wPvq1xUNXNLvriZihjbe+BfW9yWNYt04jKjAl | ||||
| r7xUyA4s+EgrAsre2Xn+8BfItFN4BiRgfQ4k1Uhz4gHfFdOkV07voFKFAV1fHjRO | ||||
| 1u+LXQjuU9cNv+6cAO5KNAA5tuBQNBivf3vqiVvc5bb2+7ZOwLgj8/P0I+MkgwC7 | ||||
| CNJCwDFFwfsUlABT+6jfcivdrw6LGd1yewI/zLZ6o//ZXG1aky2Fr2fTbCRgEONY | ||||
| fuXDCP4tTY/n227IZ7ZTpwmICJsldmOCpc+zucczZEs7nOpbZ4DrDimRYmm7Ffgs | ||||
| 7TPsypkg9ATcOp7LAlKXn1oIPZwRPxiuaDCSPI+9h7j8U0bhk7oDE2IlmIyTZ/Za | ||||
| vUTQoKMXONpfs12PupmxZz622FRRgKLGtS1M7vEV7dWzAG64Fd37MwN2Mh5zfUBY | ||||
| 8M2AJ6MhJSLffEWNT2h1EOHDRgYbwbhjkCKU | ||||
| -----END CERTIFICATE----- | ||||
							
								
								
									
34  dispatcher/certs/servers/server.cert.pem  Normal file
							| @@ -0,0 +1,34 @@ | ||||
| -----BEGIN CERTIFICATE----- | ||||
| MIIF6DCCA9CgAwIBAgIUTldfidz38+Lnukf4EC6xQ4AvLQYwDQYJKoZIhvcNAQEL | ||||
| BQAwWjELMAkGA1UEBhMCSUwxDzANBgNVBAgMBklzcmFlbDERMA8GA1UEBwwIVGVs | ||||
| IEF2aXYxETAPBgNVBAoMCEJyb2FkY29tMRQwEgYDVQQDDAtBRkMgVGVzdGluZzAe | ||||
| Fw0yMzA1MzEwODQ1NTZaFw0yNTA2MTkwODQ1NTZaMFcxCzAJBgNVBAYTAklMMQ8w | ||||
| DQYDVQQIDAZJc3JhZWwxETAPBgNVBAcMCFRlbCBBdml2MREwDwYDVQQKDAhCcm9h | ||||
| ZGNvbTERMA8GA1UEAwwIYmV0YV9zZXIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw | ||||
| ggIKAoICAQDg4U+tOhn4ZgSyo826i/bpbXzXnKI7gQgmaPygF8bA/mhuXeNBHHbU | ||||
| sTJUVNjvPFINyEeE23mGGPZ1i2JQTXsuSTYj1MLOC1ScKR/hE8+L9TxOoJPtfY1d | ||||
| hoXzgofjJI6aR9cmr6HT8Hg8gJDoSRMrLLTTEWRh36d/KgQlrPYQmtHKdstvyJop | ||||
| 5uPIy/3mfEDVy0EoXsq3spLMTLxdD8gUrBuYT56FS0q9XwNCI/+vGn9RYMOZbzA9 | ||||
| s8f8vES3AFwxuBAu1H+zoyPgFa+dGeHWoW9feu/LpSQOtK5hjlqeHWGNdHNdcc6s | ||||
| GQEVdGHqt+BGj6nd7jUCNqbNet3jWjKQCn0WnmxdnM/2gR+djhvo7B9puwHphsSx | ||||
| 8r8VghfHyNL3RytYDezONUunFG/mU2P7RzFTc2PcqxtAfz4K4REIRQwqvW4nBv2K | ||||
| 40Thgcc2SBPCoihoNdHUxlZPO5lwJdv2k07ztc35ujUMtBiqmQWQnoVkpd9OXYHV | ||||
| rI6F7TK8DP1MP1w+L7QNuzyFEtxPl2YjWNqeuWf33L74/IEL0NWhHW1yIJE9+0L3 | ||||
| 1LLrhKnbk0+C0pNFaLP5UDeqDQejvAKn5NVMhQL3i4rtdn5lHnpSd803EiBTs6ZW | ||||
| TRG351AY+tWonY4eNMgfKLoA4tc9e9iENBJI/efkytLp3K6zNdISDQIDAQABo4Go | ||||
| MIGlMB8GA1UdIwQYMBaAFJqp1Ecmnjbynuf2eYRHR8ekZOmzMAwGA1UdEwEB/wQC | ||||
| MAAwEwYDVR0lBAwwCgYIKwYBBQUHAwEwDgYDVR0PAQH/BAQDAgWgMDAGA1UdEQQp | ||||
| MCeCH3dlYi1paGwtd2NjLTAxLmlobC5icm9hZGNvbS5uZXSHBAq7gCgwHQYDVR0O | ||||
| BBYEFMh+ORROXNcfoSAuTn92epuRvtNfMA0GCSqGSIb3DQEBCwUAA4ICAQClwzvh | ||||
| gNyPG2lisJ/EooDDX22gxUInb7cmi8gKX+pNrT4UJqwgo7olgWVfJwyBnI+u7fs1 | ||||
| dXOyu4zaoSmnh2TJLtLukWhPcxFgVdfo5D0CdmV9c02qgWt+TXbApATK7F7857P6 | ||||
| sdPTbqbMAtn4Z48uGt+rluOyTO3CJ9ZzIJHuegzZEFjX24HtXXVdRLMRA1Cauxhb | ||||
| 2b+ty+k7JB5jIwJ9t+PZzb2ktKy06yGqjYyazy3RpVAxWi8aAJuBQGxHmy0ZBNLx | ||||
| 0JaqDj+D+zc+U0jezhlm3II+o0cq7THCKhZPGbZUIszTN9CFtByKoIzO4jBdnYkw | ||||
| 0d+Kws/A6dfPv8janfxTUlS50P1+/5OeZCMc7g83KMzWzIBjye16FMENJhPxhuDD | ||||
| y1ylCTnEC5YZMCfikBo9McVft6MN/z60sQgFF2TNYqEFVYpr3Z/qw8EoBmKbl/8i | ||||
| HU9Ac8GdsQFTmrFaFtlSSh/Cfq41iVlLTKjr54YJ2QvjLN+XD6geTBWTfkYIryv/ | ||||
| 9IkOcg3bLsfXp9LD5RVe0t4FdgfutYYOzNI0FMa5Q6H2C0yX+6NW0pYQT4Yny/pT | ||||
| xl7rTSy9qSd1ChkxGNHzzwrFQaPA1E+Aq4Df5J1p+sVaL17vsEV7ClIWSJXMbIP5 | ||||
| auYOE6NvyXIli3UoafQ0TIzUfB9ab+coVN/Txw== | ||||
| -----END CERTIFICATE----- | ||||
							
								
								
									
51  dispatcher/certs/servers/server.key.pem  Normal file
							| @@ -0,0 +1,51 @@ | ||||
| -----BEGIN RSA PRIVATE KEY----- | ||||
| MIIJKgIBAAKCAgEA4OFPrToZ+GYEsqPNuov26W1815yiO4EIJmj8oBfGwP5obl3j | ||||
| QRx21LEyVFTY7zxSDchHhNt5hhj2dYtiUE17Lkk2I9TCzgtUnCkf4RPPi/U8TqCT | ||||
| 7X2NXYaF84KH4ySOmkfXJq+h0/B4PICQ6EkTKyy00xFkYd+nfyoEJaz2EJrRynbL | ||||
| b8iaKebjyMv95nxA1ctBKF7Kt7KSzEy8XQ/IFKwbmE+ehUtKvV8DQiP/rxp/UWDD | ||||
| mW8wPbPH/LxEtwBcMbgQLtR/s6Mj4BWvnRnh1qFvX3rvy6UkDrSuYY5anh1hjXRz | ||||
| XXHOrBkBFXRh6rfgRo+p3e41AjamzXrd41oykAp9Fp5sXZzP9oEfnY4b6OwfabsB | ||||
| 6YbEsfK/FYIXx8jS90crWA3szjVLpxRv5lNj+0cxU3Nj3KsbQH8+CuERCEUMKr1u | ||||
| Jwb9iuNE4YHHNkgTwqIoaDXR1MZWTzuZcCXb9pNO87XN+bo1DLQYqpkFkJ6FZKXf | ||||
| Tl2B1ayOhe0yvAz9TD9cPi+0Dbs8hRLcT5dmI1janrln99y++PyBC9DVoR1tciCR | ||||
| PftC99Sy64Sp25NPgtKTRWiz+VA3qg0Ho7wCp+TVTIUC94uK7XZ+ZR56UnfNNxIg | ||||
| U7OmVk0Rt+dQGPrVqJ2OHjTIHyi6AOLXPXvYhDQSSP3n5MrS6dyuszXSEg0CAwEA | ||||
| AQKCAgEAhgZiqThOkBelRx6PF1Yhoz9ov0wP+GzPgazimztwbkdx0A1Oyth/DgZJ | ||||
| m68x12tY7/PkhA8WH1CzWpzmzDtRZeWmSbadH5XrKGLuKAPZl21iMu5LG6jPXuU0 | ||||
| 4ktyV3LLNrIITXsxdJIF5vEs6/PZY8ryPjVIYXidaBGPhTDPOlg7HnKsjoO9Nanx | ||||
| KhRBz2NQdNr9i2TrZo4cJXy6arBkK8XjcGRLct/LvI9q7rlrwl2Fcee8y65TzwJd | ||||
| 94fxYCvrxooPwwlMzrA1SnFCR9xMF9IBAaPQVMuocMdIgsYHxeJ26Ip100Rny3Pf | ||||
| jHzferd6CDPJJoa4uwf9Y8uNgNmZ9dbqiJR+tgdR8WuG2Bn3NzOOeN8tipPzDYuf | ||||
| 2jHO117IsgEPugbW0IQcpee3gZf/7iqaJVIIT6c0Bq2tSYcpNSRCYdOx9rR5KVH7 | ||||
| Qv2KWKl/rHHVw38jX9HxmwFjZhF6Lc4hQVHc9ZOqY0gwbQCLtqQKHOU8RcgbrjhX | ||||
| lEq7le5God2ukNctHU/CSvSF1LXRmrX+xQSdawwtpaRUtgx7YgG2cwo48rox/d3d | ||||
| Knmf8sArMkvpNCAIj7oRI8FS34NbvKUYiMqqIEUinwmemA9s7QK/8DfTFOzDS9Z4 | ||||
| hXNrU38SfQGCGSQcvwbDCjrCgQqpxoGhMYRUqPuwVo9PyuhPmxUCggEBAPT5XY3H | ||||
| vnXL6Oust2ypPvzOZ0qjDCQGWjkFIGyWuZ7RFzjPJYKBeyCH/HqQuEeWsN29gj+B | ||||
| UY9HyN9PDhEaGGRahgYmRKAOLs8BHJ7yzNTLZef5HiDFFkZ26n/QmQQGVxdvXpV5 | ||||
| rLYR4XtIvCKkooJURqkrIATTENilin09iVpYbozKKFvSYmv+HN0+t03XxqtxnGVj | ||||
| aS+lM0UeV8dWypce9Ipu1gSPLy67uSJ8p0oa99zo2OPgPjf2r9Rj8+oKLTf89aK4 | ||||
| Ev//fukbeMwtRLl6hy97gUCvyoNdgXzEIjI+rdMC13LM26BvPxgtT2mqZs7ocU8Q | ||||
| qptTEmKfVFlnNzMCggEBAOsAa2yGOE3MEaNDaOT9cOo2frpxevOjrU/rZ0Ds5ZHl | ||||
| tpiM5qm/Grjd8tbDmQo2Xcarqnq37G9ce7x1JNRMdD42UbudTx0QSg3gi6FFxZVj | ||||
| ccoDACUkIuT7NbUQV/LwCNV3/ZsWX75yanWhZUAtaAu8ZN/7dtpXVXcmZbYgs0xm | ||||
| zAMlHlSDqMYeol2uyPRX0jdcDSc3kh6pGAssBpelenALrBjdQn++4S57iWeiKUfc | ||||
| qvMtq9gHKcIRL3o/zwDln26hrZ7qgd6+hUYqG2MREs4vOPlpPwN+m2bJKrRKE0dO | ||||
| +zylLJ5GaBn3Bv34wiuDZZSQt1ChMvXmKf8OKBZEkb8CggEAbXGGwVPOnFPoEHpO | ||||
| TCZktI8GCItFXkbUQgsvTDQeY3yockHhUSfFuWfnfV5wZdNF2xrSOMruhCOe810f | ||||
| PLa61QK4Q8EPAa60bNjjT4PLzPm94mAifKNwazSvWUD5S5oFiLvBtufwKDte0DRT | ||||
| kOqai71ZADT7Dgy5xwBWGdPHLGy7nvymATfBrtuNS668N/PBl1CffZBnKtkUSbnf | ||||
| n3f/9Hno6HvR86GAg9FsSaMFHg9kUvZYB55kTZ5ROYMaMqIvR4ckunigTGx552zV | ||||
| j+pdfLvn72eu/BZNVFkPA42gdXAZOl9Xn7s0F737ozKC+wMdAS1Jifg5MEFxwkvK | ||||
| ZFK/jwKCAQEAiCUmFylrVSb00PEsw/1QfWA06y7zXFNnBPYMS8Dy/yNmNdrrh0v/ | ||||
| 3zo2hdWrxA7bJU4u5gnIIHwj83qqa5QfhCtUDq2EOAJH5OJCApy5a2LBeZdjbiER | ||||
| VjdzVgKx8Ty+4W0yr7a2oU8H/j4SuquTq7jpeBnnMXeHPBAyvOEU/x5O80N93tin | ||||
| 3p/A0SWBpo16bDgYJrA7JygvlclbyF9GH8OjYIRPElMzggpwAGoiIE/nehrrg6wi | ||||
| tRvftaNh+dMOGrnwLDEQLEuUSqH6W9p4WpthFp2ytAOVZGcHJowDvzwysV/ACbIg | ||||
| fWpv0pNbanolT3zHtx6st2kwy2MYNk5jYQKCAQEA6hsqOztjsDs3Q9GQMreKgzrp | ||||
| FrBG6zSCJYsskWofl87kiAeztGmDCMtW0SiKAGwr1QxZA2qENUS/+Pdf1tmx7nIJ | ||||
| Y+7nULd1BWKpiFxOIzY8lQtqItEpq4sJtp4q6grvHu1N6wfIyVTl75H4P/YlQo+Q | ||||
| nOP8O0RiSE63sEgyWxIzi4BeiTIHfpUDw4LGjIqZAdDbGbsaLx4CLSMoxhxHKPzu | ||||
| Yy+17mzAZAE5otdKzqCxjXjxKQtBOUA9n8Ye6e2ekoFXMmJI7DiuaVacuWQgOhCO | ||||
| oqmuTnrGXxHrWwS7j0Bt9SZhHXu0b89faPegGk9pp5wrCxZrXlLq4TvT/BuFsA== | ||||
| -----END RSA PRIVATE KEY----- | ||||
							
								
								
									
32  dispatcher/entrypoint.sh  Normal file
							| @@ -0,0 +1,32 @@ | ||||
| #!/bin/sh | ||||
| # | ||||
| # Copyright (C) 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| AFC_DEVEL_ENV=${AFC_DEVEL_ENV:-production} | ||||
| case "$AFC_DEVEL_ENV" in | ||||
|   "devel") | ||||
|     echo "Running debug profile"  | ||||
|     ACCEPTOR_LOG_LEVEL="--log debug" | ||||
|     BIN=nginx-debug | ||||
|     apk add --update --no-cache bash | ||||
|     ;; | ||||
|   "production") | ||||
|     echo "Running production profile" | ||||
|     ACCEPTOR_LOG_LEVEL= | ||||
|     BIN=nginx | ||||
|     ;; | ||||
|   *) | ||||
|     echo "Unknown profile" | ||||
|     ACCEPTOR_LOG_LEVEL= | ||||
|     BIN=nginx | ||||
|     ;; | ||||
| esac | ||||
|  | ||||
| /docker-entrypoint.sh $BIN -g "daemon off;" & | ||||
|  | ||||
| /wd/acceptor.py $ACCEPTOR_LOG_LEVEL --cmd run | ||||
|  | ||||
| exit $? | ||||
							
								
								
									
19  dispatcher/html/50x.html  Normal file
							| @@ -0,0 +1,19 @@ | ||||
| <!DOCTYPE html> | ||||
| <html> | ||||
| <head> | ||||
| <title>Error</title> | ||||
| <style> | ||||
| html { color-scheme: light dark; } | ||||
| body { width: 35em; margin: 0 auto; | ||||
| font-family: Tahoma, Verdana, Arial, sans-serif; } | ||||
| </style> | ||||
| </head> | ||||
| <body> | ||||
| <h1>An error occurred.</h1> | ||||
| <p>Sorry, the page you are looking for is currently unavailable.<br/> | ||||
| Please try again later.</p> | ||||
| <p>If you are the system administrator of this resource then you should check | ||||
| the error log for details.</p> | ||||
| <p><em>Faithfully yours, nginx.</em></p> | ||||
| </body> | ||||
| </html> | ||||
							
								
								
									
23  dispatcher/html/index.html  Normal file
							| @@ -0,0 +1,23 @@ | ||||
| <!DOCTYPE html> | ||||
| <html> | ||||
| <head> | ||||
| <title>Welcome to nginx!</title> | ||||
| <style> | ||||
| html { color-scheme: light dark; } | ||||
| body { width: 35em; margin: 0 auto; | ||||
| font-family: Tahoma, Verdana, Arial, sans-serif; } | ||||
| </style> | ||||
| </head> | ||||
| <body> | ||||
| <h1>Welcome to nginx!</h1> | ||||
| <p>If you see this page, the nginx web server is successfully installed and | ||||
| working. Further configuration is required.</p> | ||||
|  | ||||
| <p>For online documentation and support please refer to | ||||
| <a href="http://nginx.org/">nginx.org</a>.<br/> | ||||
| Commercial support is available at | ||||
| <a href="http://nginx.com/">nginx.com</a>.</p> | ||||
|  | ||||
| <p><em>Thank you for using nginx.</em></p> | ||||
| </body> | ||||
| </html> | ||||
							
								
								
									
18  dispatcher/nginx.conf  Normal file
							| @@ -0,0 +1,18 @@ | ||||
| # | ||||
| # Copyright © 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| user  nginx; | ||||
| worker_processes  auto; | ||||
|  | ||||
| error_log  /dev/stdout crit; | ||||
| pid        /var/run/nginx.pid; | ||||
|  | ||||
|  | ||||
| events { | ||||
|     worker_connections  1024; | ||||
| } | ||||
|  | ||||
| include /etc/nginx/conf.d/nginx.conf; | ||||
							
								
								
									
219  dispatcher/nginx.conf.template  Normal file
							| @@ -0,0 +1,219 @@ | ||||
| # | ||||
| # Copyright (C) 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
|  | ||||
| http { | ||||
|     log_format short_fmt '[$time_local] $request_time $upstream_response_time'; | ||||
|     log_format error_fmt '[$time_local] $remote_addr - $ssl_client_s_dn - $remote_user - $request_uri - $uri'; | ||||
|     access_log  /dev/stdout  error_fmt; | ||||
|     error_log  /dev/stdout debug; | ||||
|  | ||||
|  | ||||
|     upstream msghnd { | ||||
|         # consistent hashing on the full client address keeps each client on the same backend | ||||
|         hash $binary_remote_addr consistent; | ||||
|         server ${AFC_MSGHND_NAME}:${AFC_MSGHND_PORT}; | ||||
|         # idle connections preserved in the cache of each worker | ||||
|         keepalive  32; | ||||
|     } | ||||
|  | ||||
|     upstream webui { | ||||
|         # consistent hashing on the full client address keeps each client on the same backend | ||||
|         hash $binary_remote_addr consistent; | ||||
|         server ${AFC_WEBUI_NAME}:${AFC_WEBUI_PORT}; | ||||
|         # idle connections preserved in the cache of each worker | ||||
|         keepalive  32; | ||||
|     } | ||||
|  | ||||
|     map $scheme:$afc_https_enforce $should_redirect { | ||||
|         http:TRUE 1; | ||||
|         default 0; | ||||
|     } | ||||
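|     # e.g. with AFC_ENFORCE_HTTPS=TRUE, a plain-http request makes the | ||||
|     # map above yield $should_redirect=1, and the server block below | ||||
|     # answers it with a 301 redirect to the https:// form of the same URI | ||||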
|  | ||||
|     server { | ||||
|         listen 80; | ||||
|         listen [::]:80 ipv6only=on; | ||||
|         listen 443 ssl; | ||||
|         listen [::]:443 ipv6only=on ssl; | ||||
|  | ||||
|         server_name ${AFC_SERVER_NAME}; | ||||
|          | ||||
|         ssl_protocols TLSv1.2 TLSv1.3; | ||||
|         ssl_prefer_server_ciphers on; | ||||
|         ssl_certificate /certificates/servers/server.cert.pem; | ||||
|         ssl_certificate_key /certificates/servers/server.key.pem; | ||||
|  | ||||
|         ssl_client_certificate /etc/nginx/certs/client.bundle.pem; | ||||
|         ssl_verify_client optional; | ||||
|         ssl_verify_depth 10; | ||||
|  | ||||
|         #ssl_stapling on; | ||||
|         #ssl_stapling_verify on; | ||||
|  | ||||
|         # reject attempts to establish a session by a client that requests a wrong host name | ||||
|         set $reject_request 0; | ||||
|         set $afc_server_name ${AFC_SERVER_NAME}; | ||||
|  | ||||
|         if ($host != $server_name) { | ||||
|             set $reject_request 1; | ||||
|         } | ||||
|         # ... but not in case of a wildcard | ||||
|         if ($afc_server_name = "_") { | ||||
|             set $reject_request 0; | ||||
|         } | ||||
|         # we won't return any response to the client in case of rejection, just close the connection | ||||
|         if ($reject_request) { | ||||
|             return 444; | ||||
|         } | ||||
|          | ||||
|         # To enforce HTTPS, set AFC_ENFORCE_HTTPS to the value "true" | ||||
|         set $afc_https_enforce ${AFC_ENFORCE_HTTPS}; | ||||
|         # To enforce mTLS, set AFC_ENFORCE_MTLS to the value "true"; | ||||
|         # otherwise client certificate verification is optional | ||||
|         set $afc_mtls_status ${AFC_ENFORCE_MTLS}; | ||||
|  | ||||
|         if ($should_redirect = 1) { | ||||
|             return 301 https://$host$request_uri; | ||||
|         } | ||||
|  | ||||
|         set $afc_mtls_enforce ${AFC_ENFORCE_MTLS}; | ||||
|  | ||||
|         location /fbrat/ap-afc/availableSpectrumInquirySec { | ||||
|            #if ($ssl_client_verify != SUCCESS) { | ||||
|            #    return 403; | ||||
|            #} | ||||
|             # disable buffering for latency | ||||
|             proxy_buffering off; | ||||
|             # timeout for reading a response from the proxied server | ||||
|             proxy_read_timeout ${AFC_PROXY_CONN_TOUT}; | ||||
|             # timeout for establishing a connection with the proxied server | ||||
|             proxy_connect_timeout 720; | ||||
|             # timeout for transmitting a request to the proxied server | ||||
|             proxy_send_timeout 720; | ||||
|             sendfile on; | ||||
|             proxy_bind $server_addr; | ||||
|             proxy_set_header Host $http_host; | ||||
|             proxy_set_header X-Forwarded-Proto $scheme; | ||||
|             proxy_set_header X-Real-IP $remote_addr; | ||||
|             proxy_pass http://webui; | ||||
|             proxy_redirect http:// $scheme://; | ||||
|             # keep upstream connections alive by clearing the Connection header | ||||
|             proxy_http_version 1.1; | ||||
|             proxy_set_header   "Connection" ""; | ||||
|         } | ||||
|  | ||||
|         location /fbrat/ap-afc/availableSpectrumInquiry { | ||||
|             if ($ssl_client_verify != SUCCESS) { | ||||
|                 set $afc_mtls_status "${afc_mtls_status}_false"; | ||||
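|                 # e.g. AFC_ENFORCE_MTLS=true plus a failed client verify | ||||
|                 # yields "true_false", the only value rejected just below | ||||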
|             } | ||||
|             # check if mtls is enforced and ssl_client_verify is not success | ||||
|             if ($afc_mtls_status = true_false) { | ||||
|                 return 403; | ||||
|             } | ||||
|             # disable buffering for latency | ||||
|             proxy_buffering off; | ||||
|             # timeout for reading a response from the proxied server | ||||
|             proxy_read_timeout ${AFC_PROXY_CONN_TOUT}; | ||||
|             # timeout for establishing a connection with the proxied server | ||||
|             proxy_connect_timeout 720; | ||||
|             # timeout for transmitting a request to the proxied server | ||||
|             proxy_send_timeout 720; | ||||
|             proxy_pass http://msghnd$uri$is_args$args; | ||||
|             # keep upstream connections alive by clearing the Connection header | ||||
|             proxy_http_version 1.1; | ||||
|             proxy_set_header   "Connection" ""; | ||||
|             access_log off; | ||||
|             log_not_found off; | ||||
|         } | ||||
|  | ||||
|         # forbid internal tests | ||||
|         location /fbrat/ap-afc/availableSpectrumInquiryInternal { | ||||
|             return 403; | ||||
|         } | ||||
|  | ||||
|         # forbid webdav methods other than GET | ||||
|         location /fbrat/ratapi/v1/files { | ||||
|             limit_except GET { deny  all; } | ||||
|             sendfile on; | ||||
|             proxy_bind $server_addr; | ||||
|             proxy_set_header Host $http_host; | ||||
|             proxy_set_header X-Forwarded-Proto $scheme; | ||||
|             proxy_set_header X-Real-IP $remote_addr; | ||||
|             proxy_pass http://webui; | ||||
|             proxy_redirect http:// $scheme://; | ||||
|         } | ||||
|  | ||||
|         # forbid internal request | ||||
|         location /fbrat/ratapi/v1/GetAfcConfigByRulesetID { | ||||
|             return 403; | ||||
|         } | ||||
|  | ||||
|         # forbid internal request | ||||
|         location /fbrat/ratapi/v1/GetRulesetIDs { | ||||
|             return 403; | ||||
|         } | ||||
|  | ||||
|         location / { | ||||
|             if ($request_uri = "/") { | ||||
|                 return 301 $scheme://$http_host/fbrat; | ||||
|             } | ||||
|             sendfile on; | ||||
|             proxy_bind $server_addr; | ||||
|             proxy_set_header Host $http_host; | ||||
|             proxy_set_header X-Forwarded-Proto $scheme; | ||||
|             proxy_set_header X-Real-IP $remote_addr; | ||||
|             proxy_pass http://webui; | ||||
|             proxy_redirect http:// $scheme://; | ||||
|         } | ||||
|  | ||||
|         # redirect server error pages to the static page /50x.html | ||||
|         # | ||||
|         error_page   500 502 503 504; | ||||
|         #error_page   500 502 503 504  /50x.html; | ||||
|         #location = /50x.html { | ||||
|         #    root   /usr/share/nginx/html; | ||||
|         #} | ||||
|  | ||||
|         error_page 403 /403.html; | ||||
|         location /403.html { | ||||
|             access_log /dev/stdout error_fmt; | ||||
|         } | ||||
|         add_header X-Content-Type-Options nosniff; | ||||
|         add_header X-Frame-Options "SAMEORIGIN"; | ||||
|         add_header Content-Security-Policy  "script-src 'self' 'unsafe-eval' https://maps.googleapis.com https://code.jquery.com https://netdna.bootstrapcdn.com/bootstrap https://www.google.com/recaptcha/ https://www.gstatic.com/recaptcha/; style-src 'self' https://fonts.googleapis.com https://netdna.bootstrapcdn.com https://www.gstatic.com/recaptcha/ 'unsafe-inline'"; | ||||
|         add_header X-XSS-Protection "1; mode=block"; | ||||
|         add_header Referrer-Policy "strict-origin-when-cross-origin"; | ||||
|         add_header Permissions-Policy "geolocation=(self), microphone=(), camera=(), speaker=(), vibrate=(), payment=(), fullscreen=(self), sync-xhr=(), magnetometer=(), gyroscope=(), accelerometer=(), usb=(), autoplay=(), midi=(), encrypted-media=(), vr=(), xr-spatial-tracking=()"; | ||||
|         add_header Feature-Policy "geolocation 'self'; microphone 'none'; camera 'none'; speaker 'none'; vibrate 'none'; payment 'none'; fullscreen 'self'; sync-xhr 'none'; magnetometer 'none'; gyroscope 'none'; accelerometer 'none'; usb 'none'; autoplay 'none'; midi 'none'; encrypted-media 'self'; vr 'none'; xr-spatial-tracking 'none';"; | ||||
|     } | ||||
|  | ||||
|     # only for healthcheck | ||||
|     server { | ||||
|         listen 127.0.0.1:80; | ||||
|  | ||||
|         location /fbrat/ap-afc/healthy { | ||||
|             return 200 "OK"; | ||||
|         } | ||||
|  | ||||
|     } | ||||
|  | ||||
|     # Source data for NginxExporter (generator of Nginx Prometheus metrics) | ||||
|     server { | ||||
|         listen 8080; | ||||
|         location /stub_status { | ||||
|             stub_status; | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     include       /etc/nginx/mime.types; | ||||
|     default_type  application/octet-stream; | ||||
|  | ||||
|     sendfile       on; | ||||
|     tcp_nopush     on; | ||||
|  | ||||
|     keepalive_timeout  180; | ||||
|     server_tokens off; | ||||
| } | ||||
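The ${VAR} placeholders in the template above are filled in when the dispatcher container starts. A minimal sketch of that substitution step, assuming envsubst-style templating (the actual entrypoint script is not part of this diff, and the script name and paths here are illustrative only):

    # render_template.py -- illustrative sketch, not part of this commit
    import os
    from string import Template

    with open('dispatcher/nginx.conf.template') as src:
        # safe_substitute() fills ${AFC_MSGHND_NAME}, ${AFC_WEBUI_PORT}, etc.
        # from the environment and leaves nginx runtime variables such as
        # $uri or $scheme untouched when no matching environment variable exists
        rendered = Template(src.read()).safe_substitute(os.environ)

    with open('/etc/nginx/conf.d/nginx.conf', 'w') as dst:
        dst.write(rendered)

dispatcher/nginx.conf above ends with include /etc/nginx/conf.d/nginx.conf;, which is where the rendered output would land.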
77  dispatcher/nginx_mtls.conf  Normal file
							| @@ -0,0 +1,77 @@ | ||||
|  | ||||
| user  nginx; | ||||
| worker_processes  auto; | ||||
|  | ||||
| error_log  /dev/stdout crit; | ||||
| pid        /var/run/nginx.pid; | ||||
|  | ||||
|  | ||||
| events { | ||||
|     worker_connections  1024; | ||||
| } | ||||
|  | ||||
|  | ||||
| http { | ||||
|     log_format short_fmt '[$time_local] $request_time $upstream_response_time'; | ||||
|     log_format error_fmt '[$time_local] $remote_addr - $ssl_client_s_dn - $remote_user - $request_uri - $uri'; | ||||
|     access_log  /dev/stdout  error_fmt; | ||||
|     error_log  /dev/stdout debug; | ||||
|  | ||||
|  | ||||
|     upstream backend { | ||||
|         ip_hash; | ||||
|         server rat_server; | ||||
|     } | ||||
| #   server { | ||||
| #       listen 80; | ||||
| #       listen [::]:80; | ||||
| #        | ||||
| #       location /fbrat/ap-afc/1.1/availableSpectrumInquiry { | ||||
| #           proxy_pass http://backend/fbrat/ap-afc/1.1/availableSpectrumInquiry$is_args$args; | ||||
| #       } | ||||
| #       location / { | ||||
| #           proxy_pass http://backend$is_args$args; | ||||
| #       } | ||||
| #   } | ||||
|     server { | ||||
|         listen 443 ssl; | ||||
|         listen [::]:443 ssl; | ||||
|         ssl_protocols TLSv1.2 TLSv1.3; | ||||
|         ssl_prefer_server_ciphers on; | ||||
|         ssl_certificate /certificates/servers/server.cert.pem; | ||||
|         ssl_certificate_key /certificates/servers/server.key.pem; | ||||
|  | ||||
|         ssl_client_certificate /certificates/clients/client.bundle.pem; | ||||
|         ssl_verify_client on; | ||||
|         ssl_verify_depth 10; | ||||
|   | ||||
|         location / { | ||||
|             root   /wd/nginx/html; | ||||
|             index  index.html index.htm; | ||||
|             if ($ssl_client_verify != SUCCESS) { | ||||
|                 return 403; | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         location /fbrat/ap-afc/1.1/availableSpectrumInquiry { | ||||
|             if ($ssl_client_verify != SUCCESS) { | ||||
|                 return 403; | ||||
|             } | ||||
|             proxy_pass http://backend$uri$is_args$args; | ||||
|         } | ||||
|         # redirect server error pages to the static page /50x.html | ||||
|         # | ||||
|         error_page   500 502 503 504  /50x.html; | ||||
|         location = /50x.html { | ||||
|             root   /usr/share/nginx/html; | ||||
|         } | ||||
|  | ||||
|     } | ||||
|     include       /etc/nginx/mime.types; | ||||
|     default_type  application/octet-stream; | ||||
|  | ||||
|     sendfile       on; | ||||
|     tcp_nopush     on; | ||||
|  | ||||
|     keepalive_timeout  180; | ||||
| } | ||||
6  dispatcher/requirements.txt  Normal file
							| @@ -0,0 +1,6 @@ | ||||
| amqp==5.1.1 | ||||
| gevent==23.9.1 | ||||
| greenlet==2.0.2 | ||||
| kombu==5.2.4 | ||||
| requests==2.31.0 | ||||
| vine==5.0.0 | ||||
21  fbrat.rpmlintrc  Normal file
							| @@ -0,0 +1,21 @@ | ||||
| # Nonstandard license term | ||||
| addFilter(r'.*: W: invalid-license Commercial') | ||||
| addFilter(r'.*: W: invalid-url URL: .*') | ||||
| addFilter(r'.*: W: invalid-url Source0: .*') | ||||
|  | ||||
| # This is used for Doxygen files only | ||||
| addFilter(r'.*: W: rpm-buildroot-usage %build -DAPIDOC_INSTALL_PATH=%{buildroot}%{apidocdir} \\') | ||||
|  | ||||
| # Allow unnecessary cmake-generated linking | ||||
| addFilter(r'.*: W: unused-direct-shlib-dependency .*') | ||||
| # dbus configuration is package-driven | ||||
| addFilter(r'.*: W: conffile-without-noreplace-flag /etc/dbus-1/system\.d/.*\.conf') | ||||
| # Library debug info is a special case | ||||
| addFilter(r'.*-debuginfo\..*: W: only-non-binary-in-usr-lib') | ||||
|  | ||||
| # daemon users | ||||
| addFilter(r'fbrat\..*: W: non-standard-uid /var/lib/fbrat fbrat') | ||||
| addFilter(r'fbrat\..*: W: non-standard-gid /var/lib/fbrat fbrat') | ||||
|  | ||||
| # The statically-linked library calls exit | ||||
| addFilter(r'fbrat\..*: W: shared-lib-calls-exit /usr/lib64/libafccrashdump.so.0.0.0 .*') | ||||
12  gunicorn/config.py  Normal file
							| @@ -0,0 +1,12 @@ | ||||
| # | ||||
| # Copyright 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
|  | ||||
| import prometheus_client.multiprocess | ||||
|  | ||||
|  | ||||
| def child_exit(server, worker): | ||||
|     prometheus_client.multiprocess.mark_process_dead(worker.pid) | ||||
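This child_exit hook cleans up the dead worker's per-process metric files so stale series are not exported. For context, a minimal sketch of how multiprocess metrics are typically aggregated for scraping with prometheus_client (the endpoint wiring is an illustrative assumption, not part of this commit; it presumes PROMETHEUS_MULTIPROC_DIR is set):

    # illustrative sketch only
    from prometheus_client import CollectorRegistry, generate_latest
    from prometheus_client.multiprocess import MultiProcessCollector

    def metrics_payload():
        # aggregate the per-worker metric files into one scrape payload
        registry = CollectorRegistry()
        MultiProcessCollector(registry)
        return generate_latest(registry)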
11  gunicorn/gunicorn.conf.py  Normal file
							| @@ -0,0 +1,11 @@ | ||||
| # | ||||
| # Copyright 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| bind = '0.0.0.0:8000' | ||||
| workers = 20 | ||||
| daemon = True | ||||
| pidfile = '/run/gunicorn/openafc_app.pid' | ||||
| timeout = 120 | ||||
28  gunicorn/gunicorn.logs.conf  Normal file
							| @@ -0,0 +1,28 @@ | ||||
| [loggers] | ||||
| keys=root, gunicorn.error | ||||
|  | ||||
| [handlers] | ||||
| keys=console | ||||
|  | ||||
| [formatters] | ||||
| keys=generic | ||||
|  | ||||
| [logger_root] | ||||
| level=DEBUG | ||||
| handlers=console | ||||
|  | ||||
| [logger_gunicorn.error] | ||||
| level=DEBUG | ||||
| handlers=console | ||||
| propagate=0 | ||||
| qualname=gunicorn.error | ||||
|  | ||||
| [handler_console] | ||||
| class=StreamHandler | ||||
| formatter=generic | ||||
| args=(sys.stdout, ) | ||||
|  | ||||
| [formatter_generic] | ||||
| format=%(asctime)s [%(process)d] [%(levelname)s] %(message)s | ||||
| datefmt=%Y-%m-%d %H:%M:%S | ||||
| class=logging.Formatter | ||||
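A file in this format is passed to gunicorn via its --log-config option; gunicorn loads it with the standard logging.config file-config machinery, which is how the args=(sys.stdout, ) tuple above gets evaluated to wire the console handler to standard output.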
13  gunicorn/wsgi.py  Normal file
							| @@ -0,0 +1,13 @@ | ||||
| # | ||||
| # Copyright 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| import ratapi | ||||
|  | ||||
| app = ratapi.create_app( | ||||
|     config_override={ | ||||
|         'APPLICATION_ROOT': '/fbrat', | ||||
|     } | ||||
| ) | ||||
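With this module in place, the app would typically be served by something like gunicorn -c gunicorn.conf.py wsgi:app run from the gunicorn/ directory (the exact invocation lives in the container setup, which is not part of this section).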
22  http-checkout/rathttpcheckout/__init__.py  Normal file
							| @@ -0,0 +1,22 @@ | ||||
| ''' This package is a pure collection of unittest cases. | ||||
|  | ||||
| The test configuration can be controlled by environment variables: | ||||
|  | ||||
| `HTTPCHECKOUT_BASEURL` | ||||
|     as the base URL to access the host-under-test. Make sure this has a trailing slash. | ||||
| `HTTPCHECKOUT_READONLY` | ||||
|     any non-empty value will skip all tests which modify the CPO Archive state. | ||||
|  | ||||
| An example of running this checkout is: | ||||
|  | ||||
| HTTPCHECKOUT_BASEURL=http://localhost:5000/ \ | ||||
| XDG_DATA_DIRS=$PWD/testroot/share \ | ||||
| PYTHONPATH=$PWD/http-checkout \ | ||||
| nosetests -v rathttpcheckout | ||||
|  | ||||
| ''' | ||||
|  | ||||
| from .aaa import * | ||||
| from .paws import * | ||||
| from .ratapi import * | ||||
| from .www import * | ||||
43  http-checkout/rathttpcheckout/aaa.py  Normal file
							| @@ -0,0 +1,43 @@ | ||||
| ''' Test cases related to AAA functions (not the APIs that relate to them). | ||||
| ''' | ||||
| import logging | ||||
| from .base import (ValidateHtmlResponse, BaseTestCase) | ||||
| import os | ||||
|  | ||||
| #: Logger for this module | ||||
| LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| class TestUserLogin(BaseTestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         BaseTestCase.setUp(self) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         BaseTestCase.tearDown(self) | ||||
|  | ||||
|     def test_login_options(self): | ||||
|         self._test_options_allow( | ||||
|             self._resolve_url('user/sign-in'), | ||||
|             {'GET', 'POST'} | ||||
|         ) | ||||
|  | ||||
|     def test_login_request(self): | ||||
|         resp = self.httpsession.get(self._resolve_url('user/sign-in')) | ||||
|         # now a location, managed by flask_login | ||||
|         self.assertEqual(200, resp.status_code) | ||||
|         self.assertTrue("csrf_token" in resp.content) | ||||
|  | ||||
|     def test_login_success(self): | ||||
|         resp = self.httpsession.post( | ||||
|             self._resolve_url('user/sign-in'), | ||||
|         ) | ||||
|         self.assertEqual(200, resp.status_code) | ||||
|         encoding = resp.headers.get("Content-Type") | ||||
|         LOGGER.debug("Mah: %s", encoding) | ||||
|         self.assertEqual("text/html; charset=utf-8", encoding) | ||||
|         self.assertTrue('form' in resp.content) | ||||
|         try: | ||||
|             ValidateHtmlResponse()(resp) | ||||
|         except Exception as err: | ||||
|             self.fail('body is not valid html: {0}'.format(err)) | ||||
143  http-checkout/rathttpcheckout/afc.py  Normal file
							| @@ -0,0 +1,143 @@ | ||||
| from .base import (UserLoginBaseTestCase) | ||||
|  | ||||
|  | ||||
| class TestAfcEngine(UserLoginBaseTestCase): | ||||
|     ''' Class for testing the results of the AFC Engine | ||||
|     ''' | ||||
|  | ||||
|     def setUp(self): | ||||
|         UserLoginBaseTestCase.setUp(self) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         UserLoginBaseTestCase.tearDown(self) | ||||
|  | ||||
|     def _set_afc_config(self, afc_config): | ||||
|         ''' Upload the given AFC config to the server for use by AFC Engine tests | ||||
|         ''' | ||||
|         self._test_modify_request(self._resolve_url( | ||||
|             'ratapi/v1/afcconfig/afc_config.json'), afc_config) | ||||
|  | ||||
|     def _generate_params( | ||||
|             self, | ||||
|             lat, | ||||
|             lng, | ||||
|             height, | ||||
|             semi_maj=0, | ||||
|             semi_min=0, | ||||
|             orientation=0, | ||||
|             height_type="AGL", | ||||
|             height_cert=0, | ||||
|             in_out_door="INDOOR", | ||||
|             ruleset_ids=None): | ||||
|         ''' Uses parameters to generate a well formed JSON object to be used for analysis. | ||||
|  | ||||
|         :param lat: latitude | ||||
|         :type lat: number | ||||
|  | ||||
|         :param lng: longitude | ||||
|         :type lng: number | ||||
|  | ||||
|         :param height: height | ||||
|         :type height: number | ||||
|  | ||||
|         :param semi_maj: ellipse semi-major axis (default 0) | ||||
|         :type semi_maj: number | ||||
|  | ||||
|         :param semi_min: ellipse semi-minor axis (default 0) | ||||
|         :type semi_min: number | ||||
|  | ||||
|         :param orientation: ellipse orientation, degrees clockwise from north (default 0) | ||||
|         :type orientation: number | ||||
|  | ||||
|         :param height_type: "AMSL" (above mean sea level) | "AGL" (above ground level, default) | ||||
|  | ||||
|         :param height_cert: height uncertainty (default 0) | ||||
|         :type height_cert: number | ||||
|  | ||||
|         :param in_out_door: "INDOOR" | "OUTDOOR" | "ANY" | ||||
|  | ||||
|         :param ruleset_ids: list of ruleset IDs (default ['AFC-6GHZ-DEMO-1.0']) | ||||
|  | ||||
|         :returns: PawsRequest | ||||
|         ''' | ||||
|  | ||||
|         if ruleset_ids is None: | ||||
|             ruleset_ids = ['AFC-6GHZ-DEMO-1.0'] | ||||
|  | ||||
|         return { | ||||
|             'deviceDesc': { | ||||
|                 'rulesetIds': ruleset_ids, | ||||
|             }, | ||||
|             'location': { | ||||
|                 'point': { | ||||
|                     'center': { | ||||
|                         'latitude': lat, | ||||
|                         'longitude': lng, | ||||
|                     }, | ||||
|                     'semiMajorAxis': semi_maj, | ||||
|                     'semiMinorAxis': semi_min, | ||||
|                     'orientation': orientation, | ||||
|                 }, | ||||
|             }, | ||||
|             'antenna': { | ||||
|                 'height': height, | ||||
|                 'heightType': height_type, | ||||
|                 'heightUncertainty': height_cert, | ||||
|             }, | ||||
|             'capabilities': { | ||||
|                 'indoorOutdoor': in_out_door, | ||||
|             } | ||||
|         } | ||||
|  | ||||
|     def _test_geojson_result_valid(self, result): | ||||
|         ''' | ||||
|         ''' | ||||
|  | ||||
|     def _test_channel_result_valid(self, result): | ||||
|         ''' | ||||
|         ''' | ||||
|  | ||||
|     def _test_paws_result_valid(self, result, req_devicedesc): | ||||
|         ''' Tests that the structure of a returned paws object is correct | ||||
|         ''' | ||||
|  | ||||
|         # check for same device description | ||||
|         self.assertEqual(req_devicedesc, result.get('deviceDesc')) | ||||
|  | ||||
|         for spec in result.get('spectrumSpecs'): | ||||
|  | ||||
|             # check matching ruleset | ||||
|             self.assertEqual( | ||||
|                 { | ||||
|                     'authority': 'US', | ||||
|                     'rulesetId': 'AFC-6GHZ-DEMO-1.0', | ||||
|                 }, | ||||
|                 spec.get('rulesetInfo') | ||||
|             ) | ||||
|  | ||||
|             for schedule in spec.get('spectrumSchedules'): | ||||
|  | ||||
|                 # check properly formatted time | ||||
|                 self._test_iso_time(schedule.get('eventTime').get('startTime')) | ||||
|                 self._test_iso_time(schedule.get('eventTime').get('stopTime')) | ||||
|  | ||||
|                 # must have four groups of channels | ||||
|                 self.assertEqual(len(schedule.get('spectra')), 4) | ||||
|  | ||||
|                 # validate spectra contents | ||||
|                 self._test_present_bandwidths( | ||||
|                     schedule.get('spectra'), | ||||
|                     [20000000, 40000000, 80000000, 160000000]) | ||||
|  | ||||
|     def _test_iso_time(self, time): | ||||
|         ''' Tests that the time is a properly formatted ISO time string | ||||
|         ''' | ||||
|         self.assertRegexpMatches( | ||||
|             time, r'[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z') | ||||
|  | ||||
|     def _test_present_bandwidths(self, spectra, bandwidths): | ||||
|         ''' Tests to make sure each bandwidth is present in the spectra profiles | ||||
|         ''' | ||||
|         present_bands = [s.get('resolutionBwHz') for s in spectra] | ||||
|         for band_width in bandwidths: | ||||
|             self.assertIn(band_width, present_bands) | ||||
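For reference, the shape of the request body built by _generate_params can be read directly off the code above; e.g. _generate_params(40.75, -73.99, 30) (coordinates chosen arbitrarily here) returns:

    {
        'deviceDesc': {'rulesetIds': ['AFC-6GHZ-DEMO-1.0']},
        'location': {'point': {
            'center': {'latitude': 40.75, 'longitude': -73.99},
            'semiMajorAxis': 0, 'semiMinorAxis': 0, 'orientation': 0,
        }},
        'antenna': {'height': 30, 'heightType': 'AGL', 'heightUncertainty': 0},
        'capabilities': {'indoorOutdoor': 'INDOOR'},
    }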
967  http-checkout/rathttpcheckout/base.py  Normal file
							| @@ -0,0 +1,967 @@ | ||||
| ''' Non-test support objects and classes used by the actual test cases. | ||||
| ''' | ||||
|  | ||||
| import datetime | ||||
| from io import BytesIO | ||||
| import logging | ||||
| import lxml.etree as etree | ||||
| import os | ||||
| import re | ||||
| import requests | ||||
| import shutil | ||||
| import tempfile | ||||
| import unittest | ||||
| from urlparse import urljoin | ||||
| import werkzeug.datastructures | ||||
| import werkzeug.http | ||||
| import werkzeug.urls | ||||
| from nose import SkipTest | ||||
|  | ||||
| #: Logger for this module | ||||
| LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
| #: Regex to match application/xml and application/*+xml | ||||
| XML_CONTENT_RE = re.compile(r'^application/(.+\+)?xml$') | ||||
|  | ||||
| #: Time format for ISO 8601 "basic" time used by XML Schema. | ||||
| #: This string is usable by datetime.strftime and datetime.strptime | ||||
| TIME_FORMAT_BASIC = '%Y%m%dT%H%M%SZ' | ||||
| #: Time format for ISO 8601 "extended" time used by XML Schema. | ||||
| #: This string is usable by datetime.strftime and datetime.strptime | ||||
| TIME_FORMAT_EXTENDED = '%Y-%m-%dT%H:%M:%SZ' | ||||
|  | ||||
| #: Absolute path to this package directory | ||||
| PACKAGE_PATH = os.path.abspath(os.path.dirname(__file__)) | ||||
|  | ||||
|  | ||||
| def get_xml_parser(schema): | ||||
|     ''' Generate a function to extract an XML DOM tree from an encoded document. | ||||
|  | ||||
|     :param schema: If not None, the document will be validated against | ||||
|         this schema object. | ||||
|     :type schema: lxml.etree.XMLSchema or None | ||||
|     :return: The parser function which takes a file-like parameter and | ||||
|         returns a tree object of type :py:cls:`lxml.etree.ElementTree`. | ||||
|     ''' | ||||
|     xmlparser = etree.XMLParser(schema=schema) | ||||
|  | ||||
|     def func(infile): | ||||
|         try: | ||||
|             return etree.parse(infile, parser=xmlparser) | ||||
|         except etree.XMLSyntaxError as err: | ||||
|             infile.seek(0) | ||||
|             with tempfile.NamedTemporaryFile(delete=False) as outfile: | ||||
|                 shutil.copyfileobj(infile, outfile) | ||||
|                 raise ValueError( | ||||
|                     'Failed to parse XML with error {0} in file {1}'.format( | ||||
|                         err, outfile.name)) | ||||
|  | ||||
|     return func | ||||
|  | ||||
|  | ||||
| def extract_metadict(doc): | ||||
|     ''' Extract a server metadata dictionary from its parsed XML document. | ||||
|  | ||||
|     :param doc: The document to read from. | ||||
|     :return: The metadata URL map. | ||||
|     ''' | ||||
|     metadict = {} | ||||
|     for el_a in doc.findall('//{http://www.w3.org/1999/xhtml}a'): | ||||
|         m_id = el_a.attrib.get('id') | ||||
|         m_href = el_a.attrib.get('href') | ||||
|         if m_id is None or m_href is None: | ||||
|             continue | ||||
|         metadict[m_id] = m_href | ||||
|     return metadict | ||||
|  | ||||
|  | ||||
| def merged(base, delta): | ||||
|     ''' Return a merged dictionary contents. | ||||
|  | ||||
|     :param base: The initial contents to merge. | ||||
|     :param delta: The modifications to apply. | ||||
|     :return: A dictionary containing the :py:obj:`base` updated | ||||
|         by the :py:obj:`delta`. | ||||
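|  | ||||
|     For example, merged({'a': 1}, {'a': 2, 'b': 3}) == {'a': 2, 'b': 3}. | ||||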
|     ''' | ||||
|     mod = dict(base) | ||||
|     mod.update(delta) | ||||
|     return mod | ||||
|  | ||||
|  | ||||
| def limit_count(iterable, limit): | ||||
|     ''' Wrap an iterable/generator with a count limit to only yield the first | ||||
|     :py:obj:`limit` number of items. | ||||
|  | ||||
|     :param iterable: The source iterable object. | ||||
|     :param limit: The maximum number of items available from the generator. | ||||
|     :return: A generator with a count limit. | ||||
|     ''' | ||||
|     count = 0 | ||||
|     for item in iterable: | ||||
|         yield item | ||||
|         count += 1 | ||||
|         if count >= limit: | ||||
|             return | ||||
|  | ||||
|  | ||||
| def modify_etag(orig): | ||||
|     ''' Given a base ETag value, generate a modified ETag which is | ||||
|     guaranteed to not match the original. | ||||
|  | ||||
|     :param str orig: The original ETag. | ||||
|     :return: A different ETag | ||||
|     ''' | ||||
|     # Inject characters near the end | ||||
|     mod = orig[:-1] + '-eh' + orig[-1:] | ||||
|     return mod | ||||
|  | ||||
|  | ||||
| class ValidateJsonResponse(object): | ||||
|     ''' Validate an expected JSON file response. | ||||
|     ''' | ||||
|  | ||||
|     def __call__(self, resp): | ||||
|         import json | ||||
|         json.loads(resp.content) | ||||
|  | ||||
|  | ||||
| class ValidateHtmlResponse(object): | ||||
|     ''' Validate an HTML response with a loose parser. | ||||
|     ''' | ||||
|  | ||||
|     def __call__(self, resp): | ||||
|         import bs4 | ||||
|         kwargs = dict( | ||||
|             markup=resp.content, | ||||
|             features='lxml', | ||||
|         ) | ||||
|         bs4.BeautifulSoup(**kwargs) | ||||
|  | ||||
|  | ||||
| class ValidateXmlResponse(object): | ||||
|     ''' Validate an expected XML file response. | ||||
|  | ||||
|     :param parser: A function to take a file-like input and output an | ||||
|         XML element tree :py:cls:`lxml.etree.ElementTree`. | ||||
|     :param require_root: If not None, the root element must be this value. | ||||
|     ''' | ||||
|  | ||||
|     def __init__(self, parser, require_root=None): | ||||
|         self._parser = parser | ||||
|         if not callable(self._parser): | ||||
|             raise ValueError('ValidateXmlResponse parser invalid') | ||||
|         self._require_root = require_root | ||||
|  | ||||
|     def __call__(self, resp): | ||||
|         xml_tree = self._parser(BytesIO(resp.content)) | ||||
|         if self._require_root is not None: | ||||
|             root_tag = xml_tree.getroot().tag | ||||
|             if self._require_root != root_tag: | ||||
|                 raise ValueError( | ||||
|                     'Required root element "{0}" not present, got "{1}"'.format( | ||||
|                         self._require_root, root_tag)) | ||||
|  | ||||
|  | ||||
| class BaseTestCase(unittest.TestCase): | ||||
|     ''' Common access and helper functions which use the :py:mod:`unittest` | ||||
|     framework but this class defines no test functions itself. | ||||
|  | ||||
|     :ivar httpsession: An :py:class:`requests.Session` instance for test use. | ||||
|     :ivar xmlparser: An :py:class:`etree.XMLParser` instance for test use. | ||||
|     ''' | ||||
|  | ||||
|     #: Cached URL to start at and to resolve from | ||||
|     BASE_URL = os.environ.get('HTTPCHECKOUT_BASEURL') | ||||
|     #: True if the editing tests should be skipped | ||||
|     READONLY = bool(os.environ.get('HTTPCHECKOUT_READONLY')) | ||||
|     #: If set, tearDown() keeps (does not delete) any resources created by the tests | ||||
|     KEEP_TESTITEMS = bool(os.environ.get('HTTPCHECKOUT_KEEP_TESTITEMS')) | ||||
|  | ||||
|     def setUp(self): | ||||
|         unittest.TestCase.setUp(self) | ||||
|         self.maxDiff = 10e3 | ||||
|  | ||||
|         self.assertIsNotNone( | ||||
|             self.BASE_URL, 'Missing environment HTTPCHECKOUT_BASEURL') | ||||
|         self.httpsession = requests.Session() | ||||
|  | ||||
|         ca_roots = os.environ.get('HTTPCHECKOUT_CACERTS') | ||||
|         LOGGER.info('HTTPCHECKOUT_CACERTS is "%s"', ca_roots) | ||||
|         if ca_roots == '': | ||||
|             import warnings | ||||
|             from urllib3.exceptions import InsecureRequestWarning | ||||
|             self.httpsession.verify = False | ||||
|             warnings.filterwarnings("ignore", category=InsecureRequestWarning) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         self.httpsession = None | ||||
|         unittest.TestCase.tearDown(self) | ||||
|  | ||||
|     def _assertWriteTest(self): | ||||
|         ''' Skip the current test if HTTPCHECKOUT_READONLY is set. | ||||
|         ''' | ||||
|         if self.READONLY: | ||||
|             self.skipTest( | ||||
|                 'Not running editing tests because of HTTPCHECKOUT_READONLY') | ||||
|  | ||||
|     def _resolve_url(self, url): | ||||
|         ''' Resolve a URL relative to the original base URL. | ||||
|  | ||||
|         :param url: The URL to resolve. | ||||
|         :type url: str | ||||
|         :return: The resolved absolute URL to request on. | ||||
|         :rtype: str | ||||
|         ''' | ||||
|         return urljoin(self.BASE_URL, url) | ||||
|  | ||||
|     def assertSameUrlPath(self, first, second): | ||||
|         ''' Assert that two URLs are equal except for their query/fragment parts. | ||||
|         ''' | ||||
|         f_url = werkzeug.urls.url_parse(first) | ||||
|         s_url = werkzeug.urls.url_parse(second) | ||||
|  | ||||
|         for attr in ('scheme', 'netloc', 'path'): | ||||
|             self.assertEqual( | ||||
|                 getattr(f_url, attr), | ||||
|                 getattr(s_url, attr), | ||||
|                 'Mismatched URL {0}'.format(attr) | ||||
|             ) | ||||
|  | ||||
|     def _get_xml_parser(self, use_schema=None): | ||||
|         ''' Generate a function to extract an XML DOM tree from an encoded document. | ||||
|  | ||||
|         :return: The parser function which takes a file-like parameter and | ||||
|             returns a tree object of type :py:cls:`lxml.etree.ElementTree`. | ||||
|         ''' | ||||
|         return get_xml_parser(schema=use_schema) | ||||
|  | ||||
|     def _get_xml_encoder(self): | ||||
|         ''' Generate a function to encode a document from an XML DOM tree. | ||||
|  | ||||
|         :return: The parser function which takes a parameter of a | ||||
|             tree object of type :py:cls:`lxml.etree.ElementTree` and | ||||
|             returns a file-like object. | ||||
|         ''' | ||||
|  | ||||
|         def func(doc, outfile=None): | ||||
|             ''' Encode a document. | ||||
|  | ||||
|             :param doc: The document to encode. | ||||
|             :type doc: :py:cls:`lxml.etree.ElementTree` | ||||
|             :param outfile: An optional file-like object to encode into. | ||||
|                 This must be None if the encoder is used multiple times. | ||||
|             :type outfile: file-like or None | ||||
|             :return: The encoded file-like object. | ||||
|             ''' | ||||
|             if outfile is None: | ||||
|                 outfile = BytesIO() | ||||
|  | ||||
|             doc.write(outfile, encoding='UTF-8', xml_declaration=True) | ||||
|             if hasattr(outfile, 'seek'): | ||||
|                 try: | ||||
|                     outfile.seek(0) | ||||
|                 except BaseException: | ||||
|                     pass | ||||
|             return outfile | ||||
|  | ||||
|         return func | ||||
|  | ||||
|     def _test_working_links(self, text): | ||||
|         ''' Verify that html has well formed links | ||||
|  | ||||
|         :param text: html doc with href's | ||||
|         ''' | ||||
|  | ||||
|         import bs4 | ||||
|         html = bs4.BeautifulSoup(text, 'html.parser') | ||||
|         for link in [a['href'] for a in html.find_all('a')]: | ||||
|             self._test_working_link(link) | ||||
|  | ||||
|     def _test_working_link(self, url): | ||||
|         ''' Verify that a url returns a 200 response | ||||
|  | ||||
|         :param url: The URL to be checked | ||||
|         ''' | ||||
|  | ||||
|         resolved_url = self._resolve_url(url) | ||||
|         resp = self.httpsession.get(resolved_url) | ||||
|         self.assertEqual(200, resp.status_code) | ||||
|  | ||||
|     def _test_options_allow(self, url, methods): | ||||
|         ''' Verify that the OPTIONS response for a URL matches a specific set. | ||||
|  | ||||
|         :param url: The URL to pass to :py:mod:`requests` | ||||
|         :type url: str | ||||
|         :param methods: The method names which must be identical to the response. | ||||
|         :type methods: iterable | ||||
|         ''' | ||||
|         methods = set([str(m).upper() for m in methods]) | ||||
|         methods.add('OPTIONS') | ||||
|         if 'GET' in methods: | ||||
|             methods.add('HEAD') | ||||
|  | ||||
|         resolved_url = self._resolve_url(url) | ||||
|         resp = self.httpsession.options(resolved_url) | ||||
|         self.assertEqual(200, resp.status_code) | ||||
|         got_allow = werkzeug.http.parse_set_header(resp.headers['allow']) | ||||
|         self.assertEqual(methods, set(got_allow)) | ||||
|  | ||||
|     def _test_path_contents( | ||||
|             self, | ||||
|             url, | ||||
|             params=None, | ||||
|             base_headers=None, | ||||
|             validate_response_pre=None, | ||||
|             validate_response_post=None, | ||||
|             must_authorize=True, | ||||
|             valid_status=None, | ||||
|             content_type=None, | ||||
|             valid_encodings=None, | ||||
|             require_length=True, | ||||
|             require_vary=None, | ||||
|             require_etag=True, | ||||
|             require_lastmod=True, | ||||
|             require_cacheable=True, | ||||
|             cache_must_revalidate=False): | ||||
|         ''' Common assertions for static resources. | ||||
|  | ||||
|         :param url: The URL to pass to :py:mod:`requests` | ||||
|         :type url: str | ||||
|         :param params: URL parameter dictionary to pass to :py:mod:`requests`. | ||||
|         :type params: dict or None | ||||
|         :param base_headers: A dictionary of headers to send with every request. | ||||
|         :type base_headers: dict or None | ||||
|         :param validate_response_pre: A callable which takes a single argument of | ||||
|             the response object and performs its own validation of the headers | ||||
|             and/or body for each non-cached response. | ||||
|             This is performed before any of the parametric checks. | ||||
|         :type validate_response_pre: callable or None | ||||
|         :param validate_response_post: A callable which takes a single argument of | ||||
|             the response object and performs its own validation of the headers | ||||
|             and/or body for each non-cached response. | ||||
|             This is performed after any of the parametric checks. | ||||
|         :type validate_response_post: callable or None | ||||
|         :param must_authorize: Access to the resource without an Authorization | ||||
|             header is attempted and compared against this value. | ||||
|         :type must_authorize: bool | ||||
|         :param valid_status: A set of valid status codes to allow. | ||||
|             If not provided, only code 200 is valid. | ||||
|         :type valid_status: set or None | ||||
|         :param content_type: If not None, the required Content-Type header. | ||||
|         :type content_type: bool or None | ||||
|         :param valid_encodings: If not None, a list of content encodings to check for. | ||||
|             The resource must provide each of the non-identity encodings listed. | ||||
|         :type valid_encodings: list or None | ||||
|         :param require_length: If either true or false, assert that the | ||||
|             content-length header is present or not. | ||||
|         :type require_length: bool or None | ||||
|         :param require_vary: A set of Vary results required to be present | ||||
|             in the response. | ||||
|             If the :py:obj:`valid_encodings` list has more than the identity | ||||
|             encoding present, then 'accept-encoding' will be automatically | ||||
|             added to this vary list. | ||||
|         :type require_vary: list or None | ||||
|         :param require_etag: If not None, whether the ETag is required present | ||||
|             or not present (True or False) or a specific string value. | ||||
|         :type require_etag: str or bool or None | ||||
|         :param require_lastmod: If not None, whether the Last-Modified is | ||||
|             required present or not present (True or False) or a specific value. | ||||
|         :type require_lastmod: str or bool or None | ||||
|         :param require_cacheable: If true, the resource is checked for its cacheability. | ||||
|             Not all resources should be cacheable (even if not explicitly marked no-cache). | ||||
|         :type require_cacheable: bool | ||||
|         :param cache_must_revalidate: If True, the response must have its | ||||
|             'must-revalidate' cache control header set. | ||||
|         :type cache_must_revalidate: bool | ||||
|         :raises: raises unittest exceptions if an assertion fails | ||||
|         ''' | ||||
|  | ||||
|         if base_headers is None: | ||||
|             base_headers = {} | ||||
|  | ||||
|         if valid_status is None: | ||||
|             valid_status = [200] | ||||
|         valid_status = set(valid_status) | ||||
|  | ||||
|         # Set of valid encodings to require | ||||
|         if valid_encodings is None: | ||||
|             valid_encodings = [] | ||||
|         valid_encodings = set(valid_encodings) | ||||
|         valid_encodings.add('identity') | ||||
|         # Force an ordered list with the identity encoding tried first | ||||
|         try_encodings = set(valid_encodings) | ||||
|         try_encodings.discard('identity') | ||||
|         try_encodings = sorted(list(try_encodings)) | ||||
|         try_encodings.insert(0, 'identity') | ||||
|         # Cached identity-encoded contents | ||||
|         identity_body = None | ||||
|  | ||||
|         if require_vary is None: | ||||
|             require_vary = [] | ||||
|         require_vary = set(require_vary) | ||||
|         if len(valid_encodings) > 1: | ||||
|             require_vary.add('accept-encoding') | ||||
|  | ||||
|         resolved_url = self._resolve_url(url) | ||||
|  | ||||
|         # Options on the resource itself | ||||
|         resp = self.httpsession.options( | ||||
|             resolved_url, params=params, | ||||
|             allow_redirects=False, | ||||
|             headers=base_headers, | ||||
|         ) | ||||
|         self.assertEqual(200, resp.status_code) | ||||
|         got_allow = werkzeug.http.parse_set_header(resp.headers.get('allow')) | ||||
|         self.assertIn('options', got_allow) | ||||
|         if 404 not in valid_status: | ||||
|             self.assertIn('head', got_allow) | ||||
|             self.assertIn('get', got_allow) | ||||
|  | ||||
|         # Options without authentication | ||||
|         resp = self.httpsession.options( | ||||
|             resolved_url, params=params, | ||||
|             allow_redirects=False, | ||||
|             headers=merged(base_headers, { | ||||
|                 'authorization': None, | ||||
|             }), | ||||
|         ) | ||||
|         if must_authorize: | ||||
|             self.assertEqual(401, resp.status_code) | ||||
|         else: | ||||
|             self.assertEqual(200, resp.status_code) | ||||
|  | ||||
|         for try_encoding in try_encodings: | ||||
|             # initial non-cache response | ||||
|             enc_headers = merged( | ||||
|                 base_headers, | ||||
|                 { | ||||
|                     'accept-encoding': try_encoding, | ||||
|                 } | ||||
|             ) | ||||
|             resp = self.httpsession.get( | ||||
|                 resolved_url, params=params, | ||||
|                 allow_redirects=False, | ||||
|                 headers=enc_headers, | ||||
|                 stream=True, | ||||
|             ) | ||||
|             # External validation first | ||||
|             if validate_response_pre: | ||||
|                 try: | ||||
|                     validate_response_pre(resp) | ||||
|                 except Exception as err: | ||||
|                     self.fail('Failed pre-validation: {0}'.format(err)) | ||||
|  | ||||
|             # Now parametric validation | ||||
|             self.assertIn(resp.status_code, valid_status) | ||||
|  | ||||
|             got_content_type = werkzeug.http.parse_options_header( | ||||
|                 resp.headers['content-type']) | ||||
|             if content_type is not None: | ||||
|                 self.assertEqual(content_type.lower(), | ||||
|                                  got_content_type[0].lower()) | ||||
|  | ||||
|             # Encoding comparison compared to valid | ||||
|             got_encoding = resp.headers.get('content-encoding', 'identity') | ||||
|             if try_encoding in valid_encodings: | ||||
|                 self.assertEqual(try_encoding, got_encoding) | ||||
|             else: | ||||
|                 self.assertEqual( | ||||
|                     'identity', got_encoding, | ||||
|                     msg='"{0}" was supposed to be a disallowed content-encoding but it was accepted'.format( | ||||
|                         try_encoding) | ||||
|                 ) | ||||
|  | ||||
|             got_length = resp.headers.get('content-length') | ||||
|             if require_length is True: | ||||
|                 self.assertIsNotNone(got_length, msg='Content-Length missing') | ||||
|             elif require_length is False: | ||||
|                 self.assertIsNone( | ||||
|                     got_length, msg='Content-Length should not be present') | ||||
|  | ||||
|             # Guarantee type is correct also | ||||
|             if got_length is not None: | ||||
|                 try: | ||||
|                     got_length = int(got_length) | ||||
|                 except ValueError: | ||||
|                     self.fail( | ||||
|                         'Got a non-integer Content-Length: {0}'.format(got_length)) | ||||
|  | ||||
|             got_vary = werkzeug.http.parse_set_header(resp.headers.get('vary')) | ||||
|             for item in require_vary: | ||||
|                 LOGGER.debug("headers: %s", resp.headers) | ||||
|                 self.assertIn( | ||||
|                     item, | ||||
|                     got_vary, | ||||
|                     msg='Vary header missing item "{0}" got {1}'.format( | ||||
|                         item, | ||||
|                         got_vary)) | ||||
|  | ||||
|             got_etag = resp.headers.get('etag') | ||||
|             got_lastmod = resp.headers.get('last-modified') | ||||
|             if resp.status_code != 204: | ||||
|                 if require_etag is True: | ||||
|                     self.assertIsNotNone(got_etag, msg='ETag header missing') | ||||
|                 elif require_etag is False: | ||||
|                     self.assertIsNone( | ||||
|                         got_etag, msg='ETag header should not be present') | ||||
|                 elif require_etag is not None: | ||||
|                     self.assertEqual(require_etag, got_etag) | ||||
|  | ||||
|                 if require_lastmod is True: | ||||
|                     self.assertIsNotNone( | ||||
|                         got_lastmod, msg='Last-Modified header missing') | ||||
|                 elif require_lastmod is False: | ||||
|                     self.assertIsNone( | ||||
|                         got_lastmod, msg='Last-Modified header should not be present') | ||||
|                 elif require_lastmod is not None: | ||||
|                     self.assertEqual(require_lastmod, got_lastmod) | ||||
|  | ||||
|             # Caching headers | ||||
|             cache_control = werkzeug.http.parse_cache_control_header( | ||||
|                 resp.headers.get('cache-control'), | ||||
|                 cls=werkzeug.datastructures.ResponseCacheControl, | ||||
|             ) | ||||
|             # The resource must define its domain | ||||
|             if False: | ||||
|                 self.assertTrue( | ||||
|                     cache_control.no_cache | ||||
|                     or cache_control.public  # pylint: disable=no-member | ||||
|                     or cache_control.private,  # pylint: disable=no-member | ||||
|                     msg='Missing cache public/private assertion for {0}'.format( | ||||
|                         resolved_url) | ||||
|                 ) | ||||
|             if require_cacheable is not False and cache_must_revalidate is True: | ||||
|                 self.assertTrue( | ||||
|                     cache_control.must_revalidate)  # pylint: disable=no-member | ||||
|             if require_cacheable is True: | ||||
|                 self.assertFalse(cache_control.no_cache) | ||||
|                 self.assertFalse(cache_control.no_store) | ||||
| #                self.assertLessEqual(0, cache_control.max_age) | ||||
|             elif require_cacheable is False: | ||||
|                 # FIXME not always true | ||||
|                 self.assertTrue(cache_control.no_cache) | ||||
|  | ||||
|             # Actual body content itself | ||||
|             got_body = str(resp.content) | ||||
|             if resp.status_code == 204: | ||||
|                 self.assertEqual('', got_body) | ||||
|             else: | ||||
|                 # Ensure decoded body is identical | ||||
|                 if got_encoding == 'identity': | ||||
|                     identity_body = got_body | ||||
|                     self.assertIsNotNone(identity_body) | ||||
|                     if got_length is not None: | ||||
|                         self.assertEqual(len(identity_body), got_length) | ||||
|                 else: | ||||
|                     self.assertEqual(identity_body, got_body) | ||||
|  | ||||
|                 # XML specific decoding | ||||
|                 if XML_CONTENT_RE.match( | ||||
|                         got_content_type[0]) is not None and validate_response_post is None: | ||||
|                     validate_response_post = ValidateXmlResponse( | ||||
|                         self._get_xml_parser(use_schema=True)) | ||||
|  | ||||
|                 # After all parametric tests on this response | ||||
|                 if validate_response_post: | ||||
|                     try: | ||||
|                         validate_response_post(resp) | ||||
|                     except Exception as err: | ||||
|                         self.fail('Failed post-validation: {0}'.format(err)) | ||||
|  | ||||
|             # Check the unauthorized view of same URL | ||||
|             for method in ('GET', 'HEAD'): | ||||
|                 resp = self.httpsession.request( | ||||
|                     method, | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'authorization': None, | ||||
|                     }), | ||||
|                 ) | ||||
|                 if must_authorize: | ||||
|                     self.assertEqual( | ||||
|                         401, | ||||
|                         resp.status_code, | ||||
|                         msg='For {0} on {1}: Expected 401 status got {2}'.format( | ||||
|                             method, | ||||
|                             resolved_url, | ||||
|                             resp.status_code)) | ||||
|                 else: | ||||
|                     self.assertIn( | ||||
|                         resp.status_code, | ||||
|                         valid_status, | ||||
|                         msg='For {0} on {1}: Expected valid status got {2}'.format( | ||||
|                             method, | ||||
|                             resolved_url, | ||||
|                             resp.status_code)) | ||||
|  | ||||
|             # Any resource with cache control header | ||||
|             resp = self.httpsession.head( | ||||
|                 resolved_url, params=params, | ||||
|                 allow_redirects=False, | ||||
|                 headers=merged(enc_headers, { | ||||
|                     'if-match': '*', | ||||
|                 }), | ||||
|             ) | ||||
|             self.assertIn(resp.status_code, valid_status) | ||||
|             # Caching with ETag | ||||
|             if got_etag is not None: | ||||
|                 self.assertIn(resp.status_code, valid_status) | ||||
|                 # Existing resource | ||||
|                 resp = self.httpsession.head( | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'if-match': got_etag, | ||||
|                     }), | ||||
|                 ) | ||||
|                 self.assertIn(resp.status_code, valid_status) | ||||
|                 # Client cache response | ||||
|                 resp = self.httpsession.head( | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'if-none-match': got_etag, | ||||
|                     }), | ||||
|                 ) | ||||
|                 self.assertIn( | ||||
|                     resp.status_code, | ||||
|                     [304] if require_cacheable else valid_status) | ||||
|                 # With adjusted ETag | ||||
|                 mod_etag = modify_etag(got_etag) | ||||
|                 resp = self.httpsession.head( | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'if-none-match': mod_etag, | ||||
|                     }), | ||||
|                 ) | ||||
|                 self.assertIn(resp.status_code, valid_status) | ||||
|  | ||||
|             # Caching with Last-Modified | ||||
|             if got_lastmod is not None: | ||||
|                 # No changes here so normal response | ||||
|                 resp = self.httpsession.head( | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'if-unmodified-since': got_lastmod, | ||||
|                     }), | ||||
|                 ) | ||||
| #                self.assertIn(resp.status_code, valid_status) | ||||
|  | ||||
|                 # An earlier time will give a 412 | ||||
|                 new_time = werkzeug.http.parse_date( | ||||
|                     got_lastmod) - datetime.timedelta(seconds=5) | ||||
|                 resp = self.httpsession.head( | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'if-unmodified-since': werkzeug.http.http_date(new_time), | ||||
|                     }), | ||||
|                 ) | ||||
|                 self.assertIn( | ||||
|                     resp.status_code, | ||||
|                     [412] if require_cacheable else valid_status) | ||||
|  | ||||
|                 # A later time gives a normal response | ||||
|                 new_time = werkzeug.http.parse_date( | ||||
|                     got_lastmod) + datetime.timedelta(seconds=5) | ||||
|                 resp = self.httpsession.head( | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'if-unmodified-since': werkzeug.http.http_date(new_time), | ||||
|                     }), | ||||
|                 ) | ||||
|                 self.assertIn(resp.status_code, valid_status) | ||||
|  | ||||
|                 # Client cache response | ||||
|                 resp = self.httpsession.head( | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'if-modified-since': got_lastmod, | ||||
|                     }), | ||||
|                 ) | ||||
| #                self.assertIn(resp.status_code, [304] if require_cacheable else valid_status) | ||||
|  | ||||
|                 # A later time should also give a 304 response | ||||
|                 new_time = werkzeug.http.parse_date( | ||||
|                     got_lastmod) + datetime.timedelta(seconds=5) | ||||
|                 resp = self.httpsession.head( | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'if-modified-since': werkzeug.http.http_date(new_time), | ||||
|                     }), | ||||
|                 ) | ||||
|                 self.assertIn( | ||||
|                     resp.status_code, | ||||
|                     [304] if require_cacheable else valid_status) | ||||
|  | ||||
|                 # An earlier time will give a 200 response | ||||
|                 new_time = werkzeug.http.parse_date( | ||||
|                     got_lastmod) - datetime.timedelta(seconds=5) | ||||
|                 resp = self.httpsession.head( | ||||
|                     resolved_url, params=params, | ||||
|                     allow_redirects=False, | ||||
|                     headers=merged(enc_headers, { | ||||
|                         'if-modified-since': werkzeug.http.http_date(new_time), | ||||
|                     }), | ||||
|                 ) | ||||
|                 self.assertIn(resp.status_code, valid_status) | ||||
|  | ||||
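|     # A condensed summary (editorial sketch, not a documented server | ||||
|     # contract) of the conditional-request expectations exercised above, | ||||
|     # where etag/lastmod are the values last returned by the server: | ||||
|     # | ||||
|     #   If-Match: etag                 -> normal success status | ||||
|     #   If-None-Match: etag            -> 304 when the resource is cacheable | ||||
|     #   If-Unmodified-Since < lastmod  -> 412 when the resource is cacheable | ||||
|     #   If-Modified-Since >= lastmod   -> 304 when the resource is cacheable | ||||
|     #   If-Modified-Since < lastmod    -> normal success status | ||||
|  | ||||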
|     def _test_modify_request( | ||||
|             self, url, up_data, method='POST', params=None, **kwargs): | ||||
|         ''' Common assertions for requests that modify a resource. | ||||
|  | ||||
|         :param url: The URL to pass to :py:mod:`requests` | ||||
|         :type url: str | ||||
|         :param up_data: The request body data. | ||||
|         :type up_data: str or file-like | ||||
|         :param str method: The method to request the modification. | ||||
|         :param params: URL parameter dictionary to pass to :py:mod:`requests`. | ||||
|         :type params: dict or None | ||||
|         :param base_headers: A dictionary of headers to send with every request. | ||||
|         :type base_headers: dict or None | ||||
|         :param must_authorize: Access to the resource without an Authorization | ||||
|             header is attempted and compared against this value. | ||||
|         :type must_authorize: bool | ||||
|         :param valid_status: A set of valid status codes to allow. | ||||
|             If not provided, only code 201 is valid for new resources and 204 for existing ones. | ||||
|         :type valid_status: set or None | ||||
|         :param empty_is_valid: Whether or not an empty document is a valid modification. | ||||
|             The default is False. | ||||
|         :type empty_is_valid: bool | ||||
|         :param is_idempotent: Whether repeating the same request should | ||||
|             leave the resource unchanged (except for the modify time). | ||||
|             The default is True if the method is PUT. | ||||
|         :type is_idempotent: bool | ||||
|         :param require_etag: If not None, whether an ETag header is | ||||
|             required to be present (True) or absent (False). | ||||
|         :type require_etag: bool or None | ||||
|         ''' | ||||
|         method = method.lower() | ||||
|         # Arguments not passed to _assert_modify_response() | ||||
|         base_headers = kwargs.pop('base_headers', None) | ||||
|         if base_headers is None: | ||||
|             base_headers = {} | ||||
|         must_authorize = kwargs.pop('must_authorize', True) | ||||
|         empty_is_valid = kwargs.pop('empty_is_valid', False) | ||||
|         is_idempotent = kwargs.pop('is_idempotent', method == 'put') | ||||
|  | ||||
|         resolved_url = self._resolve_url(url) | ||||
|  | ||||
|         if hasattr(up_data, 'seek'): | ||||
|  | ||||
|             def reset_up_data(): | ||||
|                 up_data.seek(0) | ||||
|                 return up_data | ||||
|  | ||||
|         else: | ||||
|  | ||||
|             def reset_up_data(): | ||||
|                 return up_data | ||||
|  | ||||
|         # Options on the resource itself | ||||
|         resp = self.httpsession.options( | ||||
|             resolved_url, params=params, | ||||
|             allow_redirects=False, | ||||
|             headers=base_headers, | ||||
|         ) | ||||
|         self.assertEqual(200, resp.status_code) | ||||
|         got_allow = werkzeug.http.parse_set_header(resp.headers['allow']) | ||||
|         self.assertIn('options', got_allow) | ||||
|         self.assertIn(method, got_allow) | ||||
|         # Options without authentication | ||||
|         resp = self.httpsession.options( | ||||
|             resolved_url, params=params, | ||||
|             allow_redirects=False, | ||||
|             headers=merged(base_headers, { | ||||
|                 'authorization': None, | ||||
|             }), | ||||
|         ) | ||||
|         if must_authorize: | ||||
|             self.assertEqual(401, resp.status_code) | ||||
|         else: | ||||
|             self.assertEqual(200, resp.status_code) | ||||
|  | ||||
|         # Initial state for conditions | ||||
|         resp_head = self.httpsession.head( | ||||
|             resolved_url, params=params, | ||||
|             headers={'accept-encoding': 'identity'}, | ||||
|         ) | ||||
|         init_status = resp_head.status_code | ||||
|         self.assertIn(init_status, {200, 404}) | ||||
|         init_etag = resp_head.headers.get('etag') | ||||
|  | ||||
|         if init_status == 200: | ||||
|             # Replacing resource | ||||
|             if kwargs.get('valid_status') is None: | ||||
|                 kwargs['valid_status'] = [204] | ||||
|             match_etag = init_etag if init_etag else '*' | ||||
|             add_headers_fail = {'if-none-match': match_etag} | ||||
|             add_headers_good = {'if-match': match_etag} | ||||
|  | ||||
|         elif init_status == 404: | ||||
|             # New resource | ||||
|             if kwargs.get('valid_status') is None: | ||||
|                 kwargs['valid_status'] = [201] | ||||
|             add_headers_fail = {'if-match': '*'} | ||||
|             add_headers_good = {'if-none-match': '*'} | ||||
|  | ||||
|         if not empty_is_valid: | ||||
|             # Request without a content-type header | ||||
|             resp = self.httpsession.request( | ||||
|                 method, resolved_url, params=params, | ||||
|             ) | ||||
|             self.assertEqual(415, resp.status_code) | ||||
|             # Valid headers but invalid (empty) body content | ||||
|             resp = self.httpsession.request( | ||||
|                 method, resolved_url, params=params, | ||||
|                 headers=base_headers, | ||||
|             ) | ||||
|             self.assertEqual(415, resp.status_code) | ||||
|  | ||||
|         # Check precondition failure | ||||
|         resp = self.httpsession.request( | ||||
|             method, resolved_url, params=params, | ||||
|             headers=merged(base_headers, add_headers_fail), | ||||
|             data=reset_up_data(), | ||||
|         ) | ||||
|         self.assertEqual(412, resp.status_code) | ||||
|  | ||||
|         if must_authorize: | ||||
|             # Unauthorized access with otherwise valid request | ||||
|             resp = self.httpsession.request( | ||||
|                 method, resolved_url, params=params, | ||||
|                 headers=merged(base_headers, { | ||||
|                     'authorization': None, | ||||
|                 }), | ||||
|                 data=reset_up_data(), | ||||
|             ) | ||||
|             self.assertEqual(401, resp.status_code) | ||||
|  | ||||
|         # Actual modifying request | ||||
|         resp_mod = self.httpsession.request( | ||||
|             method, resolved_url, params=params, | ||||
|             headers=merged(base_headers, add_headers_good), | ||||
|             data=reset_up_data(), | ||||
|         ) | ||||
|         self._assert_modify_response(resp_mod, **kwargs) | ||||
|         got_modtime = resp_mod.headers.get('last-modified') | ||||
|         got_etag = resp_mod.headers.get('etag') | ||||
|  | ||||
|         # Verify the same info is present in new HEAD reply | ||||
|         resp_head = self.httpsession.head( | ||||
|             resolved_url, params=params, | ||||
|             headers={'accept-encoding': 'identity'}, | ||||
|         ) | ||||
|         self.assertEqual(200, resp_head.status_code) | ||||
|         self.assertEqual(got_modtime, resp_head.headers.get('last-modified')) | ||||
|         self.assertEqual(got_etag, resp_head.headers.get('etag')) | ||||
|  | ||||
|         if is_idempotent: | ||||
|             # Check a duplicate request | ||||
|             add_headers_good = {'if-match': got_etag} | ||||
|             kwargs['valid_status'] = [204] | ||||
|             resp_mod = self.httpsession.request( | ||||
|                 method, resolved_url, params=params, | ||||
|                 headers=merged(base_headers, add_headers_good), | ||||
|                 data=reset_up_data(), | ||||
|             ) | ||||
|             self._assert_modify_response(resp_mod, **kwargs) | ||||
|             self.assertEqual(got_etag, resp_mod.headers.get('etag')) | ||||
|  | ||||
|         # Give back the final valid response | ||||
|         return resp_mod | ||||
|  | ||||
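|     # Hypothetical usage sketch of _test_modify_request (the URL, body and | ||||
|     # content type below are illustrative, not actual test resources): | ||||
|     # | ||||
|     #     self._test_modify_request( | ||||
|     #         'ratapi/v1/files/example.json', | ||||
|     #         '{"key": "value"}', | ||||
|     #         method='PUT', | ||||
|     #         base_headers={'content-type': 'application/json'}, | ||||
|     #     ) | ||||
|  | ||||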
|     def _assert_modify_response(self, resp, valid_status=None, | ||||
|                                 require_etag=True, require_lastmod=True, | ||||
|                                 old_etag=None): | ||||
|         ''' Verify the contents of a response to HTTP modification with no body. | ||||
|  | ||||
|         :param resp: The response object to check. | ||||
|         :type resp: :py:class:`requests.Response` | ||||
|         :param valid_status: A set of valid status codes to allow. | ||||
|             If not provided, only codes (200, 201, 204) are valid. | ||||
|         :type valid_status: set or None | ||||
|         :param require_etag: If not None, either whether an ETag header is | ||||
|             required to be present (True) or absent (False), or the specific | ||||
|             string value it must equal. | ||||
|         :type require_etag: str or bool or None | ||||
|         :param require_lastmod: If not None, either whether a Last-Modified | ||||
|             header is required to be present (True) or absent (False), or | ||||
|             the specific value it must equal. | ||||
|         :type require_lastmod: str or bool or None | ||||
|         :param old_etag: An optional old ETag value to compare against. | ||||
|             The new response must have a different ETag value than this. | ||||
|         :type old_etag: str or None | ||||
|         ''' | ||||
|         if valid_status is None: | ||||
|             valid_status = [200, 201, 204] | ||||
|         valid_status = set(valid_status) | ||||
|  | ||||
|         self.assertIn(resp.status_code, valid_status) | ||||
|         got_lastmod = resp.headers.get('last-modified') | ||||
|         got_etag = resp.headers.get('etag') | ||||
|  | ||||
|         if require_etag is True: | ||||
|             self.assertIsNotNone(got_etag, msg='ETag header missing') | ||||
|         elif require_etag is False: | ||||
|             self.assertIsNone( | ||||
|                 got_etag, msg='ETag header should not be present') | ||||
|         elif require_etag is not None: | ||||
|             self.assertEqual(require_etag, got_etag) | ||||
|  | ||||
|         if require_lastmod is True: | ||||
|             self.assertIsNotNone( | ||||
|                 got_lastmod, msg='Last-Modified header missing') | ||||
|         elif require_lastmod is False: | ||||
|             self.assertIsNone( | ||||
|                 got_lastmod, msg='Last-Modified header should not be present') | ||||
|         elif require_lastmod is not None: | ||||
|             self.assertEqual(require_lastmod, got_lastmod) | ||||
|  | ||||
|         if old_etag is not None: | ||||
|             self.assertNotEqual(old_etag, got_etag) | ||||
|  | ||||
|         # Empty body | ||||
|         self.assertFalse(bool(resp.content)) | ||||
|  | ||||
|  | ||||
| class UserLoginBaseTestCase(BaseTestCase): | ||||
|     """Wraps tests in login/logout flow | ||||
|  | ||||
|     Encapsulates login/logout wrapping of tests. | ||||
|     Tests that require authentication will need to use the saved login_token and cookies as headers in their requests | ||||
|     """ | ||||
|     #: Account name to test with; assumes the user named by HTTPCHECKOUT_ACCTNAME already exists in the user DB | ||||
|     VALID_ACCTNAME = os.environ.get( | ||||
|         'HTTPCHECKOUT_ACCTNAME', 'admin').decode('utf8') | ||||
|     VALID_PASSPHRASE = os.environ.get( | ||||
|         'HTTPCHECKOUT_PASSPHRASE', 'admin').decode('utf8') | ||||
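|     # Example (hypothetical) invocation overriding the defaults, e.g.: | ||||
|     #   HTTPCHECKOUT_ACCTNAME=tester HTTPCHECKOUT_PASSPHRASE=secret \ | ||||
|     #       nosetests rathttpcheckout | ||||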
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         BaseTestCase.__init__(self, *args, **kwargs) | ||||
|         self.login_token = None | ||||
|         self.cookies = None | ||||
|  | ||||
|     def setUp(self): | ||||
|         BaseTestCase.setUp(self) | ||||
|         resp = self.httpsession.post( | ||||
|             self._resolve_url('auth/login'), | ||||
|             json={ | ||||
|                 'email': self.VALID_ACCTNAME, | ||||
|                 'password': self.VALID_PASSPHRASE}) | ||||
|         LOGGER.debug('code: %s, login response body: %s', | ||||
|                      resp.status_code, resp.content) | ||||
|         self.cookies = resp.cookies | ||||
|         if resp.status_code == 404: | ||||
|             self.fail(msg="{} not found on this server.".format( | ||||
|                 self._resolve_url('auth/login'))) | ||||
|         try: | ||||
|             self.login_token = resp.json()["token"] | ||||
|         except ValueError: | ||||
|             raise SkipTest("Could not login as {}".format(self.VALID_ACCTNAME)) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         resp = self.httpsession.post(self._resolve_url( | ||||
|             'auth/logout'), params={'Authorization': self.login_token}) | ||||
|         LOGGER.debug('response code: %d\nbody: %s', | ||||
|                      resp.status_code, resp.content) | ||||
|         self.login_token = None | ||||
|         self.cookies = None | ||||
|         BaseTestCase.tearDown(self) | ||||
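|  | ||||
|  | ||||
| # Minimal sketch (not an actual test in this suite) of a test case built on | ||||
| # UserLoginBaseTestCase; 'some/protected/url' is a hypothetical endpoint: | ||||
| # | ||||
| #     class ExampleAuthorizedTestCase(UserLoginBaseTestCase): | ||||
| #         def test_protected_resource(self): | ||||
| #             resp = self.httpsession.get( | ||||
| #                 self._resolve_url('some/protected/url'), | ||||
| #                 headers={'Authorization': self.login_token}, | ||||
| #                 cookies=self.cookies) | ||||
| #             self.assertEqual(200, resp.status_code) | ||||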
http-checkout/rathttpcheckout/paws.py (267 lines, Normal file)
| @@ -0,0 +1,267 @@ | ||||
| ''' Test cases related to PAWS API. | ||||
| ''' | ||||
|  | ||||
| import logging | ||||
| from urlparse import urljoin | ||||
| from random import randint | ||||
| from .base import (BaseTestCase, UserLoginBaseTestCase) | ||||
| from .afc import (TestAfcEngine) | ||||
| from nose import SkipTest | ||||
|  | ||||
| #: Logger for this module | ||||
| LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| class TestPawsApi(TestAfcEngine): | ||||
|     ''' Test case to verify the PAWS JSON-RPC endpoint. | ||||
|     ''' | ||||
|  | ||||
|     def setUp(self): | ||||
|         UserLoginBaseTestCase.setUp(self) | ||||
|  | ||||
|         # Get the actual endpoint URL | ||||
|         resp = self.httpsession.head( | ||||
|             self._resolve_url(''), | ||||
|             allow_redirects=True | ||||
|         ) | ||||
|         self.assertIn(resp.status_code, (200,)) | ||||
|         index_url = resp.url | ||||
|         config_url = urljoin(index_url, '../ratapi/v1/guiconfig') | ||||
|         resp = self.httpsession.get(config_url) | ||||
|         self.guiconfig = resp.json() | ||||
|         self.paws_url = self._resolve_url(self.guiconfig['paws_url']) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         UserLoginBaseTestCase.tearDown(self) | ||||
|  | ||||
|     def _call_jsonrpc(self, url, method, params, expect_status=200, | ||||
|                       expect_error=None, expect_result=None): | ||||
|         if not params: | ||||
|             params = {} | ||||
|  | ||||
|         req_id = randint(1, 10**9) | ||||
|         req_body = { | ||||
|             'jsonrpc': '2.0', | ||||
|             'id': req_id, | ||||
|             'method': method, | ||||
|             'params': params, | ||||
|         } | ||||
|         LOGGER.debug("request:\n%s", req_body) | ||||
|         resp = self.httpsession.post( | ||||
|             url, | ||||
|             headers={ | ||||
|                 'accept-encoding': 'gzip', | ||||
|             }, | ||||
|             json=req_body, | ||||
|         ) | ||||
|  | ||||
|         LOGGER.debug("request code: %d body:\n%s", | ||||
|                      resp.status_code, resp.content) | ||||
|         self.assertEqual(expect_status, resp.status_code) | ||||
|         self.assertEqual('application/json', resp.headers.get('content-type')) | ||||
|         resp_body = resp.json() | ||||
|         self.assertEqual('2.0', resp_body.get('jsonrpc')) | ||||
|         self.assertEqual(req_id, resp_body.get('id')) | ||||
|  | ||||
|         if expect_error is not None: | ||||
|             err_obj = resp_body.get('error') | ||||
|             self.assertIsNotNone(err_obj) | ||||
|             for (key, val) in expect_error.iteritems(): | ||||
|                 LOGGER.debug("%s ==? %s", val, err_obj.get(key)) | ||||
|                 self.assertEqual(val, err_obj.get(key)) | ||||
|         elif expect_result is not None: | ||||
|             result_obj = resp_body.get('result') | ||||
|             self.assertIsNotNone( | ||||
|                 result_obj, msg="In body {}".format(resp_body)) | ||||
|             for (key, val) in expect_result.iteritems(): | ||||
|                 self.assertEqual(val, result_obj.get(key)) | ||||
|  | ||||
|         return resp_body | ||||
|  | ||||
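|     # Illustrative JSON-RPC 2.0 exchange produced by _call_jsonrpc (the id | ||||
|     # and method values are examples only): | ||||
|     # | ||||
|     #   request:  {"jsonrpc": "2.0", "id": 42, | ||||
|     #              "method": "spectrum.paws.getSpectrum", "params": {...}} | ||||
|     #   success:  {"jsonrpc": "2.0", "id": 42, "result": {...}} | ||||
|     #   error:    {"jsonrpc": "2.0", "id": 42, | ||||
|     #              "error": {"code": -32601, "name": "MethodNotFoundError", | ||||
|     #                        "message": "..."}} | ||||
|  | ||||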
|     def test_browser_redirect(self): | ||||
|         resp = self.httpsession.head( | ||||
|             self.paws_url | ||||
|         ) | ||||
|         self.assertEqual(302, resp.status_code) | ||||
|         self.assertEqual(urljoin(self.paws_url, 'paws/browse/'), | ||||
|                          resp.headers.get('location')) | ||||
|  | ||||
|     def test_jsonrpc_empty(self): | ||||
|         resp = self.httpsession.post( | ||||
|             self.paws_url, | ||||
|             json={}, | ||||
|         ) | ||||
|         self.assertEqual(200, resp.status_code) | ||||
|         self.assertEqual('application/json', resp.headers.get('content-type')) | ||||
|  | ||||
|         got_body = resp.json() | ||||
|         self.assertEqual(u'2.0', got_body.get('jsonrpc')) | ||||
|         self.assertEqual(None, got_body.get('id')) | ||||
|         got_error = got_body.get('error') | ||||
|         self.assertIsNotNone(got_error) | ||||
|         self.assertEqual(-32602, got_error['code']) | ||||
|         self.assertEqual(u'InvalidParamsError', got_error['name']) | ||||
|  | ||||
|     def test_jsonrpc_badmethod(self): | ||||
|         self._call_jsonrpc( | ||||
|             self.paws_url, | ||||
|             method='hi', | ||||
|             params={}, | ||||
|             expect_error={ | ||||
|                 u'code': -32601, | ||||
|                 u'name': 'MethodNotFoundError', | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|     def test_jsonrpc_badargs(self): | ||||
|         self._call_jsonrpc( | ||||
|             self.paws_url, | ||||
|             method='spectrum.paws.getSpectrum', | ||||
|             params={}, | ||||
|             expect_error={ | ||||
|                 u'code': -32602, | ||||
|                 u'name': u'InvalidParamsError', | ||||
|                 u'message': u'InvalidParamsError: Required parameter names: deviceDesc location antenna capabilities type version', | ||||
|             }) | ||||
|  | ||||
|     def test_jsonrpc_no_rulesets(self): | ||||
|         req_devicedesc = { | ||||
|             "serialNumber": "sn-test", | ||||
|         } | ||||
|         self._call_jsonrpc( | ||||
|             self.paws_url, | ||||
|             method='spectrum.paws.getSpectrum', | ||||
|             params={ | ||||
|                 "antenna": { | ||||
|                     "height": 25, | ||||
|                     "heightType": "AMSL", | ||||
|                     "heightUncertainty": 5 | ||||
|                 }, | ||||
|                 "capabilities": { | ||||
|                     "indoorOutdoor": "INDOOR" | ||||
|                 }, | ||||
|                 "deviceDesc": req_devicedesc, | ||||
|                 "location": { | ||||
|                     "point": { | ||||
|                         "center": { | ||||
|                             "latitude": 40.75, | ||||
|                             "longitude": -74 | ||||
|                         }, | ||||
|                         "orientation": 48, | ||||
|                         "semiMajorAxis": 100, | ||||
|                         "semiMinorAxis": 75 | ||||
|                     } | ||||
|                 }, | ||||
|                 "type": "AVAIL_SPECTRUM_REQ", | ||||
|                 "version": "1.0" | ||||
|             }, | ||||
|             expect_status=401, | ||||
|             expect_error={ | ||||
|                 u'code': 401, | ||||
|                 u'name': u'InvalidCredentialsError', | ||||
|                 u'message': u'InvalidCredentialsError: Invalid rulesetIds: [\"AFC-6GHZ-DEMO-1.1\"] expected', | ||||
|             } | ||||
|         ) | ||||
|  | ||||
|     def test_paws_valid(self): | ||||
|         afc_loc = self.guiconfig["afcconfig_defaults"] | ||||
|         LOGGER.debug("cookies: %s, token: %s", self.cookies, self.login_token) | ||||
|         resp = self.httpsession.head( | ||||
|             self._resolve_url(afc_loc), | ||||
|             headers={ | ||||
|                 'Authorization': self.login_token}, | ||||
|             cookies=self.cookies) | ||||
|         code = resp.status_code | ||||
|         LOGGER.debug("status: %d, url: %s", code, self._resolve_url(afc_loc)) | ||||
|         if code == 404: | ||||
|             raise SkipTest("AFC Config does not exist.") | ||||
|         req_devicedesc = { | ||||
|             "serialNumber": "sn-test", | ||||
|             "rulesetIds": ["AFC-6GHZ-DEMO-1.1"] | ||||
|         } | ||||
|  | ||||
|         self._call_jsonrpc( | ||||
|             self.paws_url, | ||||
|             method='spectrum.paws.getSpectrum', | ||||
|             params={ | ||||
|                 "antenna": { | ||||
|                     "height": 25, | ||||
|                     "heightType": "AMSL", | ||||
|                     "heightUncertainty": 5 | ||||
|                 }, | ||||
|                 "capabilities": { | ||||
|                     "indoorOutdoor": "INDOOR" | ||||
|                 }, | ||||
|                 "deviceDesc": req_devicedesc, | ||||
|                 "location": { | ||||
|                     "point": { | ||||
|                         "center": { | ||||
|                             "latitude": 40.75, | ||||
|                             "longitude": -74 | ||||
|                         }, | ||||
|                         "orientation": 48, | ||||
|                         "semiMajorAxis": 100, | ||||
|                         "semiMinorAxis": 75 | ||||
|                     } | ||||
|                 }, | ||||
|                 "type": "AVAIL_SPECTRUM_REQ", | ||||
|                 "version": "1.0" | ||||
|             }, | ||||
|             expect_result={ | ||||
|                 'version': '1.0', | ||||
|                 'type': 'AVAIL_SPECTRUM_RESP', | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_paws_resp_structure(self): | ||||
|         afc_loc = self.guiconfig["afcconfig_defaults"] | ||||
|         LOGGER.debug("cookies: %s, token: %s", self.cookies, self.login_token) | ||||
|         resp = self.httpsession.get( | ||||
|             self._resolve_url(afc_loc), | ||||
|             headers={ | ||||
|                 'Authorization': self.login_token}, | ||||
|             cookies=self.cookies) | ||||
|         code = resp.status_code | ||||
|         LOGGER.debug("status: %d, url: %s", code, self._resolve_url(afc_loc)) | ||||
|         if code == 404: | ||||
|             raise SkipTest("AFC Config does not exist.") | ||||
|         req_devicedesc = { | ||||
|             "serialNumber": "sn-test", | ||||
|             "rulesetIds": ["AFC-6GHZ-DEMO-1.1"] | ||||
|         } | ||||
|  | ||||
|         response = self._call_jsonrpc( | ||||
|             self.paws_url, | ||||
|             method='spectrum.paws.getSpectrum', | ||||
|             params={ | ||||
|                 "antenna": { | ||||
|                     "height": 25, | ||||
|                     "heightType": "AMSL", | ||||
|                     "heightUncertainty": 5 | ||||
|                 }, | ||||
|                 "capabilities": { | ||||
|                     "indoorOutdoor": "INDOOR" | ||||
|                 }, | ||||
|                 "deviceDesc": req_devicedesc, | ||||
|                 "location": { | ||||
|                     "point": { | ||||
|                         "center": { | ||||
|                             "latitude": 40.75, | ||||
|                             "longitude": -74 | ||||
|                         }, | ||||
|                         "orientation": 80, | ||||
|                         "semiMajorAxis": 500, | ||||
|                         "semiMinorAxis": 400 | ||||
|                     } | ||||
|                 }, | ||||
|                 "type": "AVAIL_SPECTRUM_REQ", | ||||
|                 "version": "1.0" | ||||
|             }, | ||||
|             expect_result={ | ||||
|                 'version': '1.0', | ||||
|                 'type': 'AVAIL_SPECTRUM_RESP', | ||||
|             }, | ||||
|         ) | ||||
|         result = response['result'] | ||||
|  | ||||
|         self._test_paws_result_valid(result, req_devicedesc) | ||||
http-checkout/rathttpcheckout/ratapi.py (161 lines, Normal file)
| @@ -0,0 +1,161 @@ | ||||
| ''' Test cases related to the RAT RESTful API. | ||||
| ''' | ||||
|  | ||||
| from urlparse import urljoin | ||||
| from .base import (ValidateJsonResponse, ValidateHtmlResponse, BaseTestCase) | ||||
|  | ||||
|  | ||||
| class TestRatApi(BaseTestCase): | ||||
|     ''' Test case to verify the RAT RESTful API. | ||||
|     ''' | ||||
|  | ||||
|     def setUp(self): | ||||
|         BaseTestCase.setUp(self) | ||||
|  | ||||
|         # Get the actual endpoint URL | ||||
|         resp = self.httpsession.head( | ||||
|             self._resolve_url(''), | ||||
|             allow_redirects=True | ||||
|         ) | ||||
|         self.assertIn(resp.status_code, (200,)) | ||||
|         index_url = resp.url | ||||
|  | ||||
|         self.guiconfig_url = urljoin(index_url, '../ratapi/v1/guiconfig') | ||||
|         resp = self.httpsession.get(self.guiconfig_url) | ||||
|         self.guiconfig = resp.json() | ||||
|  | ||||
|     def tearDown(self): | ||||
|         BaseTestCase.tearDown(self) | ||||
|  | ||||
|     def test_guiconfig_cache(self): | ||||
|         self._test_path_contents( | ||||
|             self.guiconfig_url, | ||||
|             must_authorize=False, | ||||
|             valid_encodings=None, | ||||
|             require_etag=None, | ||||
|             require_lastmod=None, | ||||
|             validate_response_post=ValidateJsonResponse(), | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class TestUlsDb(BaseTestCase): | ||||
|     ''' Test case to verify the ULS DB. | ||||
|     ''' | ||||
|  | ||||
|     def setUp(self): | ||||
|         BaseTestCase.setUp(self) | ||||
|  | ||||
|         # Get the actual endpoint URL | ||||
|         resp = self.httpsession.head( | ||||
|             self._resolve_url(''), | ||||
|             allow_redirects=True | ||||
|         ) | ||||
|         self.assertIn(resp.status_code, (200,)) | ||||
|         index_url = resp.url | ||||
|  | ||||
|         self.uls_db_url = urljoin(index_url, '../ratapi/v1/files/uls_db') | ||||
|         uls_resp = self.httpsession.get(self.uls_db_url) | ||||
|         self.uls_db = uls_resp | ||||
|  | ||||
|         self.uls_csv_to_sql = urljoin( | ||||
|             index_url, '../ratapi/v1/convert/uls/csv/sql/') | ||||
|  | ||||
|     def tearDown(self): | ||||
|         BaseTestCase.tearDown(self) | ||||
|  | ||||
|     def test_webdav(self): | ||||
|         self._test_path_contents( | ||||
|             self.uls_db_url, | ||||
|             must_authorize=False, | ||||
|             require_etag=False, | ||||
|             valid_encodings=None, | ||||
|             require_lastmod=False, | ||||
|             validate_response_post=ValidateHtmlResponse() | ||||
|         ) | ||||
|  | ||||
|     def test_links(self): | ||||
|         self._test_working_links(self.uls_db.text) | ||||
|  | ||||
|     def test_bad_file(self): | ||||
|         uls_db_url = self.uls_db_url + "/" | ||||
|         self._test_path_contents( | ||||
|             urljoin(uls_db_url, 'bad_file_name.csv'), | ||||
|             must_authorize=False, | ||||
|             require_etag=False, | ||||
|             valid_encodings=None, | ||||
|             require_lastmod=False, | ||||
|             valid_status=[404] | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class TestAntennaPattern(BaseTestCase): | ||||
|     ''' Test case to verify the Antenna Pattern. | ||||
|     ''' | ||||
|  | ||||
|     def setUp(self): | ||||
|         BaseTestCase.setUp(self) | ||||
|  | ||||
|         # Get the actual endpoint URL | ||||
|         resp = self.httpsession.head( | ||||
|             self._resolve_url(''), | ||||
|             allow_redirects=True | ||||
|         ) | ||||
|         self.assertIn(resp.status_code, (200,)) | ||||
|         index_url = resp.url | ||||
|  | ||||
|         self.antenna_url = urljoin( | ||||
|             index_url, '../ratapi/v1/files/antenna_pattern') | ||||
|         antenna_pattern = self.httpsession.get(self.antenna_url) | ||||
|         self.antenna_pattern = antenna_pattern | ||||
|  | ||||
|     def tearDown(self): | ||||
|         BaseTestCase.tearDown(self) | ||||
|  | ||||
|     def test_webdav(self): | ||||
|         self._test_path_contents( | ||||
|             self.antenna_url, | ||||
|             must_authorize=False, | ||||
|             require_etag=False, | ||||
|             valid_encodings=None, | ||||
|             require_lastmod=False, | ||||
|             validate_response_post=ValidateHtmlResponse() | ||||
|         ) | ||||
|  | ||||
|     def test_links(self): | ||||
|         self._test_working_links(self.antenna_pattern.text) | ||||
|  | ||||
|  | ||||
| class TestHistory(BaseTestCase): | ||||
|     ''' Test case to verify the Histories. | ||||
|     ''' | ||||
|  | ||||
|     def setUp(self): | ||||
|         BaseTestCase.setUp(self) | ||||
|  | ||||
|         # Get the actual endpoint URL | ||||
|         resp = self.httpsession.head( | ||||
|             self._resolve_url(''), | ||||
|             allow_redirects=True | ||||
|         ) | ||||
|         self.assertIn(resp.status_code, (200,)) | ||||
|         index_url = resp.url | ||||
|  | ||||
|         self.history_url = urljoin(index_url, '../ratapi/v1/history') | ||||
|         history = self.httpsession.get(self.history_url) | ||||
|         self.history = history | ||||
|  | ||||
|     def tearDown(self): | ||||
|         BaseTestCase.tearDown(self) | ||||
|  | ||||
|     def test_webdav(self): | ||||
|         self._test_path_contents( | ||||
|             self.history_url, | ||||
|             must_authorize=False, | ||||
|             require_etag=False, | ||||
|             valid_encodings=None, | ||||
|             require_lastmod=False, | ||||
|             validate_response_post=ValidateHtmlResponse() | ||||
|         ) | ||||
|  | ||||
|     def test_links(self): | ||||
|         self._test_working_links(self.history.text) | ||||
http-checkout/rathttpcheckout/www.py (76 lines, Normal file)
| @@ -0,0 +1,76 @@ | ||||
| ''' Test cases related to Web pages (not the APIs used by them). | ||||
| ''' | ||||
|  | ||||
| import logging | ||||
| from .base import (ValidateHtmlResponse, BaseTestCase) | ||||
|  | ||||
| #: Logger for this module | ||||
| LOGGER = logging.getLogger(__name__) | ||||
|  | ||||
|  | ||||
| class TestWebApp(BaseTestCase): | ||||
|     ''' Test case to verify the web application pages. | ||||
|     ''' | ||||
|  | ||||
|     def setUp(self): | ||||
|         BaseTestCase.setUp(self) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         BaseTestCase.tearDown(self) | ||||
|  | ||||
|     def test_root_redirect(self): | ||||
|         resp = self.httpsession.head(self._resolve_url('')) | ||||
|         self.assertEqual(302, resp.status_code) | ||||
|         self.assertEqual(self._resolve_url('www/index.html'), | ||||
|                          resp.headers.get('location')) | ||||
|  | ||||
|     def test_html_app(self): | ||||
|         self._test_path_contents( | ||||
|             self._resolve_url('www/index.html'), | ||||
|             valid_encodings=None, | ||||
|             content_type='text/html', | ||||
|             must_authorize=False, | ||||
|             require_etag=True, | ||||
|             require_lastmod=True, | ||||
|             require_cacheable=True, | ||||
|             validate_response_post=ValidateHtmlResponse(), | ||||
|         ) | ||||
|  | ||||
|     def test_guiconfig(self): | ||||
|         required_keys = frozenset([ | ||||
|             'afcconfig_defaults', | ||||
|             'uls_convert_url', | ||||
|             'login_url', | ||||
|             'paws_url', | ||||
|             'history_url', | ||||
|             'admin_url', | ||||
|             'user_url', | ||||
|             'ap_deny_admin_url', | ||||
|             'rat_api_analysis', | ||||
|             'version', | ||||
|             'antenna_url', | ||||
|             'google_apikey', | ||||
|             'uls_url' | ||||
|         ]) | ||||
|         resp = self.httpsession.get(self._resolve_url('ratapi/v1/guiconfig')) | ||||
|         encoding = resp.headers["Content-Type"] | ||||
|         self.assertEqual("application/json", encoding) | ||||
|         try: | ||||
|             parsed_body = resp.json() | ||||
|         except ValueError: | ||||
|             self.fail("Body is not valid JSON.") | ||||
|  | ||||
|         missing_keys = required_keys - frozenset(parsed_body.keys()) | ||||
|         if missing_keys: | ||||
|             self.fail('Missing required keys: {0}'.format( | ||||
|                 ' '.join(sorted(missing_keys)))) | ||||
|  | ||||
|         non_200_eps = {} | ||||
|         for value in parsed_body.values(): | ||||
|             resp = self.httpsession.options(self._resolve_url(value)) | ||||
|             LOGGER.debug("Verifying status of %s", self._resolve_url(value)) | ||||
|             if resp.status_code != 200: | ||||
|                 non_200_eps[value] = resp.status_code | ||||
|         # Assert after the loop so all failing endpoints are reported at once | ||||
|         self.assertEqual( | ||||
|             {}, | ||||
|             non_200_eps, | ||||
|             msg="{} defined in GUI config as required endpoint(s) but returned non-200 status on OPTIONS".format(non_200_eps)) | ||||
infra/afc/Chart.yaml (24 lines, Normal file)
| @@ -0,0 +1,24 @@ | ||||
| apiVersion: v2 | ||||
| name: afc | ||||
| description: A Helm chart for Kubernetes | ||||
|  | ||||
| # A chart can be either an 'application' or a 'library' chart. | ||||
| # | ||||
| # Application charts are a collection of templates that can be packaged into versioned archives | ||||
| # to be deployed. | ||||
| # | ||||
| # Library charts provide useful utilities or functions for the chart developer. They're included as | ||||
| # a dependency of application charts to inject those utilities and functions into the rendering | ||||
| # pipeline. Library charts do not define any templates and therefore cannot be deployed. | ||||
| type: application | ||||
|  | ||||
| # This is the chart version. This version number should be incremented each time you make changes | ||||
| # to the chart and its templates, including the app version. | ||||
| # Versions are expected to follow Semantic Versioning (https://semver.org/) | ||||
| version: 0.3.1 | ||||
|  | ||||
| # This is the version number of the application being deployed. This version number should be | ||||
| # incremented each time you make changes to the application. Versions are not expected to | ||||
| # follow Semantic Versioning. They should reflect the version the application is using. | ||||
| # It is recommended to use it with quotes. | ||||
| appVersion: "3.8.15.0" | ||||
infra/afc/config (3 lines, Normal file)
| @@ -0,0 +1,3 @@ | ||||
| nonMasqueradeCIDRs: | ||||
| - 240.0.0.0/4 | ||||
| resyncInterval: 60s | ||||
infra/afc/templates/NOTES.txt (38 lines, Normal file)
| @@ -0,0 +1,38 @@ | ||||
| 1. Get the application URL by running these commands: | ||||
| {{- if .Values.ingress.enabled }} | ||||
| {{- range $host := .Values.ingress.hosts }} | ||||
|   {{- range .paths }} | ||||
|   http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }} | ||||
|   {{- end }} | ||||
| {{- end }} | ||||
| {{- else }} | ||||
|  | ||||
|   {{- if eq .Values.service.msghnd.type "LoadBalancer" }} | ||||
|   echo "Fetching LoadBalancer IP for msghnd..." | ||||
|   export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ .Chart.Name }}-{{ .Values.service.msghnd.hostname }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}") | ||||
|   echo http://$SERVICE_IP:{{ .Values.service.msghnd.port }} | ||||
|   {{- end }} | ||||
|  | ||||
|   {{- if eq .Values.service.webui.type "LoadBalancer" }} | ||||
|   echo "Fetching LoadBalancer IP for webui..." | ||||
|   export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ .Chart.Name }}-{{ .Values.service.webui.hostname }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}") | ||||
|   echo http://$SERVICE_IP:{{ .Values.service.webui.port }} | ||||
|   {{- end }} | ||||
|  | ||||
|   {{- if eq .Values.service.objst.type "ClusterIP" }} | ||||
|   echo "Fetching ClusterIP for objst..." | ||||
|   export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "afc.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}") | ||||
|   export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}") | ||||
|   echo "Visit http://127.0.0.1:{{ .Values.service.objst.fileStoragePort }} to use your application" | ||||
|   kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:{{ .Values.service.objst.fileStoragePort }} | ||||
|   {{- end }} | ||||
|  | ||||
|   {{- if eq .Values.service.rmq.type "ClusterIP" }} | ||||
|   echo "Fetching ClusterIP for rmq..." | ||||
|   export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "afc.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}") | ||||
|   export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}") | ||||
|   echo "Visit http://127.0.0.1:{{ .Values.service.rmq.port }} to use your application" | ||||
|   kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:{{ .Values.service.rmq.port }} | ||||
|   {{- end }} | ||||
|  | ||||
| {{- end }} | ||||
infra/afc/templates/_helpers.tpl (62 lines, Normal file)
| @@ -0,0 +1,62 @@ | ||||
| {{/* | ||||
| Expand the name of the chart. | ||||
| */}} | ||||
| {{- define "afc.name" -}} | ||||
| {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} | ||||
| {{- end }} | ||||
|  | ||||
| {{/* | ||||
| Create a default fully qualified app name. | ||||
| We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). | ||||
| If release name contains chart name it will be used as a full name. | ||||
| */}} | ||||
| {{- define "afc.fullname" -}} | ||||
| {{- if .Values.fullnameOverride }} | ||||
| {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} | ||||
| {{- else }} | ||||
| {{- $name := default .Chart.Name .Values.nameOverride }} | ||||
| {{- if contains $name .Release.Name }} | ||||
| {{- .Release.Name | trunc 63 | trimSuffix "-" }} | ||||
| {{- else }} | ||||
| {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} | ||||
| {{- end }} | ||||
| {{- end }} | ||||
| {{- end }} | ||||
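|  | ||||
| {{/* | ||||
| Editorial example (hypothetical values): with chart name "afc" and release | ||||
| "prod", "afc.fullname" renders "prod-afc"; with release "afc-prod", which | ||||
| already contains the chart name, it renders "afc-prod" unchanged. | ||||
| */}} | ||||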
|  | ||||
| {{/* | ||||
| Create chart name and version as used by the chart label. | ||||
| */}} | ||||
| {{- define "afc.chart" -}} | ||||
| {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} | ||||
| {{- end }} | ||||
|  | ||||
| {{/* | ||||
| Common labels | ||||
| */}} | ||||
| {{- define "afc.labels" -}} | ||||
| helm.sh/chart: {{ include "afc.chart" . }} | ||||
| {{ include "afc.selectorLabels" . }} | ||||
| {{- if .Chart.AppVersion }} | ||||
| app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} | ||||
| {{- end }} | ||||
| app.kubernetes.io/managed-by: {{ .Release.Service }} | ||||
| {{- end }} | ||||
|  | ||||
| {{/* | ||||
| Selector labels | ||||
| */}} | ||||
| {{- define "afc.selectorLabels" -}} | ||||
| app.kubernetes.io/name: {{ include "afc.name" . }} | ||||
| app.kubernetes.io/instance: {{ .Release.Name }} | ||||
| {{- end }} | ||||
|  | ||||
| {{/* | ||||
| Create the name of the service account to use | ||||
| */}} | ||||
| {{- define "afc.serviceAccountName" -}} | ||||
| {{- if .Values.serviceAccount.create }} | ||||
| {{- default (include "afc.fullname" .) .Values.serviceAccount.name }} | ||||
| {{- else }} | ||||
| {{- default "default" .Values.serviceAccount.name }} | ||||
| {{- end }} | ||||
| {{- end }} | ||||
infra/afc/templates/deployment-als-kafka.yaml (71 lines, Normal file)
| @@ -0,0 +1,71 @@ | ||||
| apiVersion: apps/v1 | ||||
| kind: Deployment | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.service.als_kafka.hostname }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   {{- if not .Values.autoscaling.enabled }} | ||||
|   replicas: {{ .Values.replicaCount.als_kafka }} | ||||
|   {{- end }} | ||||
|   selector: | ||||
|     matchLabels: | ||||
|       {{- include "afc.selectorLabels" . | nindent 6 }} | ||||
|   template: | ||||
|     metadata: | ||||
|       {{- with .Values.podAnnotations }} | ||||
|       annotations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       labels: | ||||
|         {{- include "afc.selectorLabels" . | nindent 8 }} | ||||
|         afc: als-kafka | ||||
|     spec: | ||||
|       {{- with .Values.imagePullSecrets }} | ||||
|       imagePullSecrets: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       serviceAccountName: {{ include "afc.serviceAccountName" . }} | ||||
|       securityContext: | ||||
|         {{- toYaml .Values.podSecurityContext | nindent 8 }} | ||||
|       containers: | ||||
|         - name: {{ .Chart.Name }}-{{ .Values.service.als_kafka.hostname }} | ||||
|           securityContext: | ||||
|             {{- toYaml .Values.securityContext | nindent 12 }} | ||||
|           image: "{{ .Values.image.als_kafka.repository }}:{{ .Values.image.als_kafka.tag | default .Chart.AppVersion }}" | ||||
|           imagePullPolicy: {{ .Values.image.als_kafka.pullPolicy }} | ||||
|           ports: | ||||
|             - name: als-kafka-port | ||||
|               containerPort: {{ .Values.service.als_kafka.port | int }} | ||||
|               protocol: TCP | ||||
|           # livenessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: /fbrat/www/index.html | ||||
|           #     port: http | ||||
|           # readinessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: / | ||||
|           #     port: http | ||||
|           env: | ||||
|             - name: KAFKA_ADVERTISED_HOST | ||||
|               value: {{ .Values.service.als_kafka.hostname | quote }} | ||||
|             - name: KAFKA_CLIENT_PORT | ||||
|               value: {{ .Values.service.als_kafka.port | quote }} | ||||
|             - name: KAFKA_MAX_REQUEST_SIZE | ||||
|               value: {{ .Values.service.als_kafka.max_request_size | quote | replace ":" "" }} | ||||
|           resources: | ||||
|             {{- toYaml .Values.resources.als_kafka | nindent 12 }} | ||||
|       imagePullSecrets: | ||||
|         - name: container-repo-secret | ||||
|       {{- with .Values.nodeSelector }} | ||||
|       nodeSelector: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.affinity }} | ||||
|       affinity: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.tolerations }} | ||||
|       tolerations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
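|  | ||||
| # A minimal values.yaml fragment consumed by the template above (the key | ||||
| # names come from the template; the concrete values are illustrative only): | ||||
| # | ||||
| #   replicaCount: | ||||
| #     als_kafka: 1 | ||||
| #   image: | ||||
| #     als_kafka: | ||||
| #       repository: myrepo/als-kafka   # hypothetical repository | ||||
| #       tag: ""                        # empty tag falls back to appVersion | ||||
| #       pullPolicy: IfNotPresent | ||||
| #   service: | ||||
| #     als_kafka: | ||||
| #       hostname: als-kafka | ||||
| #       port: 9092 | ||||
| #       max_request_size: "1048576" | ||||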
infra/afc/templates/deployment-als-siphon.yaml (94 lines, Normal file)
| @@ -0,0 +1,94 @@ | ||||
| apiVersion: apps/v1 | ||||
| kind: Deployment | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.deployments.als_siphon.name }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   {{- if not .Values.autoscaling.enabled }} | ||||
|   replicas: {{ .Values.replicaCount.als_siphon }} | ||||
|   {{- end }} | ||||
|   selector: | ||||
|     matchLabels: | ||||
|       {{- include "afc.selectorLabels" . | nindent 6 }} | ||||
|   template: | ||||
|     metadata: | ||||
|       {{- with .Values.podAnnotations }} | ||||
|       annotations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       labels: | ||||
|         {{- include "afc.selectorLabels" . | nindent 8 }} | ||||
|         afc: als-siphon | ||||
|     spec: | ||||
|       {{- with .Values.imagePullSecrets }} | ||||
|       imagePullSecrets: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       serviceAccountName: {{ include "afc.serviceAccountName" . }} | ||||
|       securityContext: | ||||
|         {{- toYaml .Values.podSecurityContext | nindent 8 }} | ||||
|       containers: | ||||
|         - name: {{ .Chart.Name }}-{{ .Values.deployments.als_siphon.name }} | ||||
|           securityContext: | ||||
|             {{- toYaml .Values.securityContext | nindent 12 }} | ||||
|           image: "{{ .Values.image.als_siphon.repository }}:{{ .Values.image.als_siphon.tag | default .Chart.AppVersion }}" | ||||
|           imagePullPolicy: {{ .Values.image.als_siphon.pullPolicy }} | ||||
|           env: | ||||
|             - name: KAFKA_SERVERS | ||||
|               value: "{{ .Values.service.als_kafka.hostname }}:{{ .Values.service.als_kafka.port }}" | ||||
|             - name: POSTGRES_HOST | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: POSTGRES_HOST | ||||
|             - name: INIT_IF_EXISTS | ||||
|               value: {{ .Values.deployments.als_siphon.init_if_exists | quote }} | ||||
|             - name: KAFKA_MAX_REQUEST_SIZE | ||||
|               value: {{ .Values.service.als_kafka.max_request_size | quote | replace ":" "" }} | ||||
|             - name: POSTGRES_INIT_USER | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: POSTGRES_USER | ||||
|             - name: POSTGRES_INIT_PASSWORD | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: POSTGRES_PASSWORD | ||||
|             - name: POSTGRES_ALS_USER | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: POSTGRES_USER | ||||
|             - name: POSTGRES_ALS_PASSWORD | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: POSTGRES_PASSWORD | ||||
|             - name: POSTGRES_LOG_USER | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: POSTGRES_USER | ||||
|             - name: POSTGRES_LOG_PASSWORD | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: POSTGRES_PASSWORD | ||||
|           resources: | ||||
|             {{- toYaml .Values.resources.als_siphon | nindent 12 }} | ||||
|       imagePullSecrets: | ||||
|         - name: container-repo-secret | ||||
|         {{- with .Values.imagePullSecrets }} | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|         {{- end }} | ||||
|       {{- with .Values.nodeSelector }} | ||||
|       nodeSelector: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.affinity }} | ||||
|       affinity: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.tolerations }} | ||||
|       tolerations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
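Two details in the als-siphon environment above are easy to miss. First, the INIT, ALS and LOG credential pairs all read the same POSTGRES_USER / POSTGRES_PASSWORD keys from {{ .Chart.Name }}-postgres-secret, so by default every siphon role connects as one database account; separate per-role credentials would require extra keys in that secret. Second, KAFKA_MAX_REQUEST_SIZE is piped through quote | replace ":" "": as the comment in values.yaml explains, the value is stored as the string ":10485760" so Helm does not parse it as a number (https://github.com/helm/helm/issues/1707), and the template strips the ":" again. With the default values.yaml at the end of this chart, the pipeline renders as:

    # values.yaml
    als_kafka:
      max_request_size: ":10485760"

    # template
    value: {{ .Values.service.als_kafka.max_request_size | quote | replace ":" "" }}

    # rendered manifest
    value: "10485760"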
							
								
								
									
infra/afc/templates/deployment-msghnd.yaml (Normal file, 110 lines)
| @@ -0,0 +1,110 @@ | ||||
| apiVersion: apps/v1 | ||||
| kind: Deployment | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.service.msghnd.hostname }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   {{- if not .Values.autoscaling.enabled }} | ||||
|   replicas: {{ .Values.replicaCount.msghnd }} | ||||
|   {{- end }} | ||||
|   selector: | ||||
|     matchLabels: | ||||
|       {{- include "afc.selectorLabels" . | nindent 6 }} | ||||
|   template: | ||||
|     metadata: | ||||
|       {{- with .Values.podAnnotations }} | ||||
|       annotations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       labels: | ||||
|         {{- include "afc.selectorLabels" . | nindent 8 }} | ||||
|         afc: msghnd | ||||
|     spec: | ||||
|       serviceAccountName: {{ include "afc.serviceAccountName" . }} | ||||
|       securityContext: | ||||
|         {{- toYaml .Values.podSecurityContext | nindent 8 }} | ||||
|       containers: | ||||
|         - name: {{ .Chart.Name }}-{{ .Values.service.msghnd.hostname }} | ||||
|           securityContext: | ||||
|             {{- toYaml .Values.securityContext | nindent 12 }} | ||||
|           image: "{{ .Values.image.msghnd.repository }}:{{ .Values.image.msghnd.tag | default .Chart.AppVersion }}" | ||||
|           imagePullPolicy: {{ .Values.image.msghnd.pullPolicy }} | ||||
|           ports: | ||||
|             - name: http | ||||
|               containerPort: {{ .Values.service.msghnd.containerPort | int }} | ||||
|               protocol: TCP | ||||
|           # livenessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: /fbrat/www/index.html | ||||
|           #     port: http | ||||
|           # readinessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: / | ||||
|           #     port: http | ||||
|           volumeMounts: | ||||
|             - name: {{ .Chart.Name }}-msghnd-rat-api-secret | ||||
|               mountPath: /etc/xdg/fbrat/ratapi.conf | ||||
|               subPath: ratapi.conf | ||||
|           env: | ||||
|             # RabbitMQ server name: | ||||
|             - name: BROKER_TYPE | ||||
|               value: "external" | ||||
|             - name: BROKER_FQDN | ||||
|               value: {{ .Values.service.rmq.hostname | quote }} | ||||
|             # Filestorage params: | ||||
|             - name: AFC_OBJST_HOST | ||||
|               value: {{ .Values.service.objst.hostname | quote }} | ||||
|             - name: AFC_OBJST_PORT | ||||
|               value: {{ .Values.service.objst.fileStoragePort | quote }} | ||||
|             - name: AFC_OBJST_SCHEME | ||||
|               value: {{ .Values.service.objst.scheme | quote }} | ||||
|             # ALS params | ||||
|             - name: ALS_KAFKA_SERVER_ID | ||||
|               value: {{ .Values.service.msghnd.hostname | quote }} | ||||
|             - name: ALS_KAFKA_CLIENT_BOOTSTRAP_SERVERS | ||||
|               value: "{{ .Values.service.als_kafka.hostname }}:{{ .Values.service.als_kafka.port }}" | ||||
|             - name: ALS_KAFKA_MAX_REQUEST_SIZE | ||||
|               value: {{ .Values.service.als_kafka.max_request_size | quote | replace ":" "" }} | ||||
|             # Rcache parameters | ||||
|             - name: RCACHE_ENABLED | ||||
|               value: {{ .Values.service.rcache.is_enabled | quote }} | ||||
|             - name: RCACHE_POSTGRES_DSN | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: RCACHE_POSTGRES_DSN | ||||
|             - name: RCACHE_SERVICE_URL | ||||
|               value: "http://{{ .Values.service.rcache.hostname }}:{{ .Values.service.rcache.port }}" | ||||
|             - name: RCACHE_RMQ_DSN | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-rmq-rcache-secret | ||||
|                   key: RCACHE_RMQ_DSN | ||||
|             # own msghnd parameters | ||||
|             - name: AFC_MSGHND_WORKERS | ||||
|               value: {{ .Values.service.msghnd.threads_per_pod | quote }} | ||||
|           resources: | ||||
|             {{- toYaml .Values.resources.msghnd | nindent 12 }} | ||||
|       volumes: | ||||
|         - name: {{ .Chart.Name }}-msghnd-rat-api-secret | ||||
|           secret: | ||||
|             secretName: {{ .Chart.Name }}-msghnd-rat-api-secret | ||||
|       imagePullSecrets: | ||||
|         - name: container-repo-secret | ||||
|         {{- with .Values.imagePullSecrets }} | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|         {{- end }} | ||||
|       {{- with .Values.nodeSelector }} | ||||
|       nodeSelector: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.affinity }} | ||||
|       affinity: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.tolerations }} | ||||
|       tolerations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
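The msghnd container listens on the named port http ({{ .Values.service.msghnd.containerPort }}, 8000 by default), while the matching Service in service-msghnd.yaml further down publishes port 80 and forwards to targetPort: http, so clients always reach msghnd on 80 regardless of the container port. AFC_MSGHND_WORKERS is fed from service.msghnd.threads_per_pod (2 by default), keeping per-pod concurrency tunable from values.yaml.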
							
								
								
									
infra/afc/templates/deployment-objst.yaml (Normal file, 72 lines)
| @@ -0,0 +1,72 @@ | ||||
| apiVersion: apps/v1 | ||||
| kind: Deployment | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.service.objst.hostname }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   {{- if not .Values.autoscaling.enabled }} | ||||
|   replicas: {{ .Values.replicaCount.objst }} | ||||
|   {{- end }} | ||||
|   selector: | ||||
|     matchLabels: | ||||
|       {{- include "afc.selectorLabels" . | nindent 6 }} | ||||
|   template: | ||||
|     metadata: | ||||
|       {{- with .Values.podAnnotations }} | ||||
|       annotations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       labels: | ||||
|         {{- include "afc.selectorLabels" . | nindent 8 }} | ||||
|         afc: objst | ||||
|     spec: | ||||
|       {{- with .Values.imagePullSecrets }} | ||||
|       imagePullSecrets: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       serviceAccountName: {{ include "afc.serviceAccountName" . }} | ||||
|       securityContext: | ||||
|         {{- toYaml .Values.podSecurityContext | nindent 8 }} | ||||
|       containers: | ||||
|         - name: {{ .Chart.Name }}-{{ .Values.service.objst.hostname }} | ||||
|           securityContext: | ||||
|             {{- toYaml .Values.securityContext | nindent 12 }} | ||||
|           image: "{{ .Values.image.objst.repository }}:{{ .Values.image.objst.tag | default .Chart.AppVersion }}" | ||||
|           imagePullPolicy: {{ .Values.image.objst.pullPolicy }} | ||||
|           ports: | ||||
|             - name: afc-objst-port | ||||
|               containerPort: {{ .Values.service.objst.fileStoragePort }} | ||||
|               protocol: TCP | ||||
|             - name: afc-objst-hist | ||||
|               containerPort: {{ .Values.service.objst.historyViewPort }} | ||||
|               protocol: TCP | ||||
|           volumeMounts: | ||||
|             - mountPath: {{ .Values.deployments.global.mountPath | quote }} | ||||
|               name: cont-confs | ||||
|           env: | ||||
|             # Filestorage params: | ||||
|             - name: AFC_OBJST_PORT | ||||
|               value: {{ .Values.service.objst.fileStoragePort | quote }} | ||||
|             - name: AFC_OBJST_HIST_PORT | ||||
|               value: {{ .Values.service.objst.historyViewPort | quote }} | ||||
|             - name: AFC_OBJST_LOCAL_DIR | ||||
|               value: "{{ .Values.deployments.global.mountPath }}/storage" | ||||
|           resources: | ||||
|             {{- toYaml .Values.resources.objst | nindent 12 }} | ||||
|       volumes: | ||||
|         - name: cont-confs | ||||
|           persistentVolumeClaim: | ||||
|             claimName: cont-confs-claim | ||||
|       {{- with .Values.nodeSelector }} | ||||
|       nodeSelector: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.affinity }} | ||||
|       affinity: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.tolerations }} | ||||
|       tolerations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
							
								
								
									
infra/afc/templates/deployment-rcache.yaml (Normal file, 81 lines)
| @@ -0,0 +1,81 @@ | ||||
| apiVersion: apps/v1 | ||||
| kind: Deployment | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.service.rcache.hostname }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   {{- if not .Values.autoscaling.enabled }} | ||||
|   replicas: {{ .Values.replicaCount.rcache }} | ||||
|   {{- end }} | ||||
|   selector: | ||||
|     matchLabels: | ||||
|       {{- include "afc.selectorLabels" . | nindent 6 }} | ||||
|   template: | ||||
|     metadata: | ||||
|       {{- with .Values.podAnnotations }} | ||||
|       annotations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       labels: | ||||
|         {{- include "afc.selectorLabels" . | nindent 8 }} | ||||
|         afc: rcache | ||||
|     spec: | ||||
|       serviceAccountName: {{ include "afc.serviceAccountName" . }} | ||||
|       securityContext: | ||||
|         {{- toYaml .Values.podSecurityContext | nindent 8 }} | ||||
|       containers: | ||||
|         - name: {{ .Chart.Name }}-{{ .Values.service.rcache.hostname }} | ||||
|           securityContext: | ||||
|             {{- toYaml .Values.securityContext | nindent 12 }} | ||||
|           image: "{{ .Values.image.rcache.repository }}:{{ .Values.image.rcache.tag | default .Chart.AppVersion }}" | ||||
|           imagePullPolicy: {{ .Values.image.rcache.pullPolicy }} | ||||
|           ports: | ||||
|             - name: rcache-port | ||||
|               containerPort: {{ .Values.service.rcache.port | int }} | ||||
|               protocol: TCP | ||||
|           # livenessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: /fbrat/www/index.html | ||||
|           #     port: http | ||||
|           # readinessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: / | ||||
|           #     port: http | ||||
|           env: | ||||
|             # R-Cache params | ||||
|             - name: RCACHE_ENABLED | ||||
|               value: {{ .Values.service.rcache.is_enabled | quote }} | ||||
|             - name: RCACHE_CLIENT_PORT | ||||
|               value: {{ .Values.service.rcache.port | quote }} | ||||
|             - name: RCACHE_POSTGRES_DSN | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: RCACHE_POSTGRES_DSN | ||||
|             - name: RCACHE_AFC_REQ_URL | ||||
|               value: "http://{{ .Values.service.msghnd.hostname }}:{{ .Values.service.msghnd.port }}/fbrat/ap-afc/availableSpectrumInquiry?nocache=True" | ||||
|             - name: RCACHE_RULESETS_URL | ||||
|               value: "http://{{ .Values.service.msghnd.hostname }}:{{ .Values.service.msghnd.port }}/fbrat/ratapi/v1/GetRulesetIDs" | ||||
|             - name: RCACHE_CONFIG_RETRIEVAL_URL | ||||
|               value: "http://{{ .Values.service.msghnd.hostname }}:{{ .Values.service.msghnd.port }}/fbrat/ratapi/v1/GetAfcConfigByRulesetID" | ||||
|           resources: | ||||
|             {{- toYaml .Values.resources.rcache | nindent 12 }} | ||||
|       imagePullSecrets: | ||||
|         - name: container-repo-secret | ||||
|         {{- with .Values.imagePullSecrets }} | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|         {{- end }} | ||||
|       {{- with .Values.nodeSelector }} | ||||
|       nodeSelector: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.affinity }} | ||||
|       affinity: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.tolerations }} | ||||
|       tolerations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
							
								
								
									
infra/afc/templates/deployment-rmq.yaml (Normal file, 62 lines)
| @@ -0,0 +1,62 @@ | ||||
| apiVersion: apps/v1 | ||||
| kind: Deployment | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.service.rmq.hostname }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   {{- if not .Values.autoscaling.enabled }} | ||||
|   replicas: {{ .Values.replicaCount.rmq }} | ||||
|   {{- end }} | ||||
|   selector: | ||||
|     matchLabels: | ||||
|       {{- include "afc.selectorLabels" . | nindent 6 }} | ||||
|   template: | ||||
|     metadata: | ||||
|       {{- with .Values.podAnnotations }} | ||||
|       annotations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       labels: | ||||
|         {{- include "afc.selectorLabels" . | nindent 8 }} | ||||
|         afc: rmq | ||||
|     spec: | ||||
|       {{- with .Values.imagePullSecrets }} | ||||
|       imagePullSecrets: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       serviceAccountName: {{ include "afc.serviceAccountName" . }} | ||||
|       securityContext: | ||||
|         {{- toYaml .Values.podSecurityContext | nindent 8 }} | ||||
|       containers: | ||||
|         - name: {{ .Chart.Name }}-{{ .Values.service.rmq.hostname }} | ||||
|           securityContext: | ||||
|             {{- toYaml .Values.securityContext | nindent 12 }} | ||||
|           image: "{{ .Values.image.rmq.repository }}:{{ .Values.image.rmq.tag | default .Chart.AppVersion }}" | ||||
|           imagePullPolicy: {{ .Values.image.rmq.pullPolicy }} | ||||
|           ports: | ||||
|             - name: rmqp | ||||
|               containerPort: {{ .Values.service.rmq.port }} | ||||
|               protocol: TCP | ||||
|           # livenessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: / | ||||
|           #     port: rmqp | ||||
|           # readinessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: / | ||||
|           #     port: rmqp | ||||
|           resources: | ||||
|             {{- toYaml .Values.resources.rmq | nindent 12 }} | ||||
|       {{- with .Values.nodeSelector }} | ||||
|       nodeSelector: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.affinity }} | ||||
|       affinity: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.tolerations }} | ||||
|       tolerations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
							
								
								
									
infra/afc/templates/deployment-webui.yml (Normal file, 112 lines)
| @@ -0,0 +1,112 @@ | ||||
| apiVersion: apps/v1 | ||||
| kind: Deployment | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.service.webui.hostname }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   {{- if not .Values.autoscaling.enabled }} | ||||
|   replicas: {{ .Values.replicaCount.webui }} | ||||
|   {{- end }} | ||||
|   selector: | ||||
|     matchLabels: | ||||
|       {{- include "afc.selectorLabels" . | nindent 6 }} | ||||
|   template: | ||||
|     metadata: | ||||
|       {{- with .Values.podAnnotations }} | ||||
|       annotations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       labels: | ||||
|         {{- include "afc.selectorLabels" . | nindent 8 }} | ||||
|         afc: webui | ||||
|     spec: | ||||
|       serviceAccountName: {{ include "afc.serviceAccountName" . }} | ||||
|       securityContext: | ||||
|         {{- toYaml .Values.podSecurityContext | nindent 8 }} | ||||
|       containers: | ||||
|         - name: {{ .Chart.Name }}-{{ .Values.service.webui.hostname }} | ||||
|           securityContext: | ||||
|             {{- toYaml .Values.securityContext | nindent 12 }} | ||||
|           image: "{{ .Values.image.webui.repository }}:{{ .Values.image.webui.tag | default .Chart.AppVersion }}" | ||||
|           imagePullPolicy: {{ .Values.image.webui.pullPolicy }} | ||||
|           ports: | ||||
|             - name: http | ||||
|               containerPort: {{ .Values.service.webui.port }} | ||||
|               protocol: TCP | ||||
|           # livenessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: /fbrat/www/index.html | ||||
|           #     port: http | ||||
|           # readinessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: / | ||||
|           #     port: http | ||||
|           volumeMounts: | ||||
|             - mountPath: {{ .Values.deployments.global.mountPath | quote }} | ||||
|               name: cont-confs | ||||
|             - name: {{ .Chart.Name }}-webui-rat-api-secret | ||||
|               mountPath: /etc/xdg/fbrat/ratapi.conf | ||||
|               subPath: ratapi.conf | ||||
|           env: | ||||
|             # RabbitMQ server name: | ||||
|             - name: BROKER_TYPE | ||||
|               value: "external" | ||||
|             - name: BROKER_FQDN | ||||
|               value: {{ .Values.service.rmq.hostname | quote }} | ||||
|             # Filestorage params: | ||||
|             - name: AFC_OBJST_HOST | ||||
|               value: {{ .Values.service.objst.hostname | quote }} | ||||
|             - name: AFC_OBJST_PORT | ||||
|               value: {{ .Values.service.objst.fileStoragePort | quote }} | ||||
|             - name: AFC_OBJST_SCHEME | ||||
|               value: {{ .Values.service.objst.scheme | quote }} | ||||
|             # ALS params | ||||
|             - name: ALS_KAFKA_SERVER_ID | ||||
|               value: {{ .Values.service.webui.hostname | quote }} | ||||
|             - name: ALS_KAFKA_CLIENT_BOOTSTRAP_SERVERS | ||||
|               value: "{{ .Values.service.als_kafka.hostname }}:{{ .Values.service.als_kafka.port }}" | ||||
|             - name: ALS_KAFKA_MAX_REQUEST_SIZE | ||||
|               value: {{ .Values.service.als_kafka.max_request_size | quote | replace ":" "" }} | ||||
|             # Rcache parameters | ||||
|             - name: RCACHE_ENABLED | ||||
|               value: {{ .Values.service.rcache.is_enabled | quote }} | ||||
|             - name: RCACHE_POSTGRES_DSN | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-postgres-secret | ||||
|                   key: RCACHE_POSTGRES_DSN | ||||
|             - name: RCACHE_SERVICE_URL | ||||
|               value: "http://{{ .Values.service.rcache.hostname }}:{{ .Values.service.rcache.port }}" | ||||
|             - name: RCACHE_RMQ_DSN | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-rmq-rcache-secret | ||||
|                   key: RCACHE_RMQ_DSN | ||||
|           resources: | ||||
|             {{- toYaml .Values.resources.webui | nindent 12 }} | ||||
|       volumes: | ||||
|         - name: cont-confs | ||||
|           persistentVolumeClaim: | ||||
|             claimName: cont-confs-claim | ||||
|         - name: {{ .Chart.Name }}-webui-rat-api-secret | ||||
|           secret: | ||||
|             secretName: {{ .Chart.Name }}-webui-rat-api-secret | ||||
|       imagePullSecrets: | ||||
|         - name: container-repo-secret | ||||
|         {{- with .Values.imagePullSecrets }} | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|         {{- end }} | ||||
|       {{- with .Values.nodeSelector }} | ||||
|       nodeSelector: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.affinity }} | ||||
|       affinity: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.tolerations }} | ||||
|       tolerations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
							
								
								
									
infra/afc/templates/deployment-worker.yaml (Normal file, 111 lines)
| @@ -0,0 +1,111 @@ | ||||
| apiVersion: apps/v1 | ||||
| kind: Deployment | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.deployments.worker.name }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   {{- if not .Values.autoscaling.enabled }} | ||||
|   replicas: {{ .Values.replicaCount.worker }} | ||||
|   {{- end }} | ||||
|   selector: | ||||
|     matchLabels: | ||||
|       {{- include "afc.selectorLabels" . | nindent 6 }} | ||||
|   template: | ||||
|     metadata: | ||||
|       {{- with .Values.podAnnotations }} | ||||
|       annotations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       labels: | ||||
|         {{- include "afc.selectorLabels" . | nindent 8 }} | ||||
|         afc: worker | ||||
|     spec: | ||||
|       serviceAccountName: {{ include "afc.serviceAccountName" . }} | ||||
|       securityContext: | ||||
|         {{- toYaml .Values.podSecurityContext | nindent 8 }} | ||||
|       containers: | ||||
|         - name: {{ .Chart.Name }}-{{ .Values.deployments.worker.name }} | ||||
|           securityContext: | ||||
|             {{- toYaml .Values.securityContext | nindent 12 }} | ||||
|           image: "{{ .Values.image.worker.repository }}:{{ .Values.image.worker.tag | default .Chart.AppVersion }}" | ||||
|           imagePullPolicy: {{ .Values.image.worker.pullPolicy }} | ||||
|           # livenessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: / | ||||
|           #     port: rmqp | ||||
|           # readinessProbe: | ||||
|           #   httpGet: | ||||
|           #     path: / | ||||
|           #     port: rmqp | ||||
|           volumeMounts: | ||||
|             - mountPath: {{ .Values.deployments.global.mountPath | quote }} | ||||
|               name: cont-confs | ||||
|           env: | ||||
|             # Filestorage params: | ||||
|             - name: AFC_OBJST_HOST | ||||
|               value: {{ .Values.service.objst.hostname | quote }} | ||||
|             - name: AFC_OBJST_PORT | ||||
|               value: {{ .Values.service.objst.fileStoragePort | quote }} | ||||
|             - name: AFC_OBJST_SCHEME | ||||
|               value: {{ .Values.service.objst.scheme | quote }} | ||||
|             # celery params | ||||
|             - name: AFC_WORKER_CELERY_WORKERS | ||||
|               value: "rat_worker" | ||||
|             - name: AFC_WORKER_CELERY_OPTS | ||||
|               value: "" | ||||
|             - name: AFC_WORKER_CELERY_CONCURRENCY | ||||
|               value: {{ .Values.deployments.worker.celery_concurrency | quote }} | ||||
|             # RabbitMQ server name: | ||||
|             - name: BROKER_TYPE | ||||
|               value: "external" | ||||
|             - name: BROKER_FQDN | ||||
|               value: {{ .Values.service.rmq.hostname | quote }} | ||||
|             # afc-engine preload lib params | ||||
|             - name: AFC_AEP_ENABLE | ||||
|               value: {{ .Values.deployments.worker.afc_aep_enable | quote }} | ||||
|             - name: AFC_AEP_DEBUG | ||||
|               value: {{ .Values.deployments.worker.afc_aep_debug | quote }} | ||||
|             - name: AFC_AEP_REAL_MOUNTPOINT | ||||
|               value: "{{ .Values.deployments.global.mountPath }}/{{ .Values.deployments.worker.afc_aep_real_mountpoint_relative}}" | ||||
|             # Rcache parameters | ||||
|             - name: RCACHE_ENABLED | ||||
|               value: {{ .Values.service.rcache.is_enabled | quote }} | ||||
|             - name: RCACHE_SERVICE_URL | ||||
|               value: "http://{{ .Values.service.rcache.hostname }}:{{ .Values.service.rcache.port }}" | ||||
|             - name: RCACHE_RMQ_DSN | ||||
|               valueFrom: | ||||
|                 secretKeyRef: | ||||
|                   name: {{ .Chart.Name }}-rmq-rcache-secret | ||||
|                   key: RCACHE_RMQ_DSN | ||||
|             # ALS params | ||||
|             - name: ALS_KAFKA_SERVER_ID | ||||
|               value: {{ .Values.deployments.worker.name | quote }} | ||||
|             - name: ALS_KAFKA_CLIENT_BOOTSTRAP_SERVERS | ||||
|               value: "{{ .Values.service.als_kafka.hostname }}:{{ .Values.service.als_kafka.port }}" | ||||
|             - name: ALS_KAFKA_MAX_REQUEST_SIZE | ||||
|               value: {{ .Values.service.als_kafka.max_request_size | quote | replace ":" "" }} | ||||
|           resources: | ||||
|             {{- toYaml .Values.resources.worker | nindent 12 }} | ||||
|       volumes: | ||||
|         - name: cont-confs | ||||
|           persistentVolumeClaim: | ||||
|             claimName: cont-confs-claim | ||||
|       imagePullSecrets: | ||||
|         - name: container-repo-secret | ||||
|         {{- with .Values.imagePullSecrets }} | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|         {{- end }} | ||||
|       {{- with .Values.nodeSelector }} | ||||
|       nodeSelector: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.affinity }} | ||||
|       affinity: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
|       {{- with .Values.tolerations }} | ||||
|       tolerations: | ||||
|         {{- toYaml . | nindent 8 }} | ||||
|       {{- end }} | ||||
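For reference, with the defaults from values.yaml at the end of this chart (mountPath "/mnt/nfs" and afc_aep_real_mountpoint_relative "rat_transfer/3dep/1_arcsec"), the afc-engine preload block above renders as:

    - name: AFC_AEP_ENABLE
      value: "1"
    - name: AFC_AEP_DEBUG
      value: "1"
    - name: AFC_AEP_REAL_MOUNTPOINT
      value: "/mnt/nfs/rat_transfer/3dep/1_arcsec"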
							
								
								
									
infra/afc/templates/hpa.yaml (Normal file, 28 lines)
| @@ -0,0 +1,28 @@ | ||||
| {{- if .Values.autoscaling.enabled }} | ||||
| apiVersion: autoscaling/v2beta1 | ||||
| kind: HorizontalPodAutoscaler | ||||
| metadata: | ||||
|   name: {{ include "afc.fullname" . }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   scaleTargetRef: | ||||
|     apiVersion: apps/v1 | ||||
|     kind: Deployment | ||||
|     name: {{ include "afc.fullname" . }} | ||||
|   minReplicas: {{ .Values.autoscaling.minReplicas }} | ||||
|   maxReplicas: {{ .Values.autoscaling.maxReplicas }} | ||||
|   metrics: | ||||
|     {{- if .Values.autoscaling.targetCPUUtilizationPercentage }} | ||||
|     - type: Resource | ||||
|       resource: | ||||
|         name: cpu | ||||
|         targetAverageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }} | ||||
|     {{- end }} | ||||
|     {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }} | ||||
|     - type: Resource | ||||
|       resource: | ||||
|         name: memory | ||||
|         targetAverageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }} | ||||
|     {{- end }} | ||||
| {{- end }} | ||||
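Two caveats on this HPA. The autoscaling/v2beta1 API and its targetAverageUtilization field were removed in Kubernetes 1.25, and scaleTargetRef points at {{ include "afc.fullname" . }}, a name none of the Deployments above use, so the object looks like unmodified chart scaffold. On a current cluster the equivalent HPA would target autoscaling/v2; a minimal sketch (CPU metric only, not part of the chart):

    apiVersion: autoscaling/v2
    kind: HorizontalPodAutoscaler
    metadata:
      name: {{ include "afc.fullname" . }}
    spec:
      scaleTargetRef:
        apiVersion: apps/v1
        kind: Deployment
        name: {{ include "afc.fullname" . }}
      minReplicas: {{ .Values.autoscaling.minReplicas }}
      maxReplicas: {{ .Values.autoscaling.maxReplicas }}
      metrics:
        - type: Resource
          resource:
            name: cpu
            target:
              type: Utilization
              averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}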
							
								
								
									
infra/afc/templates/ingress-nginx-int.yaml (Normal file, 89 lines)
| @@ -0,0 +1,89 @@ | ||||
| apiVersion: networking.k8s.io/v1 | ||||
| kind: Ingress | ||||
| metadata: | ||||
|   name: afc-ingress | ||||
|   annotations: | ||||
|     # Use annotations to configure specific ingress-nginx behaviors like SSL, timeouts, etc. | ||||
|     nginx.ingress.kubernetes.io/rewrite-target: / | ||||
|     nginx.ingress.kubernetes.io/configuration-snippet: | | ||||
|       if ($request_uri = "/") { | ||||
|         return 301 $scheme://$http_host/fbrat; | ||||
|       } | ||||
|     # Add other necessary annotations based on your specific requirements. | ||||
| spec: | ||||
|   ingressClassName: nginx | ||||
|   rules: | ||||
|   #- host: {{ .Values.service.ingress_ngnix.hostname | quote }} | ||||
|   - http: | ||||
|       paths: | ||||
|       # should be behind auth or mTLS | ||||
|       - path: /fbrat/ap-afc/availableSpectrumInquirySec | ||||
|         pathType: Prefix | ||||
|         backend: | ||||
|           service: | ||||
|             name: {{ .Values.service.webui.hostname | quote }} | ||||
|             port: | ||||
|               number: {{ .Values.service.webui.port }} | ||||
|       # should be behind auth or mTLS | ||||
|       - path: /fbrat/ap-afc/availableSpectrumInquiry | ||||
|         pathType: Prefix | ||||
|         backend: | ||||
|           service: | ||||
|             name: {{ .Values.service.msghnd.hostname | quote }} | ||||
|             port: | ||||
|               number: {{ .Values.service.msghnd.port }} | ||||
|       # should be accessible only from internal network | ||||
|       # | ||||
|       # - path: /fbrat/ap-afc/availableSpectrumInquiryInternal | ||||
|       #   pathType: Prefix | ||||
|       #   backend: | ||||
|       #     service: | ||||
|       #       name: {{ .Values.service.msghnd.hostname | quote }} | ||||
|       #       port: | ||||
|       #         number: {{ .Values.service.msghnd.port }} | ||||
|       # | ||||
|       # -------------------------------------------------------------------- | ||||
|       # need to forbid webdav methods other than GET | ||||
|       # | ||||
|       # - path: /fbrat/ratapi/v1/files | ||||
|       #   pathType: Prefix | ||||
|       #   backend: | ||||
|       #     service: | ||||
|       #       name: {{ .Values.service.webui.hostname | quote }} | ||||
|       #       port: | ||||
|       #         number: {{ .Values.service.webui.port }} | ||||
|       # | ||||
|       # -------------------------------------------------------------------- | ||||
|       # should be accessible only from internal network | ||||
|       # | ||||
|       # - path: /fbrat/ratapi/v1/GetAfcConfigByRulesetID | ||||
|       #   pathType: Prefix | ||||
|       #   backend: | ||||
|       #     service: | ||||
|       #       name: {{ .Values.service.msghnd.hostname | quote }} | ||||
|       #       port: | ||||
|       #         number: {{ .Values.service.msghnd.port }} | ||||
|       # | ||||
|       # -------------------------------------------------------------------- | ||||
|       # should be accessible only from internal network | ||||
|       # | ||||
|       # - path: /fbrat/ratapi/v1/GetRulesetIDs | ||||
|       #   pathType: Prefix | ||||
|       #   backend: | ||||
|       #     service: | ||||
|       #       name: {{ .Values.service.msghnd.hostname | quote }} | ||||
|       #       port: | ||||
|       #         number: {{ .Values.service.msghnd.port }} | ||||
|       - path: / | ||||
|         pathType: Prefix | ||||
|         backend: | ||||
|           service: | ||||
|             name: {{ .Values.service.webui.hostname | quote }} | ||||
|             port: | ||||
|               number: {{ .Values.service.webui.port }} | ||||
|       # Add other paths as needed. | ||||
|   # Add TLS configuration if you're using HTTPS. | ||||
| #   tls: | ||||
| #   - hosts: | ||||
| #     - {{ .Values.service.ingress_ngnix.hostname | quote }} | ||||
| #     secretName: your-tls-secret | ||||
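One caution on the annotations at the top of this file: ingress-nginx rewrites every matched URI to the rewrite-target value, so rewrite-target: / hands /fbrat/ap-afc/availableSpectrumInquiry to the backend as plain /. If the msghnd and webui backends expect the original /fbrat/... paths (the 301 snippet suggests they do), the annotation should simply be dropped. If stripping a prefix is actually intended, the ingress-nginx documentation uses the capture-group form, sketched here as an assumption about intent rather than chart code:

    nginx.ingress.kubernetes.io/rewrite-target: /$2
    ...
    - path: /fbrat(/|$)(.*)
      pathType: ImplementationSpecific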
							
								
								
									
infra/afc/templates/ingress.yaml (Normal file, 61 lines)
| @@ -0,0 +1,61 @@ | ||||
| {{- if .Values.ingress.enabled -}} | ||||
| {{- $fullName := include "afc.fullname" . -}} | ||||
| {{- $svcPort := .Values.service.port -}} | ||||
| {{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }} | ||||
|   {{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }} | ||||
|   {{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}} | ||||
|   {{- end }} | ||||
| {{- end }} | ||||
| {{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}} | ||||
| apiVersion: networking.k8s.io/v1 | ||||
| {{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} | ||||
| apiVersion: networking.k8s.io/v1beta1 | ||||
| {{- else -}} | ||||
| apiVersion: extensions/v1beta1 | ||||
| {{- end }} | ||||
| kind: Ingress | ||||
| metadata: | ||||
|   name: {{ $fullName }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
|   {{- with .Values.ingress.annotations }} | ||||
|   annotations: | ||||
|     {{- toYaml . | nindent 4 }} | ||||
|   {{- end }} | ||||
| spec: | ||||
|   {{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }} | ||||
|   ingressClassName: {{ .Values.ingress.className }} | ||||
|   {{- end }} | ||||
|   {{- if .Values.ingress.tls }} | ||||
|   tls: | ||||
|     {{- range .Values.ingress.tls }} | ||||
|     - hosts: | ||||
|         {{- range .hosts }} | ||||
|         - {{ . | quote }} | ||||
|         {{- end }} | ||||
|       secretName: {{ .secretName }} | ||||
|     {{- end }} | ||||
|   {{- end }} | ||||
|   rules: | ||||
|     {{- range .Values.ingress.hosts }} | ||||
|     - host: {{ .host | quote }} | ||||
|       http: | ||||
|         paths: | ||||
|           {{- range .paths }} | ||||
|           - path: {{ .path }} | ||||
|             {{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }} | ||||
|             pathType: {{ .pathType }} | ||||
|             {{- end }} | ||||
|             backend: | ||||
|               {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }} | ||||
|               service: | ||||
|                 name: {{ $fullName }} | ||||
|                 port: | ||||
|                   number: {{ $svcPort }} | ||||
|               {{- else }} | ||||
|               serviceName: {{ $fullName }} | ||||
|               servicePort: {{ $svcPort }} | ||||
|               {{- end }} | ||||
|           {{- end }} | ||||
|     {{- end }} | ||||
| {{- end }} | ||||
							
								
								
									
infra/afc/templates/msghnd.ratapi.secret.yaml.example (Normal file, 24 lines)
| @@ -0,0 +1,24 @@ | ||||
| apiVersion: v1 | ||||
| kind: Secret | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-msghnd-rat-api-secret | ||||
| type: Opaque | ||||
| stringData: | ||||
|   ratapi.conf: | | ||||
|     # Flask settings | ||||
|     DEBUG = False | ||||
|     PROPAGATE_EXCEPTIONS = False | ||||
|     LOG_LEVEL = 'WARNING' | ||||
|     SECRET_KEY = 'ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGH' | ||||
|  | ||||
|     # Flask-SQLAlchemy settings | ||||
|     SQLALCHEMY_DATABASE_URI = 'postgresql://postgres_user:psql_password@psql_hostname/vhost_name' | ||||
|  | ||||
|     # Flask-User settings | ||||
|     USER_EMAIL_SENDER_EMAIL = 'admin@example.com' | ||||
|  | ||||
|     # RAT settings | ||||
|     GOOGLE_APIKEY = 'ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLM' | ||||
|     HISTORY_DIR = '/mnt/nfs/rat_transfer/history' | ||||
|     DEFAULT_ULS_DIR = '/mnt/nfs/rat_transfer/ULS_Database' | ||||
|     AFC_APP_TYPE = 'msghnd' | ||||
							
								
								
									
infra/afc/templates/postgres-rcache.secret.yaml.example (Normal file, 14 lines)
| @@ -0,0 +1,14 @@ | ||||
| apiVersion: v1 | ||||
| kind: Secret | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-postgres-secret | ||||
| type: Opaque | ||||
| data: | ||||
|   # base64 encoded postgresql DSN for rcache user | ||||
|   RCACHE_POSTGRES_DSN: cG9zdGdyZXNxbDovL3JjYWNoZV9wb3N0Z3Jlc191c2VyOnBzcWxfcGFzc3dvcmRAcHNxbF9ob3N0bmFtZS92aG9zdF9uYW1l | ||||
|   # base64 encoded postgresql hostname | ||||
|   POSTGRES_HOST: cHNxbF9ob3N0bmFtZQ== | ||||
|   # base64 encoded postgresql username | ||||
|   POSTGRES_USER: cG9zdGdyZXNfdXNlcg== | ||||
|   # base64 encoded postgresql password | ||||
|   POSTGRES_PASSWORD: cG9zdGdyZXNfdXNlcl9zX3BzcWxfcGFzc3dvcmQ= | ||||
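The data: block carries pre-encoded placeholders (POSTGRES_HOST decodes to psql_hostname, POSTGRES_USER to postgres_user, and the DSN to a string of the form postgresql://rcache_postgres_user:psql_password@psql_hostname/vhost_name). When adapting this .example by hand it may be simpler to use stringData, as the ratapi.conf secrets in this chart already do, and let Kubernetes perform the base64 encoding; a minimal equivalent sketch with the same placeholders:

    apiVersion: v1
    kind: Secret
    metadata:
      name: {{ .Chart.Name }}-postgres-secret
    type: Opaque
    stringData:
      RCACHE_POSTGRES_DSN: postgresql://rcache_postgres_user:psql_password@psql_hostname/vhost_name
      POSTGRES_HOST: psql_hostname
      POSTGRES_USER: postgres_user
      POSTGRES_PASSWORD: postgres_user_s_psql_password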
							
								
								
									
infra/afc/templates/rmq-rcache.secret.yaml.example (Normal file, 8 lines)
| @@ -0,0 +1,8 @@ | ||||
| apiVersion: v1 | ||||
| kind: Secret | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-rmq-rcache-secret | ||||
| type: Opaque | ||||
| data: | ||||
|   # base64 encoded amqp connection string for rcache | ||||
|   RCACHE_RMQ_DSN: YW1xcDovL3JjYWNoZV91c2VyOnBhc3N3b3JkQHJhYmJpdF9tcV9ob3N0OjU2NzIvaG9zdA== | ||||
							
								
								
									
infra/afc/templates/scaledobject-worker.yaml (Normal file, 19 lines)
| @@ -0,0 +1,19 @@ | ||||
| apiVersion: keda.sh/v1alpha1 | ||||
| kind: ScaledObject | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.deployments.worker.name }}-so | ||||
|   namespace: default | ||||
| spec: | ||||
|   scaleTargetRef: | ||||
|     name: {{ .Chart.Name }}-{{ .Values.deployments.worker.name }} | ||||
|   minReplicaCount: 2 | ||||
|   pollingInterval: 5 # Optional. Default: 30 seconds | ||||
|   cooldownPeriod: 300 # Optional. Default: 300 seconds | ||||
|   triggers: | ||||
|     - type: rabbitmq | ||||
|       metadata: | ||||
|         queueName: celery | ||||
|         mode: QueueLength | ||||
|         value: {{ .Values.deployments.worker.queue_length | quote }} | ||||
|       authenticationRef: | ||||
|         name: {{ .Chart.Name }}-{{ .Values.deployments.worker.name }}-trigger | ||||
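With mode: QueueLength, KEDA scales the worker Deployment toward ceil(messages on the celery queue / value) replicas: at the default queue_length of 3 from values.yaml, nine waiting messages target three workers, while minReplicaCount: 2 keeps two workers alive even with an empty queue and pollingInterval: 5 samples the queue every five seconds. The RabbitMQ connection string itself is resolved through the TriggerAuthentication referenced above, defined in trigger-auth-worker.yaml further down.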
							
								
								
									
infra/afc/templates/service-als-kafka.yaml (Normal file, 16 lines)
| @@ -0,0 +1,16 @@ | ||||
| apiVersion: v1 | ||||
| kind: Service | ||||
| metadata: | ||||
|   name: {{ .Values.service.als_kafka.hostname | quote }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   type: {{ .Values.service.als_kafka.type }} | ||||
|   ports: | ||||
|     - name: als-kafka-port | ||||
|       port: {{ .Values.service.als_kafka.port }} | ||||
|       protocol: TCP | ||||
|       targetPort: als-kafka-port | ||||
|   selector: | ||||
|     {{- include "afc.selectorLabels" . | nindent 4 }} | ||||
|     afc: als-kafka | ||||
							
								
								
									
infra/afc/templates/service-msghnd.yaml (Normal file, 16 lines)
| @@ -0,0 +1,16 @@ | ||||
| apiVersion: v1 | ||||
| kind: Service | ||||
| metadata: | ||||
|   name: {{ .Values.service.msghnd.hostname | quote }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   type: {{ .Values.service.msghnd.type }} | ||||
|   ports: | ||||
|     - port: {{ .Values.service.msghnd.port }} | ||||
|       targetPort: http | ||||
|       protocol: TCP | ||||
|       name: http | ||||
|   selector: | ||||
|     {{- include "afc.selectorLabels" . | nindent 4 }} | ||||
|     afc: msghnd | ||||
							
								
								
									
infra/afc/templates/service-objst.yaml (Normal file, 23 lines)
| @@ -0,0 +1,23 @@ | ||||
| apiVersion: v1 | ||||
| kind: Service | ||||
| metadata: | ||||
|   name: {{ .Values.service.objst.hostname | quote }} | ||||
|   annotations: | ||||
|     cloud.google.com/load-balancer-type: "Internal" | ||||
|     networking.gke.io/internal-load-balancer-allow-global-access: "true" | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   type: {{ .Values.service.objst.type }} | ||||
|   ports: | ||||
|     - port: {{ .Values.service.objst.fileStoragePort }} | ||||
|       targetPort: afc-objst-port | ||||
|       protocol: TCP | ||||
|       name: afc-objst-port | ||||
|     - port: {{ .Values.service.objst.historyViewPort }} | ||||
|       targetPort: afc-objst-hist | ||||
|       protocol: TCP | ||||
|       name: afc-objst-hist | ||||
|   selector: | ||||
|     {{- include "afc.selectorLabels" . | nindent 4 }} | ||||
|     afc: objst | ||||
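Note that the two GKE annotations on this Service (cloud.google.com/load-balancer-type: "Internal" and networking.gke.io/internal-load-balancer-allow-global-access) only take effect on Services of type LoadBalancer; with the default service.objst.type of ClusterIP they are inert and matter only when the type is overridden. The same applies to the rmq Service below, which carries the same pair.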
							
								
								
									
infra/afc/templates/service-rcache.yaml (Normal file, 16 lines)
| @@ -0,0 +1,16 @@ | ||||
| apiVersion: v1 | ||||
| kind: Service | ||||
| metadata: | ||||
|   name: {{ .Values.service.rcache.hostname | quote }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   type: {{ .Values.service.rcache.type }} | ||||
|   ports: | ||||
|     - name: rcache-port | ||||
|       port: {{ .Values.service.rcache.port }} | ||||
|       protocol: TCP | ||||
|       targetPort: rcache-port | ||||
|   selector: | ||||
|     {{- include "afc.selectorLabels" . | nindent 4 }} | ||||
|     afc: rcache | ||||
							
								
								
									
infra/afc/templates/service-rmq.yaml (Normal file, 19 lines)
| @@ -0,0 +1,19 @@ | ||||
| apiVersion: v1 | ||||
| kind: Service | ||||
| metadata: | ||||
|   name: {{ .Values.service.rmq.hostname | quote }} | ||||
|   annotations: | ||||
|     cloud.google.com/load-balancer-type: "Internal" | ||||
|     networking.gke.io/internal-load-balancer-allow-global-access: "true" | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   type: {{ .Values.service.rmq.type }} | ||||
|   ports: | ||||
|     - port: {{ .Values.service.rmq.port }} | ||||
|       targetPort: rmqp | ||||
|       protocol: TCP | ||||
|       name: rmqp | ||||
|   selector: | ||||
|     {{- include "afc.selectorLabels" . | nindent 4 }} | ||||
|     afc: rmq | ||||
							
								
								
									
infra/afc/templates/service-webui.yml (Normal file, 16 lines)
| @@ -0,0 +1,16 @@ | ||||
| apiVersion: v1 | ||||
| kind: Service | ||||
| metadata: | ||||
|   name: {{ .Values.service.webui.hostname | quote }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
| spec: | ||||
|   type: {{ .Values.service.webui.type }} | ||||
|   ports: | ||||
|     - port: {{ .Values.service.webui.port }} | ||||
|       targetPort: http | ||||
|       protocol: TCP | ||||
|       name: http | ||||
|   selector: | ||||
|     {{- include "afc.selectorLabels" . | nindent 4 }} | ||||
|     afc: webui | ||||
							
								
								
									
infra/afc/templates/serviceaccount.yaml (Normal file, 12 lines)
| @@ -0,0 +1,12 @@ | ||||
| {{- if .Values.serviceAccount.create -}} | ||||
| apiVersion: v1 | ||||
| kind: ServiceAccount | ||||
| metadata: | ||||
|   name: {{ include "afc.serviceAccountName" . }} | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
|   {{- with .Values.serviceAccount.annotations }} | ||||
|   annotations: | ||||
|     {{- toYaml . | nindent 4 }} | ||||
|   {{- end }} | ||||
| {{- end }} | ||||
							
								
								
									
infra/afc/templates/storageclass-b-ssd.yaml (Normal file, 11 lines)
| @@ -0,0 +1,11 @@ | ||||
| apiVersion: storage.k8s.io/v1 | ||||
| kind: StorageClass | ||||
| metadata: | ||||
|   name: b-ssd | ||||
| provisioner: filestore.csi.storage.gke.io | ||||
| reclaimPolicy: Retain | ||||
| volumeBindingMode: Immediate | ||||
| allowVolumeExpansion: false | ||||
| parameters: | ||||
|   tier: BASIC_SSD | ||||
|   connect-mode: PRIVATE_SERVICE_ACCESS | ||||
							
								
								
									
infra/afc/templates/tests/test-connection.yaml (Normal file, 15 lines)
| @@ -0,0 +1,15 @@ | ||||
| apiVersion: v1 | ||||
| kind: Pod | ||||
| metadata: | ||||
|   name: "{{ include "afc.fullname" . }}-test-connection" | ||||
|   labels: | ||||
|     {{- include "afc.labels" . | nindent 4 }} | ||||
|   annotations: | ||||
|     "helm.sh/hook": test | ||||
| spec: | ||||
|   containers: | ||||
|     - name: wget | ||||
|       image: busybox | ||||
|       command: ['wget'] | ||||
|       args: ['{{ include "afc.fullname" . }}:{{ .Values.service.port }}'] | ||||
|   restartPolicy: Never | ||||
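This is the stock Helm test hook: after an install, helm test <release> runs the pod once and passes if wget exits 0. As written it probes {{ include "afc.fullname" . }}:{{ .Values.service.port }}, but the values.yaml shown below defines ports per service (service.msghnd.port, service.webui.port, and so on) and no top-level service.port, the same key the disabled ingress.yaml scaffold above assumes, so this test would render without a port and likely needs adjusting before it is useful.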
							
								
								
									
infra/afc/templates/trigger-auth-worker.yaml (Normal file, 10 lines)
| @@ -0,0 +1,10 @@ | ||||
| apiVersion: keda.sh/v1alpha1 | ||||
| kind: TriggerAuthentication | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-{{ .Values.deployments.worker.name }}-trigger | ||||
|   namespace: default | ||||
| spec: | ||||
|   secretTargetRef: | ||||
|     - parameter: host | ||||
|       name: {{ .Chart.Name }}-rabbitmq-consumer-secret | ||||
|       key: RabbitMqHost | ||||
							
								
								
									
infra/afc/templates/vol-claim-afc-engine.yaml (Normal file, 11 lines)
| @@ -0,0 +1,11 @@ | ||||
| apiVersion: v1 | ||||
| kind: PersistentVolumeClaim | ||||
| metadata: | ||||
|   name: cont-confs-claim | ||||
| spec: | ||||
|   accessModes: | ||||
|     - ReadWriteMany | ||||
|   storageClassName: b-ssd | ||||
|   resources: | ||||
|     requests: | ||||
|       storage: 2.5Ti | ||||
							
								
								
									
infra/afc/templates/webui.ratapi.secret.yaml.example (Normal file, 24 lines)
| @@ -0,0 +1,24 @@ | ||||
| apiVersion: v1 | ||||
| kind: Secret | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-webui-rat-api-secret | ||||
| type: Opaque | ||||
| stringData: | ||||
|   ratapi.conf: | | ||||
|     # Flask settings | ||||
|     DEBUG = False | ||||
|     PROPAGATE_EXCEPTIONS = False | ||||
|     LOG_LEVEL = 'WARNING' | ||||
|     SECRET_KEY = 'ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGH' | ||||
|  | ||||
|     # Flask-SQLAlchemy settings | ||||
|     SQLALCHEMY_DATABASE_URI = 'postgresql://postgres_user:psql_password@psql_hostname/vhost_name' | ||||
|  | ||||
|     # Flask-User settings | ||||
|     USER_EMAIL_SENDER_EMAIL = 'admin@example.com' | ||||
|  | ||||
|     # RAT settings | ||||
|     GOOGLE_APIKEY = 'ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLM' | ||||
|     HISTORY_DIR = '/mnt/nfs/rat_transfer/history' | ||||
|     DEFAULT_ULS_DIR = '/mnt/nfs/rat_transfer/ULS_Database' | ||||
|     AFC_APP_TYPE = 'server' | ||||
| @@ -0,0 +1,7 @@ | ||||
| apiVersion: v1 | ||||
| kind: Secret | ||||
| metadata: | ||||
|   name: {{ .Chart.Name }}-rabbitmq-consumer-secret | ||||
| data: | ||||
|   # base64 encoded amqp connection string | ||||
|   RabbitMqHost: YW1xcDovL3VzZXI6cGFzc3dvcmRAcmFiYml0X21xX2hvc3Q6NTY3Mi9ob3N0 | ||||
							
								
								
									
infra/afc/values.yaml (Normal file, 193 lines)
| @@ -0,0 +1,193 @@ | ||||
| # Default values for afc. | ||||
| # This is a YAML-formatted file. | ||||
| # Declare variables to be passed into your templates. | ||||
|  | ||||
| replicaCount: | ||||
|   msghnd: 1 | ||||
|   objst: 1 | ||||
|   rmq: 1 | ||||
|   worker: 1 | ||||
|   webui: 1 | ||||
|   rcache: 1 | ||||
|   als_kafka: 1 | ||||
|   als_siphon: 1 | ||||
|  | ||||
|  | ||||
| image: | ||||
|   msghnd: | ||||
|     repository: 110738915961.dkr.ecr.us-east-1.amazonaws.com/afc-msghnd | ||||
|     pullPolicy: Always | ||||
|     # Overrides the image tag whose default is the chart appVersion. | ||||
|     #tag: "3.8.15.0" | ||||
|   webui: | ||||
|     repository: 110738915961.dkr.ecr.us-east-1.amazonaws.com/afc-server | ||||
|     pullPolicy: Always | ||||
|     # Overrides the image tag whose default is the chart appVersion. | ||||
|     #tag: "3.8.15.0" | ||||
|   objst: | ||||
|     repository: public.ecr.aws/w9v6y1o0/openafc/objstorage-image | ||||
|     pullPolicy: Always | ||||
|     # Overrides the image tag whose default is the chart appVersion. | ||||
|     #tag: "3.8.15.0" | ||||
|   worker: | ||||
|     repository: 110738915961.dkr.ecr.us-east-1.amazonaws.com/afc-worker | ||||
|     pullPolicy: Always | ||||
|     # Overrides the image tag whose default is the chart appVersion. | ||||
|     #tag: "3.8.15.0" | ||||
|   rmq: | ||||
|     repository: public.ecr.aws/w9v6y1o0/openafc/rmq-image | ||||
|     pullPolicy: Always | ||||
|     # Overrides the image tag whose default is the chart appVersion. | ||||
|     #tag: "3.8.15.0" | ||||
|   rcache: | ||||
|     repository: public.ecr.aws/w9v6y1o0/openafc/rcache-image | ||||
|     pullPolicy: Always | ||||
|     # Overrides the image tag whose default is the chart appVersion. | ||||
|     #tag: "3.8.15.0" | ||||
|   als_kafka: | ||||
|     repository: public.ecr.aws/w9v6y1o0/openafc/als-kafka-image | ||||
|     pullPolicy: Always | ||||
|     # Overrides the image tag whose default is the chart appVersion. | ||||
|     #tag: "3.8.15.0" | ||||
|   als_siphon: | ||||
|     repository: public.ecr.aws/w9v6y1o0/openafc/als-siphon-image | ||||
|     pullPolicy: Always | ||||
|     # Overrides the image tag whose default is the chart appVersion. | ||||
|     #tag: "3.8.15.0" | ||||
|  | ||||
|  | ||||
| imagePullSecrets: [] | ||||
| nameOverride: "afc-app" | ||||
| #fullnameOverride: "afc-chart" | ||||
|  | ||||
| serviceAccount: | ||||
|   # Specifies whether a service account should be created | ||||
|   create: true | ||||
|   # Annotations to add to the service account | ||||
|   annotations: {} | ||||
|   # The name of the service account to use. | ||||
|   # If not set and create is true, a name is generated using the fullname template | ||||
|   name: "" | ||||
|  | ||||
| podAnnotations: {} | ||||
|  | ||||
| podSecurityContext: {} | ||||
|   # fsGroup: 2000 | ||||
|  | ||||
| securityContext: {} | ||||
|   # capabilities: | ||||
|   #   drop: | ||||
|   #   - ALL | ||||
|   # readOnlyRootFilesystem: true | ||||
|   # runAsNonRoot: true | ||||
|   # runAsUser: 1000 | ||||
|  | ||||
| service: | ||||
|   msghnd: | ||||
|     hostname: msghnd | ||||
|     type: ClusterIP | ||||
|     port: 80 | ||||
|     containerPort: 8000 | ||||
|     threads_per_pod: 2 | ||||
|   webui: | ||||
|     hostname: webui | ||||
|     type: ClusterIP | ||||
|     port: 80 | ||||
|   rmq: | ||||
|     hostname: rmq | ||||
|     type: ClusterIP | ||||
|     port: 5672 | ||||
|   objst: | ||||
|     hostname: objst | ||||
|     type: ClusterIP | ||||
|     fileStoragePort: 5000 | ||||
|     historyViewPort: 4999 | ||||
|     scheme: "HTTP" | ||||
|   als_kafka: | ||||
|     hostname: als-kafka | ||||
|     type: ClusterIP | ||||
|     port: 9092 | ||||
|     max_request_size: ":10485760" # the leading ":" is part of a workaround for this helm bug: https://github.com/helm/helm/issues/1707 | ||||
|   rcache: | ||||
|     hostname: rcache | ||||
|     type: ClusterIP | ||||
|     port: 8000 | ||||
|     is_enabled: "TRUE" | ||||
|   ingress_ngnix: | ||||
|     hostname: "" | ||||
|  | ||||
| deployments: | ||||
|   global: | ||||
|     mountPath: "/mnt/nfs" | ||||
|   als_siphon: | ||||
|     name: als-siphon | ||||
|     init_if_exists: "skip" | ||||
|   worker: | ||||
|     name: worker | ||||
|     afc_aep_enable: "1" | ||||
|     afc_aep_debug: "1" | ||||
|     afc_aep_real_mountpoint_relative: "rat_transfer/3dep/1_arcsec" | ||||
|     celery_concurrency: 2 | ||||
|     queue_length: 3 | ||||
|  | ||||
|  | ||||
| ingress: | ||||
|   enabled: false | ||||
|   className: "" | ||||
|   annotations: {} | ||||
|     # kubernetes.io/ingress.class: nginx | ||||
|     # kubernetes.io/tls-acme: "true" | ||||
|   hosts: | ||||
|     - host: chart-example.local | ||||
|       paths: | ||||
|         - path: / | ||||
|           pathType: ImplementationSpecific | ||||
|   tls: [] | ||||
|   #  - secretName: chart-example-tls | ||||
|   #    hosts: | ||||
|   #      - chart-example.local | ||||
|  | ||||
| resources: | ||||
|   # limits: | ||||
|   #   cpu: 100m | ||||
|   #   memory: 128Mi | ||||
|   # requests: | ||||
|   #   cpu: 100m | ||||
|   #   memory: 128Mi | ||||
|   msghnd: | ||||
|     requests: | ||||
|       memory: 1200Mi | ||||
|   objst: | ||||
|     requests: | ||||
|       memory: 500Mi | ||||
|   rmq: | ||||
|     requests: | ||||
|       memory: 200Mi | ||||
|   worker: | ||||
|     requests: | ||||
|       memory: 4500Mi | ||||
|   webui: | ||||
|     requests: | ||||
|       memory: 200Mi | ||||
|   rcache: | ||||
|     requests: | ||||
|       memory: 100Mi | ||||
|   als_kafka: | ||||
|     requests: | ||||
|       memory: 500Mi | ||||
|   als_siphon: | ||||
|     requests: | ||||
|       memory: 100Mi | ||||
|  | ||||
| autoscaling: | ||||
|   enabled: false | ||||
|   minReplicas: 1 | ||||
|   maxReplicas: 1 | ||||
|   targetCPUUtilizationPercentage: 80 | ||||
|   # targetMemoryUtilizationPercentage: 80 | ||||
|  | ||||
| nodeSelector: {} | ||||
|  | ||||
| tolerations: [] | ||||
|  | ||||
| affinity: {} | ||||
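Any of these defaults can be overridden per release without editing the file. A sketch with illustrative values (the release name, replica count, and hostname are examples, not chart defaults):

    # --set overrides take precedence over values.yaml.
    helm upgrade --install my-afc afc/ -f afc/values.yaml \
        --set replicaCount.worker=4 \
        --set ingress.enabled=true \
        --set service.ingress_ngnix.hostname=afc.example.com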
							
								
								
									
infra/deploy_afc.sh (Executable file, 17 lines)
							| @@ -0,0 +1,17 @@ | ||||
| #!/bin/sh | ||||
|  | ||||
| kubectl get all | ||||
|  | ||||
| helm repo add bitnami https://charts.bitnami.com/bitnami | ||||
| helm repo add kedacore https://kedacore.github.io/charts | ||||
| helm repo update | ||||
| helm install keda kedacore/keda --namespace keda --create-namespace | ||||
|  | ||||
| helm upgrade --install ingress-nginx ingress-nginx --repo https://kubernetes.github.io/ingress-nginx --namespace ingress-nginx --create-namespace | ||||
|  | ||||
| kubectl wait --namespace ingress-nginx \ | ||||
|   --for=condition=ready pod \ | ||||
|   --selector=app.kubernetes.io/component=controller \ | ||||
|   --timeout=120s | ||||
|  | ||||
| helm install test-internal afc/ -f afc/values.yaml | ||||
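Once the script finishes, the release can be verified with standard commands; a short sketch (the release name matches the install line above, and the pod selector assumes the chart applies the conventional app.kubernetes.io/instance label):

    helm status test-internal
    kubectl get pods -l app.kubernetes.io/instance=test-internal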
							
								
								
									
msghnd/Dockerfile (Normal file, 90 lines)
							| @@ -0,0 +1,90 @@ | ||||
| # | ||||
| # Copyright 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| # Install required packages | ||||
| # | ||||
| FROM alpine:3.18 as msghnd.preinstall | ||||
| ENV PYTHONUNBUFFERED=1 | ||||
| RUN apk add --update --no-cache python3 && ln -sf python3 /usr/bin/python && \ | ||||
| apk add --update --no-cache py3-six py3-numpy py3-cryptography py3-sqlalchemy \ | ||||
| py3-requests py3-flask py3-psycopg2 py3-pydantic=~1.10 && \ | ||||
| apk add --repository=http://dl-cdn.alpinelinux.org/alpine/edge/testing/ \ | ||||
| py3-confluent-kafka && \ | ||||
| python3 -m ensurepip && \ | ||||
| pip3 install --no-cache --upgrade pip setuptools | ||||
| COPY msghnd/requirements.txt /wd/ | ||||
| RUN pip3 install -r /wd/requirements.txt && mkdir -p /run/gunicorn /etc/xdg/fbrat | ||||
| COPY gunicorn/wsgi.py /wd/ | ||||
| COPY config/ratapi.conf /etc/xdg/fbrat/ | ||||
| RUN echo "AFC_APP_TYPE = 'msghnd'" >> /etc/xdg/fbrat/ratapi.conf | ||||
| # | ||||
| # Build Message Handler application | ||||
| # | ||||
| FROM alpine:3.18 as msghnd.build | ||||
| ENV PYTHONUNBUFFERED=1 | ||||
| COPY --from=msghnd.preinstall / / | ||||
| # Development env | ||||
| RUN apk add --update --no-cache cmake ninja | ||||
| # | ||||
| COPY CMakeLists.txt LICENSE.txt version.txt Doxyfile.in /wd/ | ||||
| COPY cmake /wd/cmake/ | ||||
| COPY pkg /wd/pkg/ | ||||
| COPY src /wd/src/ | ||||
| RUN mkdir -p -m 777 /wd/build | ||||
| ARG BUILDREV=localbuild | ||||
| RUN cd /wd/build && \ | ||||
| cmake -DCMAKE_INSTALL_PREFIX=/wd/__install -DCMAKE_PREFIX_PATH=/usr -DCMAKE_BUILD_TYPE=RatapiRelease -DSVN_LAST_REVISION=$BUILDREV -G Ninja /wd && \ | ||||
| ninja -j$(nproc) install | ||||
| # | ||||
| # Install Message Handler application | ||||
| # | ||||
| FROM alpine:3.18 as msghnd.install | ||||
| ENV PYTHONUNBUFFERED=1 | ||||
| COPY --from=msghnd.preinstall / / | ||||
| COPY --from=msghnd.build /wd/__install /usr/ | ||||
| # | ||||
| COPY src/afc-packages /wd/afc-packages | ||||
| RUN pip3 install --use-pep517 --root-user-action=ignore \ | ||||
|         -r /wd/afc-packages/pkgs.msghnd \ | ||||
|     && rm -rf /wd/afc-packages | ||||
| # | ||||
| RUN mkdir -m 750 -p /var/lib/fbrat/AntennaPatterns && \ | ||||
| mkdir -m 755 -p /var/spool/fbrat /var/lib/fbrat /var/celery /var/run/celery /var/log/celery | ||||
| # Add user and group | ||||
| RUN addgroup -g 1003 fbrat && \ | ||||
| adduser -g '' -D -u 1003 -G fbrat -h /var/lib/fbrat -s /sbin/nologin fbrat && \ | ||||
| chown fbrat:fbrat /var/lib/fbrat/AntennaPatterns /var/spool/fbrat /var/lib/fbrat /var/celery | ||||
| # | ||||
| LABEL revision="afc-msghnd" | ||||
| WORKDIR /wd | ||||
| EXPOSE 8000 | ||||
| COPY msghnd/entrypoint.sh / | ||||
|  | ||||
| # Prometheus stuff | ||||
| COPY gunicorn/config.py /wd/gunicorn_config.py | ||||
| # Directory for Prometheus's multiprocess housekeeping | ||||
| ENV PROMETHEUS_MULTIPROC_DIR=/wd/prometheus_multiproc_dir | ||||
| RUN mkdir -p $PROMETHEUS_MULTIPROC_DIR | ||||
|  | ||||
| # Add debugging env if configured | ||||
| ARG AFC_DEVEL_ENV=${AFC_DEVEL_ENV:-production} | ||||
| COPY msghnd/devel.sh /wd/ | ||||
| RUN chmod +x /wd/devel.sh | ||||
| RUN /wd/devel.sh | ||||
| # msghnd environment variables default values | ||||
| ENV AFC_MSGHND_PORT=${AFC_MSGHND_PORT:-"8000"} | ||||
| ENV AFC_MSGHND_BIND=${AFC_MSGHND_BIND:-"0.0.0.0"} | ||||
| ENV AFC_MSGHND_PID=${AFC_MSGHND_PID:-"/run/gunicorn/openafc_app.pid"} | ||||
| ENV AFC_MSGHND_ACCESS_LOG= | ||||
| ENV AFC_MSGHND_ERROR_LOG=${AFC_MSGHND_ERROR_LOG:-"/proc/self/fd/2"} | ||||
| ENV AFC_MSGHND_TIMEOUT=${AFC_MSGHND_TIMEOUT:-180} | ||||
| ENV AFC_MSGHND_WORKERS=${AFC_MSGHND_WORKERS:-20} | ||||
| ENV AFC_DEVEL_ENV=${AFC_DEVEL_ENV:-production} | ||||
| ENV AFC_MSGHND_RATAFC_TOUT=${AFC_MSGHND_RATAFC_TOUT:-600} | ||||
| RUN chmod +x /entrypoint.sh | ||||
| CMD ["/entrypoint.sh"] | ||||
| HEALTHCHECK CMD wget --no-verbose --tries=1 --spider \ | ||||
|     http://localhost:${AFC_MSGHND_PORT}/fbrat/ap-afc/healthy || exit 1 | ||||
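The COPY paths (msghnd/requirements.txt, CMakeLists.txt, src/, ...) show that the build context is the repository root. A sketch of a local build, with an illustrative image tag:

    # Run from the repo root; both build args are declared in the Dockerfile above.
    docker build -t afc-msghnd:localbuild -f msghnd/Dockerfile \
        --build-arg BUILDREV=localbuild \
        --build-arg AFC_DEVEL_ENV=production .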
							
								
								
									
msghnd/devel.sh (Normal file, 23 lines)
							| @@ -0,0 +1,23 @@ | ||||
| #!/bin/sh | ||||
| # | ||||
| # Copyright 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| AFC_DEVEL_ENV=${AFC_DEVEL_ENV:-production} | ||||
| case "$AFC_DEVEL_ENV" in | ||||
|   "devel") | ||||
|     echo "Debug profile"  | ||||
|     export NODE_OPTIONS='--openssl-legacy-provider' | ||||
|     apk add --update --no-cache cmake ninja yarn bash | ||||
|     ;; | ||||
|   "production") | ||||
|     echo "Production profile" | ||||
|     ;; | ||||
|   *) | ||||
|     echo "Uknown profile" | ||||
|     ;; | ||||
| esac | ||||
|  | ||||
| exit $? | ||||
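The script is driven entirely by AFC_DEVEL_ENV: devel installs the extra build tooling, production is a no-op, and anything else falls through to the warning branch. For example:

    AFC_DEVEL_ENV=devel sh msghnd/devel.sh   # prints 'Debug profile' and installs cmake, ninja, yarn, bash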
							
								
								
									
msghnd/entrypoint.sh (Normal file, 47 lines)
							| @@ -0,0 +1,47 @@ | ||||
| #!/bin/sh | ||||
| # | ||||
| # Copyright (C) 2022 Broadcom. All rights reserved. The term "Broadcom" | ||||
| # refers solely to the Broadcom Inc. corporate affiliate that owns | ||||
| # the software below. This work is licensed under the OpenAFC Project License, | ||||
| # a copy of which is included with this software program | ||||
| # | ||||
| AFC_DEVEL_ENV=${AFC_DEVEL_ENV:-production} | ||||
| case "$AFC_DEVEL_ENV" in | ||||
|   "devel") | ||||
|     echo "Running debug profile"  | ||||
|     AFC_MSGHND_LOG_LEVEL="debug" | ||||
|     echo "AFC_MSGHND_PORT = ${AFC_MSGHND_PORT}" | ||||
|     echo "AFC_MSGHND_BIND = ${AFC_MSGHND_BIND}" | ||||
|     echo "AFC_MSGHND_PID = ${AFC_MSGHND_PID}" | ||||
|     echo "AFC_MSGHND_ACCESS_LOG = ${AFC_MSGHND_ACCESS_LOG}" | ||||
|     echo "AFC_MSGHND_ERROR_LOG = ${AFC_MSGHND_ERROR_LOG}" | ||||
|     echo "AFC_MSGHND_TIMEOUT = ${AFC_MSGHND_TIMEOUT}" | ||||
|     echo "AFC_MSGHND_WORKERS = ${AFC_MSGHND_WORKERS}" | ||||
|     echo "AFC_MSGHND_LOG_LEVEL = ${AFC_MSGHND_LOG_LEVEL}" | ||||
|     echo "AFC_MSGHND_RATAFC_TOUT = ${AFC_MSGHND_RATAFC_TOUT}" | ||||
|     ;; | ||||
|   "production") | ||||
|     echo "Running production profile" | ||||
|     AFC_MSGHND_LOG_LEVEL="info" | ||||
|     ;; | ||||
|   *) | ||||
|     echo "Uknown profile" | ||||
|     AFC_MSGHND_LOG_LEVEL="info" | ||||
|     ;; | ||||
| esac | ||||
|  | ||||
| gunicorn \ | ||||
| --bind "${AFC_MSGHND_BIND}:${AFC_MSGHND_PORT}" \ | ||||
| --pid "${AFC_MSGHND_PID}" \ | ||||
| --workers "${AFC_MSGHND_WORKERS}" \ | ||||
| --timeout "${AFC_MSGHND_TIMEOUT}" \ | ||||
| ${AFC_MSGHND_ACCESS_LOG:+--access-logfile "$AFC_MSGHND_ACCESS_LOG"} \ | ||||
| --error-logfile "${AFC_MSGHND_ERROR_LOG}" \ | ||||
| --log-level "${AFC_MSGHND_LOG_LEVEL}" \ | ||||
| --worker-class gevent \ | ||||
| wsgi:app | ||||
|  | ||||
| # | ||||
| sleep infinity | ||||
|  | ||||
| exit $? | ||||
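A quick way to exercise this entrypoint and the image HEALTHCHECK locally, assuming the image was tagged afc-msghnd:localbuild as in the build sketch above (ports and variables follow the ENV defaults in the Dockerfile):

    # Run the debug profile with two gunicorn workers.
    docker run --rm -p 8000:8000 \
        -e AFC_DEVEL_ENV=devel \
        -e AFC_MSGHND_WORKERS=2 \
        afc-msghnd:localbuild

    # The same probe the HEALTHCHECK uses:
    wget --no-verbose --tries=1 --spider http://localhost:8000/fbrat/ap-afc/healthy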
Some files were not shown because too many files have changed in this diff.