Python bindings: just enough to validate some YANG data

Jan Kundrát
2022-06-30 23:55:21 +02:00
parent 41ecb91b00
commit 3a458f2318
11 changed files with 911 additions and 26 deletions

.github/workflows/ci.yaml

@@ -6,29 +6,47 @@ name: CI
jobs:
build:
name: ${{ matrix.name }}
name: 'Build: ${{ matrix.name }}'
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
include:
- name: "Ubuntu 22.04"
- name: "Ubuntu 22.04 Python 3.10"
os: ubuntu-22.04
triplet: ""
build_type: Release
generators: Ninja
python-version: "3.10"
upload-sdist: true
- name: "Ubuntu 20.04 with GCC 10"
- name: "Ubuntu 20.04 with GCC 10 Python 3.8"
os: ubuntu-20.04
triplet: ""
build_type: Release
generators: Ninja
wheel: manylinux_2_31_x86_64
python-version: "3.8"
- name: "Windows 2022 MSVC"
- name: "Ubuntu 20.04 with GCC 10 Python 3.9"
os: ubuntu-20.04
build_type: Release
generators: Ninja
wheel: manylinux_2_31_x86_64
python-version: "3.9"
- name: "Ubuntu 20.04 with GCC 10 Python 3.10"
os: ubuntu-20.04
build_type: Release
generators: Ninja
wheel: manylinux_2_31_x86_64
python-version: "3.10"
- name: "Windows 2022 MSVC Python 3.10"
os: windows-2022
triplet: x64-windows
build_type: Release
generators: "Visual Studio 17 2022"
wheel: win_amd64
python-version: "3.10"
steps:
- name: Unix line endings in git
@@ -46,6 +64,10 @@ jobs:
id: cpu-cores
uses: SimenB/github-actions-cpu-cores@5e7112c2e8c5b63b649a678bc2fb5920d0c8202e
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: change vcpkg
if: startsWith(matrix.os, 'windows')
shell: bash
@@ -90,6 +112,7 @@ jobs:
run: |
echo '${{ github.workspace }}/'../target/bin >> $GITHUB_PATH
echo '${{ github.workspace }}/'../target/lib >> $GITHUB_PATH
echo 'C:/Program Files/Microsoft Visual Studio/2022/Enterprise/SDK/ScopeCppSDK/vc15/VC/bin' >> $GITHUB_PATH
echo ${VCPKG_INSTALLATION_ROOT//\\//}'/installed/${{ matrix.triplet }}/bin' >> $GITHUB_PATH
echo EXTRA_VCPKG_TARGET_TRIPLET=-DVCPKG_TARGET_TRIPLET=${{ matrix.triplet }} >> $GITHUB_ENV
echo EXTRA_CMAKE_TOOLCHAIN_FILE=-DCMAKE_TOOLCHAIN_FILE=${VCPKG_INSTALLATION_ROOT//\\//}/scripts/buildsystems/vcpkg.cmake >> $GITHUB_ENV
@@ -127,6 +150,12 @@ jobs:
if: startsWith(matrix.os, 'ubuntu')
run: sudo apt install -y libpcre2-dev libcmocka-dev doctest-dev pybind11-dev ninja-build
- name: Fix pybind on the oldest Ubuntu
if: startsWith(matrix.os, 'ubuntu-20')
run: |
sudo apt remove -y pybind11-dev
sudo pip install pybind11[global]
- name: configure libyang
shell: bash
run: |
@@ -152,8 +181,8 @@ jobs:
working-directory: '${{ github.workspace }}/../build-libyang'
run: cmake --install . --strip
- name: test the installed yanglint
run: yanglint -f tree ${{ github.workspace }}/libyang/tests/modules/yang/ietf-interfaces@2014-05-08.yang
# - name: test the installed yanglint
# run: yanglint -f tree ${{ github.workspace }}/libyang/tests/modules/yang/ietf-interfaces@2014-05-08.yang
- name: configure libyang-cpp
shell: bash
@@ -181,3 +210,191 @@ jobs:
- name: install libyang-cpp
working-directory: '${{ github.workspace }}/../build-libyang-cpp'
run: cmake --install . --strip
- name: quick install pytest for the test suite
if: startsWith(matrix.os, 'ubuntu-22')
run: pip install pytest
- name: quick configure oopt-gnpy-libyang
if: startsWith(matrix.os, 'ubuntu-22')
shell: bash
run: |
set -ex
cmake \
-DSHOW_SO_DEPENDENCIES=ON \
-DDONT_WARN_ABOUT_SETUP_PY=ON \
-S '${{ github.workspace }}/' \
-B '${{ github.workspace }}/'../build-oopt-gnpy-libyang \
-G '${{ matrix.generators }}' \
-DCMAKE_BUILD_TYPE=${{ matrix.build_type }} \
${EXTRA_VCPKG_TARGET_TRIPLET:+${EXTRA_VCPKG_TARGET_TRIPLET}} \
${EXTRA_CMAKE_TOOLCHAIN_FILE:+${EXTRA_CMAKE_TOOLCHAIN_FILE}} \
${EXTRA_PKG_CONFIG_EXECUTABLE:+${EXTRA_PKG_CONFIG_EXECUTABLE}} \
-DCMAKE_PREFIX_PATH:PATH=${GITHUB_WORKSPACE//\\//}/../target \
-DCMAKE_INSTALL_PREFIX:PATH='${{ github.workspace }}/'../target
- name: quick build oopt-gnpy-libyang
if: startsWith(matrix.os, 'ubuntu-22')
working-directory: '${{ github.workspace }}/../build-oopt-gnpy-libyang'
run: cmake --build . -j${{ steps.cpu-cores.outputs.count }} --config ${{ matrix.build_type }}
- name: quick install oopt-gnpy-libyang
if: startsWith(matrix.os, 'ubuntu-22')
working-directory: '${{ github.workspace }}/../build-oopt-gnpy-libyang'
run: cmake --install . --strip
- name: quick test oopt-gnpy-libyang
if: startsWith(matrix.os, 'ubuntu-22')
working-directory: '${{ github.workspace }}/../build-oopt-gnpy-libyang'
run: ctest --output-on-failure -j${{ steps.cpu-cores.outputs.count }} --build-config ${{ matrix.build_type }}
- name: prepare wheel building
run: pip install wheel setuptools build setuptools_scm[toml]
- name: show Python version
run: python --version
- name: show Python3 version
run: python3 --version
- name: show pip version
run: pip --version
# Pass neither --sdist nor --wheel, which means "build the sdist and, from that, build the wheel". Oh well.
- name: build the sdist and wheel
shell: bash
run: |
set -ex
export CMAKE_ARGS="${EXTRA_VCPKG_TARGET_TRIPLET:+${EXTRA_VCPKG_TARGET_TRIPLET} }${EXTRA_CMAKE_TOOLCHAIN_FILE:+${EXTRA_CMAKE_TOOLCHAIN_FILE} }${EXTRA_PKG_CONFIG_EXECUTABLE:+${EXTRA_PKG_CONFIG_EXECUTABLE}}"
python3 -m build --skip-dependency-check --outdir '${{ github.workspace }}'/build-wheel '${{ github.workspace }}/'
- name: fix the wheel, oldest Ubuntu
if: startsWith(matrix.os, 'ubuntu-20')
shell: bash
run: |
pip install auditwheel
find '${{ github.workspace }}'/build-wheel -name '*.whl'
auditwheel repair --plat '${{ matrix.wheel }}' '${{ github.workspace }}'/build-wheel/*.whl
- name: fix the wheel, Windows
if: startsWith(matrix.os, 'windows')
shell: bash
run: |
pip install delvewheel
delvewheel show '${{ github.workspace }}'/build-wheel/*.whl
delvewheel repair '${{ github.workspace }}'/build-wheel/*.whl
- name: upload the wheel
if: matrix.wheel
uses: actions/upload-artifact@v3
with:
name: 'wheel-${{ matrix.wheel }}-${{ matrix.python-version }}'
path: |
${{ github.workspace }}/wheelhouse/*.whl
- name: upload the sdist
if: matrix.upload-sdist
uses: actions/upload-artifact@v3
with:
name: sdist
path: |
${{ github.workspace }}/build-wheel/*.tar.gz
test:
needs: build
name: 'Test: ${{ matrix.name }}'
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
include:
- name: "Ubuntu 22.04 Python 3.10"
os: ubuntu-22.04
python-version: "3.10"
wheel: manylinux_2_31_x86_64
- name: "Ubuntu 20.04 with GCC 10 Python 3.8"
os: ubuntu-20.04
wheel: manylinux_2_31_x86_64
python-version: "3.8"
- name: "Ubuntu 20.04 with GCC 10 Python 3.9"
os: ubuntu-20.04
wheel: manylinux_2_31_x86_64
python-version: "3.9"
- name: "Ubuntu 20.04 with GCC 10 Python 3.10"
os: ubuntu-20.04
wheel: manylinux_2_31_x86_64
python-version: "3.10"
- name: "Windows 2022 MSVC Python 3.10"
os: windows-2022
wheel: win_amd64
python-version: "3.10"
steps:
- name: Unix line endings in git
if: startsWith(matrix.os, 'windows')
run: |
git config --global core.autocrlf input
git config --global core.eol lf
- uses: actions/checkout@v2
with:
fetch-depth: 0
submodules: recursive
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: mkdir wheelhouse
shell: bash
run: |
mkdir ${GITHUB_WORKSPACE//\\//}/wheelhouse
- name: download the wheel
uses: actions/download-artifact@v3
id: download-wheel
with:
name: 'wheel-${{ matrix.wheel }}-${{ matrix.python-version }}'
path: ${{ github.workspace }}/wheelhouse
- name: install the wheel
shell: bash
run: |
pip install --only-binary :all: --no-index --find-links=${GITHUB_WORKSPACE//\\//}/wheelhouse oopt-gnpy-libyang
- name: install pytest
run: pip install pytest
- name: run pytest
working-directory: ${{ github.workspace }}
run: pytest -vv
upload:
if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository_owner == 'Telecominfraproject' }}
needs:
- build
- test
name: Upload to PyPI
runs-on: ubuntu-22.04
steps:
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
path: ${{ github.workspace }}/all-artifacts
- name: rearrange the wheels
run: |
set -ex
mkdir pypi
mv -v ${{ github.workspace }}/all-artifacts/*/* pypi/
- name: upload to pypi
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
packages_dir: pypi/

CMakeLists.txt Normal file

@@ -0,0 +1,129 @@
cmake_policy(SET CMP0011 NEW)
cmake_policy(SET CMP0012 NEW)
cmake_policy(SET CMP0054 NEW)
cmake_policy(SET CMP0057 NEW) # vcpkg, pybind11 and MSVC
project(oopt-gnpy-libyang LANGUAGES CXX)
cmake_minimum_required(VERSION 3.21)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
include(GNUInstallDirs)
include(CTest)
if(NOT MSVC)
set(CMAKE_CXX_FLAGS_DEBUG "-Werror ${CMAKE_CXX_FLAGS_DEBUG}")
set(CMAKE_CXX_FLAGS "-Wall -Wextra -pedantic -Woverloaded-virtual -Wimplicit-fallthrough -Wsuggest-override ${CMAKE_CXX_FLAGS}")
endif()
find_package(PkgConfig)
pkg_check_modules(LIBYANG-CPP REQUIRED libyang-cpp>=1.0.0 IMPORTED_TARGET)
set(PYBIND11_FINDPYTHON ON)
find_package(Python COMPONENTS Interpreter Development.Module)
find_package(pybind11 2.9.1 REQUIRED)
pybind11_add_module(oopt_gnpy_libyang ${CMAKE_CURRENT_SOURCE_DIR}/oopt-gnpy-libyang.cpp)
target_link_libraries(oopt_gnpy_libyang PRIVATE PkgConfig::LIBYANG-CPP)
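# helper: run one pytest file against the freshly built module by pointing PYTHONPATH at its build directory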
function(oopt_gnpy_libyang_add_test name)
add_test(NAME ${name} COMMAND Python::Interpreter -m pytest -vv ${CMAKE_CURRENT_SOURCE_DIR}/tests/${name}.py)
set_property(TEST ${name} APPEND PROPERTY ENVIRONMENT
"PYTHONPATH=$<TARGET_FILE_DIR:oopt_gnpy_libyang>"
"CMAKE_CURRENT_SOURCE_DIR=${CMAKE_CURRENT_SOURCE_DIR}")
endfunction()
oopt_gnpy_libyang_add_test(test_validation)
oopt_gnpy_libyang_add_test(test_context)
option(SHOW_SO_DEPENDENCIES "Show all required \"non-standard\" libraries")
option(DONT_WARN_ABOUT_SETUP_PY "magic build system hack")
if(NOT DONT_WARN_ABOUT_SETUP_PY)
message(FATAL_ERROR "This package is designed to be built via Python's build system which invokes CMake with some magic arguments. Please read the README.md and build this via `python3 -m build --wheel`, not via invoking CMake manually.")
endif()
set(sanitizer_active OFF)
# FIXME: this just sucks. The detection is very unreliable (one could use something like
# -fsanitize=undefined,address and we are screwed), and especially clang's query for preload
# is obviously unportable because we hardcode host's architecture.
# This is super-ugly. Perhaps it would be better just to outright disable everything, but hey,
# I need to test this on my laptop where I'm using ASAN by default, and it kinda-almost-works
# there with just one patch to libyang :).
if (CMAKE_CXX_FLAGS MATCHES "-fsanitize=address")
set(sanitizer_active ON)
set(gcc_sanitizer_preload libasan.so)
set(clang_sanitizer_preload libclang_rt.asan-x86_64.so)
endif()
if (sanitizer_active)
if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -print-file-name=${clang_sanitizer_preload}
OUTPUT_VARIABLE LIBxSAN_FULL_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
execute_process(COMMAND ${CMAKE_CXX_COMPILER} -print-file-name=${gcc_sanitizer_preload}
OUTPUT_VARIABLE LIBxSAN_FULL_PATH OUTPUT_STRIP_TRAILING_WHITESPACE)
else()
message(FATAL_ERROR "Cannot determine correct sanitizer library for LD_PRELOAD")
endif()
foreach(a_test IN LISTS TESTS)
set_property(TEST ${a_test} APPEND PROPERTY ENVIRONMENT
"LD_PRELOAD=${LIBxSAN_FULL_PATH}"
ASAN_OPTIONS=detect_leaks=0 # they look harmless, but they are annoying
)
endforeach()
endif()
if(SHOW_SO_DEPENDENCIES)
if(MSVC)
set(code_libraries [[
list(APPEND ignored_libraries "^python")
list(APPEND ignored_libraries "^api-ms-")
list(APPEND ignored_libraries "^ext-ms-")
list(APPEND ignored_libraries_post "^C:/Windows/")
list(APPEND ignored_libraries_post "^C:\\\\Windows\\\\")
set(extra_directories $ENV{PATH})
]])
else()
set(code_libraries [[
list(APPEND ignored_libraries "^lib(c|dl|m|stdc\\+\\+)\\.so")
list(APPEND ignored_libraries "^ld-linux-")
list(APPEND ignored_libraries "^libgcc_s\\.")
set(extra_directories "")
]])
endif()
install(CODE [[
message(STATUS "Resolving runtime dependencies of $<TARGET_FILE:oopt_gnpy_libyang>")
set(ignored_libraries)
set(ignored_libraries_post)
]])
install(CODE ${code_libraries})
# Unfortunately, $<TARGET_RUNTIME_DLLS:oopt_gnpy_libyang> only lists python310.dll, so we have to pull a bigger gun.
install(CODE [[
file(GET_RUNTIME_DEPENDENCIES
LIBRARIES $<TARGET_FILE:oopt_gnpy_libyang>
RESOLVED_DEPENDENCIES_VAR libyang_cpp_deps
UNRESOLVED_DEPENDENCIES_VAR libyang_cpp_unresolved
CONFLICTING_DEPENDENCIES_PREFIX libyang_cpp_conflicting
PRE_EXCLUDE_REGEXES ${ignored_libraries}
POST_EXCLUDE_REGEXES ${ignored_libraries_post}
DIRECTORIES ${extra_directories}
)
if(libyang_cpp_unresolved)
message(STATUS " Cannot find the following required libraries to bundle them:")
foreach(one_library IN LISTS libyang_cpp_unresolved)
message(STATUS " ${one_library}")
endforeach()
endif()
if(libyang_cpp_conflicting_FILENAMES)
message(STATUS " Multiple candidate libraries found for ${libyang_cpp_conflicting_FILENAMES}")
endif()
if(libyang_cpp_deps)
message(STATUS " Candidates for bundling:")
foreach(one_library IN LISTS libyang_cpp_deps)
message(STATUS " ${one_library}")
endforeach()
endif()
]])
endif()
install(TARGETS oopt_gnpy_libyang LIBRARY DESTINATION ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}/)

MANIFEST.in Normal file

@@ -0,0 +1,4 @@
prune libyang*
graft tests/yang
include CMakeLists.txt oopt-gnpy-libyang.cpp
global-exclude *.py[cod]

README.md

@@ -1,11 +1,133 @@
# Opinionated Python bindings for the `libyang` library
[![Install via pip](https://img.shields.io/pypi/v/oopt-gnpy-libyang)](https://pypi.org/project/oopt-gnpy-libyang/)
[![Python versions](https://img.shields.io/pypi/pyversions/oopt-gnpy-libyang)](https://pypi.org/project/oopt-gnpy-libyang/)
[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/Telecominfraproject/oopt-gnpy-libyang/CI)](https://github.com/Telecominfraproject/oopt-gnpy-libyang/actions/workflows/ci.yaml)
Python bindings and packaging of [`libyang`](https://github.com/CESNET/libyang).
We're focusing on parsing, validating and accessing YANG-modeled JSON data trees.
Essentially, just enough to get [`gnpy`](https://github.com/Telecominfraproject/oopt-gnpy) going.
Want more?
Patches welcome.
Compared to the [CFFI libyang bindings](https://github.com/CESNET/libyang-python), this wrapper takes care of low-level memory management.
This means no more `node.free()` and `ctx.destroy()` (see the short sketch below).
We also produce prebuilt binary [wheels](https://realpython.com/python-wheels/) to make installation very simple.
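There is nothing to free by hand; once the Python objects go out of scope, the underlying C structures are released automatically. A minimal sketch (the `tests/yang` search path is just an example):
```python
import oopt_gnpy_libyang as ly

def module_names(search_path):
    # create a context, use it, and simply let it go out of scope
    ctx = ly.Context(search_path, ly.ContextOptions.AllImplemented | ly.ContextOptions.NoYangLibrary)
    return [m.name for m in ctx.modules()]

# no ctx.destroy() or node.free() anywhere
print(module_names('tests/yang'))
```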
## Usage
### Loading YANG data
```python
import oopt_gnpy_libyang as ly
c = ly.Context('tests/yang', ly.ContextOptions.AllImplemented | ly.ContextOptions.NoYangLibrary)
for m in ('iana-if-type', 'ietf-interfaces', 'ietf-ip'):
c.load_module(m)
blob = '''{
"ietf-interfaces:interfaces": {
"interface": [
{
"name": "lo",
"type": "iana-if-type:softwareLoopback",
"ietf-ip:ipv4": {
"address": [
{
"ip": "127.0.0.1",
"prefix-length": 8
}
]
},
"ietf-ip:ipv6": {
"address": [
{
"ip": "::1",
"prefix-length": 128
}
]
}
},
{
"name": "eth0",
"type": "iana-if-type:ethernetCsmacd"
}
]
}
}'''
data = c.parse_data_str(blob,
ly.DataFormat.JSON, ly.ParseOptions.Strict | ly.ParseOptions.Ordered,
ly.ValidationOptions.Present | ly.ValidationOptions.NoState)
```
### Working with data
Libyang works with forests (sets of trees); this is how to process all the data:
```python
for x in data.siblings():
print(f'a sibling: {x.path}')
for xx in x.childrenDfs():
print(f' {"term " if xx.is_term else "child"}: {xx.path}')
if xx.is_term:
print(f' {xx.as_term()} {" (default)" if xx.as_term().is_default_value else ""}')
```
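A data tree can also be serialized back via the bound `print` method; a minimal sketch which dumps the whole forest as JSON:
```python
print(data.print(ly.DataFormat.JSON, ly.PrintFlags.WithSiblings))
```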
Data can be accessed via their known paths, of course. Either as a full, multi-level XPath:
```python
data["interface[name='lo']/ietf-ip:ipv6/address[ip='::1']/prefix-length"].as_term().value == 128
```
Or individually, one item per index:
```python
data["interface[name='lo']"]["ietf-ip:ipv6"]["address[ip='::1']"]["prefix-length"].as_term().value
```
Everything is an XPath, so it's possible to take a shortcut and skip specifying keys for single-element lists:
```python
data["interface[name='lo']"]["ietf-ip:ipv6"]["address"]["prefix-length"].as_term().value == 128
```
The data are provided as native Python types:
```python
type(data["interface[name='lo']"]["ietf-ip:ipv6"]["address"]["prefix-length"]
.as_term().value) == int
```
### Validation errors
In libyang, if an operation fails, error details are available via `context.errors()`:
```python
import json
wrong = json.loads(blob)
wrong["ietf-interfaces:interfaces"]["interface"][0]\
["ietf-ip:ipv6"]["address"][0]["prefix-length"] = 666
try:
data = c.parse_data_str(json.dumps(wrong),
ly.DataFormat.JSON, ly.ParseOptions.Strict | ly.ParseOptions.Ordered,
ly.ValidationOptions.Present | ly.ValidationOptions.NoState)
assert False
except ly.Error:
for error in c.errors():
assert error.path == "Schema location \"/ietf-interfaces:interfaces/interface/ietf-ip:ipv6/address/prefix-length\", data location \"/ietf-ip:address[ip='::1']\", line number 1."
assert error.message == 'Value "666" is out of type uint8 min/max bounds.'
```
## Installing
We're producing wheels for many popular platforms.
The installation is as simple as:
```console-session
$ pip install oopt-gnpy-libyang
```
### Building from source
Since this library is a Python wrapper around a C++ wrapper around a C library, source-based builds are more complex.
They require:
- a C++20 compiler (e.g., GCC 10+, clang 10+, MSVC 17.2+)
- [`libyang`](https://github.com/CESNET/libyang) and its dependencies
- [`libyang-cpp`](https://github.com/CESNET/libyang-cpp/) and its dependencies
- [CMake](https://cmake.org/) 3.21+
Unlike the wheels, which already bundle all the required libraries, a from-source build requires `libyang`, `libyang-cpp` and all their dependencies to be installed first.
Also, in a from-source build these won't be bundled into the resulting package.
For inspiration, consult our [GitHub packaging recipes](./.github/workflows/ci.yaml).
## License
Copyright © 2021-2022 Telecom Infra Project and GNPy contributors.

oopt-gnpy-libyang.cpp Normal file

@@ -0,0 +1,189 @@
#include <pybind11/functional.h>
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <libyang-cpp/Context.hpp>
using namespace std::literals;
using namespace pybind11::literals;
PYBIND11_MODULE(oopt_gnpy_libyang, m) {
m.doc() = "Opinionated Python bindings for the libyang library";
pybind11::enum_<libyang::ContextOptions>(m, "ContextOptions")
.value("AllImplemented", libyang::ContextOptions::AllImplemented)
.value("RefImplemented", libyang::ContextOptions::RefImplemented)
.value("NoYangLibrary", libyang::ContextOptions::NoYangLibrary)
.value("DisableSearchDirs", libyang::ContextOptions::DisableSearchDirs)
.value("DisableSearchCwd", libyang::ContextOptions::DisableSearchCwd)
.value("PreferSearchDirs", libyang::ContextOptions::PreferSearchDirs)
.value("SetPrivParsed", libyang::ContextOptions::SetPrivParsed)
.value("ExplicitCompile", libyang::ContextOptions::ExplicitCompile)
.def("__or__", [](libyang::ContextOptions a, libyang::ContextOptions b){ return a | b; })
;
pybind11::enum_<libyang::LogLevel>(m, "LogLevel")
.value("Error", libyang::LogLevel::Error)
.value("Warning", libyang::LogLevel::Warning)
.value("Verbose", libyang::LogLevel::Verbose)
.value("Debug", libyang::LogLevel::Debug)
;
pybind11::enum_<libyang::ErrorCode>(m, "ErrorCode")
.value("Success", libyang::ErrorCode::Success)
.value("MemoryFailure", libyang::ErrorCode::MemoryFailure)
.value("SyscallFail", libyang::ErrorCode::SyscallFail)
.value("InvalidValue", libyang::ErrorCode::InvalidValue)
.value("ItemAlreadyExists", libyang::ErrorCode::ItemAlreadyExists)
.value("NotFound", libyang::ErrorCode::NotFound)
.value("InternalError", libyang::ErrorCode::InternalError)
.value("ValidationFailure", libyang::ErrorCode::ValidationFailure)
.value("OperationDenied", libyang::ErrorCode::OperationDenied)
.value("OperationIncomplete", libyang::ErrorCode::OperationIncomplete)
.value("RecompileRequired", libyang::ErrorCode::RecompileRequired)
.value("Negative", libyang::ErrorCode::Negative)
.value("Unknown", libyang::ErrorCode::Unknown)
.value("PluginError", libyang::ErrorCode::PluginError)
;
pybind11::enum_<libyang::ValidationErrorCode>(m, "ValidationErrorCode")
.value("Success", libyang::ValidationErrorCode::Success)
.value("Syntax", libyang::ValidationErrorCode::Syntax)
.value("YangSyntax", libyang::ValidationErrorCode::YangSyntax)
.value("YinSyntax", libyang::ValidationErrorCode::YinSyntax)
.value("Reference", libyang::ValidationErrorCode::Reference)
.value("Xpath", libyang::ValidationErrorCode::Xpath)
.value("Semantics", libyang::ValidationErrorCode::Semantics)
.value("XmlSyntax", libyang::ValidationErrorCode::XmlSyntax)
.value("JsonSyntax", libyang::ValidationErrorCode::JsonSyntax)
.value("Data", libyang::ValidationErrorCode::Data)
.value("Other", libyang::ValidationErrorCode::Other)
;
pybind11::enum_<libyang::DataFormat>(m, "DataFormat")
.value("Detect", libyang::DataFormat::Detect)
.value("JSON", libyang::DataFormat::JSON)
.value("XML", libyang::DataFormat::XML)
;
pybind11::enum_<libyang::ParseOptions>(m, "ParseOptions")
.value("ParseOnly", libyang::ParseOptions::ParseOnly)
.value("Strict", libyang::ParseOptions::Strict)
.value("Opaque", libyang::ParseOptions::Opaque)
.value("NoState", libyang::ParseOptions::NoState)
.value("LybModUpdate", libyang::ParseOptions::LybModUpdate)
.value("Ordered", libyang::ParseOptions::Ordered)
.def("__or__", [](libyang::ParseOptions a, libyang::ParseOptions b){ return a | b; })
;
pybind11::enum_<libyang::ValidationOptions>(m, "ValidationOptions")
.value("NoState", libyang::ValidationOptions::NoState)
.value("Present", libyang::ValidationOptions::Present)
.def("__or__", [](libyang::ValidationOptions a, libyang::ValidationOptions b){ return a | b; })
;
pybind11::enum_<libyang::PrintFlags>(m, "PrintFlags")
.value("WithDefaultsExplicit", libyang::PrintFlags::WithDefaultsExplicit)
.value("WithSiblings", libyang::PrintFlags::WithSiblings)
.value("Shrink", libyang::PrintFlags::Shrink)
.value("KeepEmptyCont", libyang::PrintFlags::KeepEmptyCont)
.value("WithDefaultsTrim", libyang::PrintFlags::WithDefaultsTrim)
.value("WithDefaultsAll", libyang::PrintFlags::WithDefaultsAll)
.value("WithDefaultsAllTag", libyang::PrintFlags::WithDefaultsAllTag)
.value("WithDefaultsImplicitTag", libyang::PrintFlags::WithDefaultsImplicitTag)
.value("WithDefaultsMask", libyang::PrintFlags::WithDefaultsMask)
.def("__or__", [](libyang::PrintFlags a, libyang::PrintFlags b){ return a | b; })
;
auto eError = pybind11::register_local_exception<libyang::Error>(m, "Error", PyExc_RuntimeError);
pybind11::register_local_exception<libyang::ErrorWithCode>(m, "ErrorWithCode", eError);
/* FIXME: cannot do .def_property("code", &libyang::ErrorWithCode::code, nullptr) */
pybind11::register_local_exception<libyang::ParsedInfoUnavailable>(m, "ParsedInfoUnavailable", eError);
pybind11::class_<libyang::Feature>(m, "Feature")
.def_property("name", &libyang::Feature::name, nullptr)
;
pybind11::class_<libyang::Module>(m, "Module")
.def_property("name", &libyang::Module::name, nullptr)
.def_property("revision", &libyang::Module::revision, nullptr)
.def_property("implemented", &libyang::Module::implemented, nullptr)
.def_property("features", &libyang::Module::features, nullptr)
.def("set_implemented", static_cast<void (libyang::Module::*)()>(&libyang::Module::setImplemented))
.def("feature_enabled", &libyang::Module::featureEnabled)
.def("set_implemented_with_features", static_cast<void (libyang::Module::*)(std::vector<std::string>)>(&libyang::Module::setImplemented), "features"_a)
.def("set_implemented_all_features", [](libyang::Module& mod) { mod.setImplemented(libyang::AllFeatures{}); })
// FIXME: identities
// FIXME: childInstantiables
;
pybind11::class_<libyang::ErrorInfo>(m, "ErrorInfo")
.def_readonly("app_tag", &libyang::ErrorInfo::appTag)
.def_readonly("level", &libyang::ErrorInfo::level)
.def_readonly("message", &libyang::ErrorInfo::message)
.def_readonly("code", &libyang::ErrorInfo::code)
.def_readonly("path", &libyang::ErrorInfo::path)
.def_readonly("validation_code", &libyang::ErrorInfo::validationCode)
;
pybind11::class_<libyang::DataNode>(m, "DataNode")
.def_property("path", &libyang::DataNode::path, nullptr)
.def_property("is_term", &libyang::DataNode::isTerm, nullptr)
.def("as_term", &libyang::DataNode::asTerm)
.def("print", &libyang::DataNode::printStr, "format"_a, "flags"_a)
.def("siblings", &libyang::DataNode::siblings)
.def("child", &libyang::DataNode::child)
.def("childrenDfs", &libyang::DataNode::childrenDfs)
.def("immediateChildren", &libyang::DataNode::immediateChildren)
.def("__getitem__",
[](const libyang::DataNode& node, const std::string& key) {
auto set = node.findXPath(key);
if (set.empty()) {
throw pybind11::key_error{"No such data node below '" + node.path() + "' for '" + key + "'"};
} else if (set.size() > 1) {
throw pybind11::key_error{"Too many nodes below '" + node.path() + "' for '" + key + "'"};
} else {
return *set.begin();
}
})
;
pybind11::class_<libyang::DataNodeTerm, libyang::DataNode>(m, "DataNodeTerm")
.def_property("is_default_value", &libyang::DataNodeTerm::isDefaultValue, nullptr)
.def_property("value", &libyang::DataNodeTerm::value, nullptr)
.def("__str__", &libyang::DataNodeTerm::valueStr)
;
using Collection_DataNode_Siblings = libyang::Collection<libyang::DataNode, libyang::IterationType::Sibling>;
pybind11::class_<Collection_DataNode_Siblings>(m, "_Collection_DataNode_Siblings")
.def("__iter__",
[](const Collection_DataNode_Siblings &s) { return pybind11::make_iterator(s.begin(), s.end()); },
pybind11::keep_alive<0, 1>())
;
using Collection_DataNode_Dfs = libyang::Collection<libyang::DataNode, libyang::IterationType::Dfs>;
pybind11::class_<Collection_DataNode_Dfs>(m, "_Collection_DataNode_Dfs")
.def("__iter__",
[](const Collection_DataNode_Dfs &s) { return pybind11::make_iterator(s.begin(), s.end()); },
pybind11::keep_alive<0, 1>())
;
pybind11::class_<libyang::Context>(m, "Context")
.def(pybind11::init<const std::optional<std::string>&, const std::optional<libyang::ContextOptions>>(), "searchPath"_a=std::nullopt, "options"_a=std::nullopt)
.def("load_module", &libyang::Context::loadModule, "name"_a, "revision"_a=std::nullopt, "features"_a=std::vector<std::string>{})
.def("modules", &libyang::Context::modules)
.def("get_module", &libyang::Context::getModule, "name"_a, "revision"_a=std::nullopt)
.def("get_module_implemented", &libyang::Context::getModuleImplemented, "name"_a)
.def("errors", &libyang::Context::getErrors)
.def("clean_all_errors", &libyang::Context::cleanAllErrors)
.def("parse_data_str", &libyang::Context::parseDataMem, "data"_a, "format"_a, "parse_options"_a=std::nullopt, "validation_options"_a=std::nullopt)
// is this actually needed? looks like parseDataMem() does that just fine
/* .def("validate_data_str", */
/* [](const libyang::Context& ctx, const std::string& data, const libyang::DataFormat format, const libyang::ParseOptions parseOptions, const libyang::ValidationOptions validationOptions) { */
/* auto x = ctx.parseDataMem(data, format, parseOptions, validationOptions); */
/* libyang::validateAll(x, validationOptions); */
/* return x; */
/* }, */
/* "data"_a, "format"_a=libyang::DataFormat::Auto, "parse_options"_a=std::nullopt, "validation_options"_a=std::nullopt) */
;
}

pyproject.toml

@@ -1,14 +1,14 @@
[build-system]
requires = [
"setuptools>=42",
"setuptools>=45",
"wheel",
"ninja",
"cmake>=3.12",
"setuptools_scm[toml]>=6.2",
]
build-backend = "setuptools.build_meta"
[tool.isort]
profile = "black"
[tool.setuptools_scm]
[tool.pytest.ini_options]
minversion = "6.0"
@@ -17,9 +17,9 @@ xfail_strict = true
filterwarnings = ["error"]
testpaths = ["tests"]
[tool.cibuildwheel]
test-command = "pytest {project}/tests"
test-extras = ["test"]
test-skip = ["*universal2:arm64"]
# Setuptools bug causes collision between pypy and cpython artifacts
before-build = "rm -rf {project}/build"
# [tool.cibuildwheel]
# test-command = "pytest {project}/tests"
# test-extras = ["test"]
# test-skip = ["*universal2:arm64"]
# # Setuptools bug causes collision between pypy and cpython artifacts
# before-build = "rm -rf {project}/build"

setup.py

@@ -3,6 +3,7 @@ import re
import subprocess
import sys
from pathlib import Path
from setuptools import Extension, setup
from setuptools.command.build_ext import build_ext
@@ -46,6 +47,7 @@ class CMakeBuild(build_ext):
f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={extdir}",
f"-DPYTHON_EXECUTABLE={sys.executable}",
f"-DCMAKE_BUILD_TYPE={cfg}", # not used on MSVC, but no harm
"-DDONT_WARN_ABOUT_SETUP_PY=ON",
]
build_args = []
# Adding CMake arguments set as environment variable
@@ -121,15 +123,31 @@ class CMakeBuild(build_ext):
# The information here can also be placed in setup.cfg - better separation of
# logic and declaration, and simpler if you include description/version in a file.
setup(
name="cmake_example",
version="0.0.1",
author="Dean Moldovan",
author_email="dean0x7d@gmail.com",
description="A test project using pybind11 and CMake",
long_description="",
ext_modules=[CMakeExtension("cmake_example")],
name="oopt-gnpy-libyang",
url="https://github.com/Telecominfraproject/oopt-gnpy-libyang",
author="Telecom Infra Project",
author_email="jan.kundrat@telecominfraproject.com",
description="Opinionated Python bindings for the libyang library",
long_description=(Path(__file__).parent / "README.md").read_text(),
long_description_content_type="text/markdown; variant=GFM",
ext_modules=[CMakeExtension("oopt_gnpy_libyang")],
cmdclass={"build_ext": CMakeBuild},
zip_safe=False,
extras_require={"test": ["pytest>=6.0"]},
python_requires=">=3.6",
python_requires=">=3.8",
license='BSD-3-Clause',
download_url='https://pypi.org/project/oopt-gnpy-libyang/',
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Intended Audience :: Telecommunications Industry',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: System :: Networking',
),
)

tests/conftest.py Normal file

@@ -0,0 +1,20 @@
import oopt_gnpy_libyang as ly
import os
from pathlib import Path
import pytest
@pytest.fixture(scope='function')
def context_no_libyang():
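'''A fresh context whose search path points at the repository's tests/yang directory'''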
return ly.Context(str(Path(os.environ.get('CMAKE_CURRENT_SOURCE_DIR', os.getcwd())) / 'tests' / 'yang'),
ly.ContextOptions.AllImplemented | ly.ContextOptions.NoYangLibrary)
@pytest.fixture(scope='function')
def context_with_modules(context_no_libyang):
'''A context with all relevant YANG modules added and implemented'''
c = context_no_libyang
c.load_module('iana-if-type')
m = c.load_module('ietf-interfaces')
m.set_implemented_all_features()
c.load_module('ietf-ip')
c.load_module('ietf-hardware')
return c

tests/test_context.py Normal file

@@ -0,0 +1,31 @@
import pytest
import oopt_gnpy_libyang as ly
def test_no_module(context_no_libyang):
with pytest.raises(ly.Error, match="Can't load module 'yay'"):
context_no_libyang.load_module('yay')
def test_empty():
c = ly.Context()
with pytest.raises(ly.Error, match="Can't load module 'yay'"):
c.load_module('yay')
def test_features(context_no_libyang):
context_no_libyang.load_module('iana-if-type')
m = context_no_libyang.load_module('ietf-interfaces')
with pytest.raises(ly.Error, match="Couldn't set module 'ietf-interfaces' to implemented: LY_EINVAL"):
m.set_implemented_with_features(['arbotrary-names'])
errors = [(e.level, e.code, e.message, e.path, e.validation_code, e.app_tag) for e in context_no_libyang.errors()]
assert errors == [
(ly.LogLevel.Error, ly.ErrorCode.InvalidValue, 'Feature "arbotrary-names" not found in module "ietf-interfaces".',
None, ly.ValidationErrorCode.Success, None)
]
m.set_implemented_with_features(['arbitrary-names'])
for feature in m.features:
assert m.feature_enabled(feature.name) == (feature.name == 'arbitrary-names')
m.set_implemented_all_features()
for feature in m.features:
assert m.feature_enabled(feature.name)
def test_no_shared_errors(context_no_libyang):
assert context_no_libyang.errors() == []

tests/test_validation.py Normal file

@@ -0,0 +1,155 @@
import pytest
import oopt_gnpy_libyang as ly
def test_ietf_interfaces(context_with_modules):
blob = '''{
"ietf-interfaces:interfaces": {
"interface": [
{
"name": "lo",
"type": "iana-if-type:softwareLoopback",
"ietf-ip:ipv4": {
"address": [
{
"ip": "127.0.0.1",
"prefix-length": 8
}
]
},
"ietf-ip:ipv6": {
"address": [
{
"ip": "::1",
"prefix-length": 128
}
]
}
},
{
"name": "eth0",
"type": "iana-if-type:ethernetCsmacd"
},
{
"name": "eth1",
"type": "iana-if-type:ethernetCsmacd"
},
{
"name": "br0",
"type": "iana-if-type:bridge",
"ietf-ip:ipv4": {
"address": [
{
"ip": "10.0.0.2",
"prefix-length": 24
}
],
"neighbor": [
{
"ip": "10.0.0.1",
"link-layer-address": "cc:2d:e0:52:30:9c"
},
{
"ip": "10.0.1.1",
"link-layer-address": "cc:2d:e0:52:30:9c"
}
]
},
"ietf-ip:ipv6": {
"address": [
{
"ip": "fe80::211:17ff:fe01:61",
"prefix-length": 64
}
]
}
}
]
},
"ietf-hardware:hardware": {
}
}
'''
data = context_with_modules.parse_data_str(blob, ly.DataFormat.JSON, ly.ParseOptions.Strict | ly.ParseOptions.Ordered, ly.ValidationOptions.Present | ly.ValidationOptions.NoState)
assert data.path == '/ietf-interfaces:interfaces'
assert [x.path for x in data.siblings()] == ['/ietf-interfaces:interfaces', '/ietf-hardware:hardware']
assert [inner.path for top in data.siblings() for inner in top.childrenDfs()] == [
'/ietf-interfaces:interfaces',
"/ietf-interfaces:interfaces/interface[name='lo']",
"/ietf-interfaces:interfaces/interface[name='lo']/name",
"/ietf-interfaces:interfaces/interface[name='lo']/type",
"/ietf-interfaces:interfaces/interface[name='lo']/enabled",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv4",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv4/enabled",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv4/forwarding",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv4/address[ip='127.0.0.1']",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv4/address[ip='127.0.0.1']/ip",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv4/address[ip='127.0.0.1']/prefix-length",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv6",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv6/enabled",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv6/forwarding",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv6/address[ip='::1']",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv6/address[ip='::1']/ip",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv6/address[ip='::1']/prefix-length",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv6/dup-addr-detect-transmits",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv6/autoconf",
"/ietf-interfaces:interfaces/interface[name='lo']/ietf-ip:ipv6/autoconf/create-global-addresses",
"/ietf-interfaces:interfaces/interface[name='eth0']",
"/ietf-interfaces:interfaces/interface[name='eth0']/name",
"/ietf-interfaces:interfaces/interface[name='eth0']/type",
"/ietf-interfaces:interfaces/interface[name='eth0']/enabled",
"/ietf-interfaces:interfaces/interface[name='eth1']",
"/ietf-interfaces:interfaces/interface[name='eth1']/name",
"/ietf-interfaces:interfaces/interface[name='eth1']/type",
"/ietf-interfaces:interfaces/interface[name='eth1']/enabled",
"/ietf-interfaces:interfaces/interface[name='br0']",
"/ietf-interfaces:interfaces/interface[name='br0']/name",
"/ietf-interfaces:interfaces/interface[name='br0']/type",
"/ietf-interfaces:interfaces/interface[name='br0']/enabled",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/enabled",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/forwarding",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/address[ip='10.0.0.2']",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/address[ip='10.0.0.2']/ip",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/address[ip='10.0.0.2']/prefix-length",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/neighbor[ip='10.0.0.1']",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/neighbor[ip='10.0.0.1']/ip",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/neighbor[ip='10.0.0.1']/link-layer-address",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/neighbor[ip='10.0.1.1']",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/neighbor[ip='10.0.1.1']/ip",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv4/neighbor[ip='10.0.1.1']/link-layer-address",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv6",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv6/enabled",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv6/forwarding",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv6/address[ip='fe80::211:17ff:fe01:61']",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv6/address[ip='fe80::211:17ff:fe01:61']/ip",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv6/address[ip='fe80::211:17ff:fe01:61']/prefix-length",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv6/dup-addr-detect-transmits",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv6/autoconf",
"/ietf-interfaces:interfaces/interface[name='br0']/ietf-ip:ipv6/autoconf/create-global-addresses",
'/ietf-hardware:hardware',
]
assert [inner.path for top in data.siblings() for inner in top.immediateChildren()] == [
f"/ietf-interfaces:interfaces/interface[name='{if_}']" for if_ in ('lo', 'eth0', 'eth1', 'br0')
]
# quoting doesn't matter
assert data["interface[name='lo']"].path == "/ietf-interfaces:interfaces/interface[name='lo']"
assert data['interface[name="lo"]'].path == "/ietf-interfaces:interfaces/interface[name='lo']"
with pytest.raises(KeyError, match='No such data node below.*'):
data['interface[name="ZZZ"]']
with pytest.raises(KeyError, match="Too many nodes below '/ietf-interfaces:interfaces' for 'interface/enabled'"):
data["interface/enabled"]
# one multi-level XPath with all keys
assert data["interface[name='lo']/ietf-ip:ipv6/address[ip='::1']/prefix-length"].as_term().value == 128
# one item per index, all key values specified
assert data["interface[name='lo']"]["ietf-ip:ipv6"]["address[ip='::1']"]["prefix-length"].as_term().value == 128
# one item per index; keys may be omitted for single-element lists
assert data["interface[name='lo']"]["ietf-ip:ipv6"]["address"]["prefix-length"].as_term().value == 128
assert isinstance(data["interface[name='lo']"]["ietf-ip:ipv6"]["address"]["prefix-length"].as_term().value, int)