9 Commits

Author SHA1 Message Date
EstherLerouzic
6a0e73e332 add an example request (with answers)
Signed-off-by: EstherLerouzic <esther.lerouzic@orange.com>
Change-Id: Id75fff88cd3b03fcf965c22763075ac3dbea41c6
2020-11-06 18:20:13 +01:00
EstherLerouzic
fa6b8c87e4 add 'gnpy-api:' context when reading the content of the request
in order to be compliant with YANG

Signed-off-by: EstherLerouzic <esther.lerouzic@orange.com>
Change-Id: Ifa6ab93025b18a5a678b625e42e3d351499c69d7
2020-11-06 17:39:03 +01:00
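A minimal sketch of the intended parsing (the exact YANG module prefix and the helper name are assumptions, not the actual GNPy code):

    import json

    def load_request(path):
        """Read a path request file and strip the YANG namespace wrapper, if present."""
        with open(path, encoding='utf-8') as f:
            data = json.load(f)
        # hypothetical key: the request body may be wrapped under a 'gnpy-api:...' member
        return data.get('gnpy-api:path-request', data)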
EstherLerouzic
801c66aae2 adding yang corresponding to the json inputs
Signed-off-by: EstherLerouzic <esther.lerouzic@orange.com>
Change-Id: I75b0cc3c3ce84dc724e588f918bddf0a5a97225d
2020-11-06 17:39:03 +01:00
EstherLerouzic
f60c347a48 support missing trx_mode in request instead of null value
Signed-off-by: EstherLerouzic <esther.lerouzic@orange.com>
Change-Id: I5c05b17b0b134c7782a08e86015dc30c7c9b3713
2020-11-06 17:39:03 +01:00
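A minimal illustration of the idea (hypothetical helper name): a request that simply omits 'trx_mode' behaves like one carrying an explicit null.

    def trx_mode_of(request: dict):
        # dict.get() returns None both when the key is absent and when it is JSON null,
        # so both cases fall back to the same "no imposed mode" behaviour
        return request.get('trx_mode')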
EstherLerouzic
649bb3bd0f Change N values from 0 to None in case of NO_SPECTRUM
In case spectrum cannot be assigned, the default value for N is set to 0,
which is not correct (N is a meaningful value for the
center frequency index). This change replaces this default
value with None.

Signed-off-by: EstherLerouzic <esther.lerouzic@orange.com>
Change-Id: Ibe642682e48d09f340d53e2092f172de6aa7cc90
2020-11-06 17:38:28 +01:00
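A minimal illustration of the change (hypothetical helper name):

    def slot_to_record(n, m, blocked):
        """(N, M) pair to store in the answer when spectrum assignment fails."""
        if blocked:
            # None instead of 0: 0 is a meaningful center-frequency index,
            # so it must not double as a "no spectrum" marker
            return None, None
        return n, m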
EstherLerouzic
6051ad54bc Enabling the reading of N and M value from the json request
For this commit only the first element from the {N, M} list is read
and assigned.

This is better than not reading this value at all.

The commit also updates test_files and test data files with correct
values for the effective_freq_slot attribute.

Signed-off-by: EstherLerouzic <esther.lerouzic@orange.com>
Change-Id: I1e60fe833ca1092b40de27c8cbfb13083810414e
2020-11-06 14:44:13 +01:00
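A minimal sketch of the described behaviour (the request layout shown here is an assumption):

    def first_slot(request: dict):
        """Pick the first requested {N, M} pair from effective_freq_slot, if any."""
        slots = request.get('effective_freq_slot') or []
        if not slots:
            return None, None
        return slots[0].get('N'), slots[0].get('M')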
EstherLerouzic
95d24d8e20 Avoid overwriting blocking reason
When a path is blocked for the 'NO_FEASIBLE_MODE' reason and bidir is true,
the request attributes are filled with the last explored mode values
(notably baudrate), and the reversed path is propagated with this last
explored mode's specs. If this reversed path is also not feasible, the blocking
reason was overwritten with a 'MODE_NOT_FEASIBLE' reason, because
baudrate is filled in the request attribute.

This change ensures that the blocking reason (if it exists) is not overwritten.

Signed-off-by: EstherLerouzic <esther.lerouzic@orange.com>
Change-Id: If80a37d77e2b967a327562c733a44e7f78f1c544
2020-11-06 14:43:54 +01:00
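A minimal sketch of the guard (hypothetical attribute name): a blocking reason recorded for the forward path is kept even if the reverse propagation also fails.

    def record_blocking(request, new_reason):
        # keep the first blocking reason; the reverse-path check must not overwrite it
        if getattr(request, 'blocking_reason', None) is None:
            request.blocking_reason = new_reason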
manuedelf
6c449edece docker image update + readme 2020-10-16 23:34:22 +02:00
manuedelf
d051f93d55 Rest api for GNPy 2020-10-15 16:27:18 +02:00
252 changed files with 82908 additions and 175960 deletions

View File

@@ -1,9 +1 @@
comment: off
coverage:
  status:
    project:
      default:
        threshold: 5%
    patch:
      default:
        only_pulls: true

View File

@@ -1,3 +1,3 @@
#!/bin/bash
cp -nr /oopt-gnpy/gnpy/example-data /shared
cp -nr /opt/application/oopt-gnpy/gnpy/example-data /shared
exec "$@"

47
.docker-travis.sh Executable file
View File

@@ -0,0 +1,47 @@
#!/bin/bash
set -e
IMAGE_NAME=telecominfraproject/oopt-gnpy
IMAGE_TAG=$(git describe --tags)
ALREADY_FOUND=0
docker pull ${IMAGE_NAME}:${IMAGE_TAG} && ALREADY_FOUND=1
if [[ $ALREADY_FOUND == 0 ]]; then
    docker build . -t ${IMAGE_NAME}
    docker tag ${IMAGE_NAME} ${IMAGE_NAME}:${IMAGE_TAG}
    # shared directory setup: do not clobber the real data
    mkdir trash
    cd trash
    docker run -it --rm --volume $(pwd):/shared ${IMAGE_NAME} gnpy-transmission-example
else
    echo "Image ${IMAGE_NAME}:${IMAGE_TAG} already available, will just update the other tags"
fi

docker images

do_docker_login() {
    echo "${DOCKER_PASSWORD}" | docker login -u "${DOCKER_USERNAME}" --password-stdin
}

if [[ "${TRAVIS_PULL_REQUEST}" == "false" ]]; then
    if [[ "${TRAVIS_BRANCH}" == "develop" || "${TRAVIS_BRANCH}" == "docker" ]]; then
        echo "Publishing latest"
        docker tag ${IMAGE_NAME}:${IMAGE_TAG} ${IMAGE_NAME}:latest
        do_docker_login
        if [[ $ALREADY_FOUND == 0 ]]; then
            docker push ${IMAGE_NAME}:${IMAGE_TAG}
        fi
        docker push ${IMAGE_NAME}:latest
    elif [[ "${TRAVIS_BRANCH}" == "master" ]]; then
        echo "Publishing stable"
        docker tag ${IMAGE_NAME}:${IMAGE_TAG} ${IMAGE_NAME}:stable
        do_docker_login
        if [[ $ALREADY_FOUND == 0 ]]; then
            docker push ${IMAGE_NAME}:${IMAGE_TAG}
        fi
        docker push ${IMAGE_NAME}:stable
    fi
fi

View File

@@ -1,7 +0,0 @@
# Thanks for contributing to GNPy
If it isn't much trouble, please send your contribution as patches to our Gerrit.
Here's [how to submit patches](https://review.gerrithub.io/Documentation/intro-gerrit-walkthrough-github.html), and here's a [list of stuff we are currently working on](https://review.gerrithub.io/q/project:Telecominfraproject/oopt-gnpy+status:open).
Just sign in via your existing GitHub account.
However, if you feel more comfortable with filing GitHub PRs, we can work with that too.

View File

@@ -1,147 +0,0 @@
on:
  push:
  pull_request:
    branches:
      - master

name: CI

jobs:
  build:
    name: Tox test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: fedora-python/tox-github-action@v37.0
        with:
          tox_env: ${{ matrix.tox_env }}
          dnf_install: ${{ matrix.dnf_install }}
      - uses: codecov/codecov-action@v3.1.1
        if: ${{ endswith(matrix.tox_env, '-cover') }}
        with:
          files: ${{ github.workspace }}/cover/coverage.xml
    strategy:
      fail-fast: false
      matrix:
        tox_env:
          - py38
          - py39
          - py310
          - py311
          - py312-cover
        include:
          - tox_env: docs
            dnf_install: graphviz

  pypi:
    needs: build
    if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository_owner == 'Telecominfraproject' }}
    name: PyPI packaging
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v4
        name: Install Python
        with:
          python-version: '3.12'
      - uses: casperdcl/deploy-pypi@bb869aafd89f657ceaafe9561d3b5584766c0f95
        with:
          password: ${{ secrets.PYPI_API_TOKEN }}
          pip: wheel -w dist/ --no-deps .
          upload: true

  docker:
    needs: build
    if: ${{ github.event_name == 'push' && (github.ref == 'refs/heads/master' || startsWith(github.ref, 'refs/tags/v')) && github.repository_owner == 'Telecominfraproject' }}
    name: Docker image
    runs-on: ubuntu-latest
    steps:
      - name: Log in to Docker Hub
        uses: docker/login-action@v1
        with:
          username: jktjkt
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Extract tag name
        if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
        id: extract_pretty_git
        run: echo ::set-output name=GIT_DESC::$(git describe --tags)
      - name: Build and push a container
        uses: docker/build-push-action@v2
        if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
        with:
          context: .
          push: true
          tags: |
            telecominfraproject/oopt-gnpy:${{ steps.extract_pretty_git.outputs.GIT_DESC }}
            telecominfraproject/oopt-gnpy:master
      - name: Extract tag name
        if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') }}
        id: extract_tag_name
        run: echo ::set-output name=GIT_DESC::${GITHUB_REF/refs\/tags\//}
      - name: Build and push a container
        uses: docker/build-push-action@v2
        if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') }}
        with:
          context: .
          push: true
          tags: |
            telecominfraproject/oopt-gnpy:${{ steps.extract_tag_name.outputs.GIT_DESC }}
            telecominfraproject/oopt-gnpy:latest

  other-platforms:
    name: Tests on other platforms
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python_version }}
      - run: |
          pip install --editable .[tests]
          pytest -vv
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: windows-2022
            python_version: "3.11"
          - os: windows-2022
            python_version: "3.12"
          - os: windows-2025
            python_version: "3.11"
          - os: windows-2025
            python_version: "3.12"
          - os: macos-13
            python_version: "3.12"
          - os: macos-14
            python_version: "3.12"

  paywalled-platforms:
    name: Tests on paywalled platforms
    if: github.repository_owner == 'Telecominfraproject'
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python_version }}
      - run: |
          pip install --editable .[tests]
          pytest -vv
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: macos-13-xlarge # Apple M1 CPU
            python_version: "3.12"

View File

@@ -2,3 +2,4 @@
host=review.gerrithub.io
project=Telecominfraproject/oopt-gnpy
defaultrebase=0
defaultbranch=develop

View File

@@ -1,3 +0,0 @@
queries:
- exclude: py/clear-text-logging-sensitive-data
- exclude: py/clear-text-storage-sensitive-data

View File

@@ -1,17 +1,4 @@
version: 2
build:
os: ubuntu-22.04
tools:
python: "3.12"
apt_packages:
- graphviz
image: latest
python:
install:
- method: pip
path: .
extra_requirements:
- docs
sphinx:
configuration: docs/conf.py
version: 3.6

24
.travis.yml Normal file
View File

@@ -0,0 +1,24 @@
dist: xenial
sudo: false
language: python
services: docker
python:
  - "3.6"
  - "3.7"
install: skip
script:
  - python setup.py develop
  - pip install pytest-cov rstcheck
  - pytest --cov-report=xml --cov=gnpy -v
  - rstcheck --ignore-roles cite *.rst
  - sphinx-build -W --keep-going docs/ x-throwaway-location
after_success:
  - bash <(curl -s https://codecov.io/bash)
jobs:
  include:
    - stage: test
      name: Docker image
      script:
        - git fetch --unshallow
        - ./.docker-travis.sh
        - docker images

View File

@@ -2,33 +2,23 @@
- project:
check:
jobs:
- tox-py38:
vars:
ensure_tox_version: '<4'
- tox-py39:
vars:
ensure_tox_version: '<4'
- tox-py310-cover:
vars:
ensure_tox_version: '<4'
- tox-docs-f36:
vars:
ensure_tox_version: '<4'
- tox-py36-cover
- coverage-diff:
voting: false
dependencies:
- tox-py310-cover-previous
- tox-py310-cover
- tox-py36-cover-previous
- tox-py36-cover
vars:
coverage_job_name_previous: tox-py310-cover-previous
coverage_job_name_current: tox-py310-cover
- tox-linters-diff-n-report:
coverage_job_name_previous: tox-py36-cover-previous
coverage_job_name_current: tox-py36-cover
- tox-linters-diff:
voting: false
vars:
ensure_tox_version: '<4'
- tox-py310-cover-previous:
vars:
ensure_tox_version: '<4'
- tox-docs-el8
- tox-py36-cover-previous
gate:
jobs:
- tox-py36-el8
- tox-docs-el8
tag:
jobs:
- oopt-release-python:

View File

@@ -7,30 +7,22 @@ To learn how to contribute, please see CONTRIBUTING.md
- Alessio Ferrari (Politecnico di Torino) <alessio.ferrari@polito.it>
- Anders Lindgren (Telia Company) <Anders.X.Lindgren@teliacompany.com>
- Andrea D'Amico (NEC) <adamico@nec-labs.com>
- Arturo Mayoral (Telecom Infra Project) <amayoral@telecominfraproject.com>
- Andrea D'Amico (Politecnico di Torino) <andrea.damico@polito.it>
- Brian Taylor (Facebook) <briantaylor@fb.com>
- David Boertjes (Ciena) <dboertje@ciena.com>
- Diego Landa (Facebook) <dlanda@fb.com>
- Emmanuelle Delfour (Orange) <WEDE7391@orange.com>
- Esther Le Rouzic (Orange) <esther.lerouzic@orange.com>
- Florian Frank (Orange) <florian1.frank@orange.com>
- Gabriele Galimberti (Cisco) <ggalimbe@cisco.com>
- Gert Grammel (Juniper Networks) <ggrammel@juniper.net>
- Giacomo Borraccini (NEC Laboratories America) <gborraccini@nec-labs.com>
- Gilad Goldfarb (Facebook) <giladg@fb.com>
- James Powell (Telecom Infra Project) <james.powell@telecominfraproject.com>
- Jan Kundrát (Telecom Infra Project) <jkt@jankundrat.com>
- Jan Kundrát (Telecom Infra Project) <jan.kundrat@telecominfraproject.com>
- Jeanluc Augé (Orange) <jeanluc.auge@orange.com>
- Jenny L'Escop (Orange) <jenny.lescop@orange.com>
- Jonas Mårtensson (RISE) <jonas.martensson@ri.se>
- Mattia Cantono (Politecnico di Torino) <mattia.cantono@polito.it>
- Miguel Garrich (University Catalunya) <miquel.garrich@upct.es>
- Raj Nagarajan (Lumentum) <raj.nagarajan@lumentum.com>
- Renato Ambrosone (Politecnico di Torino) <renato.ambrosone@polito.it>
- Roberts Miculens (Lattelecom) <roberts.miculens@lattelecom.lv>
- Rodrigo Sasse David (Orange) <rodrigo.sassedavid@orange.com>
- Sami Alavi (NUST) <sami.mansooralavi1999@gmail.com>
- Shengxiang Zhu (University of Arizona) <szhu@email.arizona.edu>
- Stefan Melin (Telia Company) <Stefan.Melin@teliacompany.com>
- Vittorio Curri (Politecnico di Torino) <vittorio.curri@polito.it>

View File

@@ -1,8 +1,15 @@
FROM python:3.9-slim
COPY . /oopt-gnpy
WORKDIR /oopt-gnpy
RUN apt update; apt install -y git
RUN pip install .
WORKDIR /shared/example-data
ENTRYPOINT ["/oopt-gnpy/.docker-entry.sh"]
FROM python:3.7-slim
WORKDIR /opt/application/oopt-gnpy
RUN mkdir -p /shared/example-data \
&& groupadd gnpy \
&& useradd -g gnpy -m gnpy \
&& apt-get update \
&& apt-get install git -y \
&& rm -rf /var/lib/apt/lists/*
COPY . /opt/application/oopt-gnpy
WORKDIR /opt/application/oopt-gnpy
RUN pip install . \
&& chown -Rc gnpy:gnpy /opt/application/oopt-gnpy /shared/example-data
USER gnpy
ENTRYPOINT ["/opt/application/oopt-gnpy/.docker-entry.sh"]
CMD ["/bin/bash"]

View File

@@ -1,35 +0,0 @@
# GNPy: Optical Route Planning and DWDM Network Optimization
[![Install via pip](https://img.shields.io/pypi/v/gnpy)](https://pypi.org/project/gnpy/)
[![Python versions](https://img.shields.io/pypi/pyversions/gnpy)](https://pypi.org/project/gnpy/)
[![Documentation status](https://readthedocs.org/projects/gnpy/badge/?version=master)](http://gnpy.readthedocs.io/en/master/?badge=master)
[![GitHub Workflow Status](https://img.shields.io/github/actions/workflow/status/Telecominfraproject/oopt-gnpy/main.yml)](https://github.com/Telecominfraproject/oopt-gnpy/actions/workflows/main.yml)
[![Gerrit](https://img.shields.io/badge/patches-via%20Gerrit-blue)](https://review.gerrithub.io/q/project:Telecominfraproject/oopt-gnpy+is:open)
[![Contributors](https://img.shields.io/github/contributors-anon/Telecominfraproject/oopt-gnpy)](https://github.com/Telecominfraproject/oopt-gnpy/graphs/contributors)
[![Code Coverage via codecov](https://img.shields.io/codecov/c/github/Telecominfraproject/oopt-gnpy)](https://codecov.io/gh/Telecominfraproject/oopt-gnpy)
[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3458319.svg)](https://doi.org/10.5281/zenodo.3458319)
[![Matrix chat](https://img.shields.io/matrix/oopt-gnpy:matrix.org)](https://matrix.to/#/%23oopt-gnpy%3Amatrix.org?via=matrix.org)
GNPy is an open-source, community-developed library for building route planning and optimization tools in real-world mesh optical networks.
We are a consortium of operators, vendors, and academic researchers sponsored via the [Telecom Infra Project](http://telecominfraproject.com)'s [OOPT/PSE](https://telecominfraproject.com/open-optical-packet-transport) working group.
Together, we are building this tool for rapid development of production-grade route planning tools which is easily extensible to include custom network elements and performant to the scale of real-world mesh optical networks.
![GNPy with an OLS system](docs/images/GNPy-banner.png)
## Quick Start
Install either via [Docker](https://gnpy.readthedocs.io/en/master/install.html#using-prebuilt-docker-images), or as a [Python package](https://gnpy.readthedocs.io/en/master/install.html#using-python-on-your-computer).
Read our [documentation](https://gnpy.readthedocs.io/), learn from the demos, and [get in touch with us](https://github.com/Telecominfraproject/oopt-gnpy/discussions).
This example demonstrates how GNPy can be used to check the expected SNR at the end of the line by varying the channel input power:
![Running a simple simulation example](docs/images/gnpy-transmission-example.svg)
GNPy can do much more, including acting as a Path Computation Engine, tracking bandwidth requests, or advising the SDN controller about a best possible path through a large DWDM network.
Learn more about this [in the documentation](https://gnpy.readthedocs.io/), or give it a [try online at `gnpy.app`](https://gnpy.app/):
[![Path propagation at gnpy.app](docs/images/2022-04-12-gnpy-app.png)](https://gnpy.app/)
## Project Calendar
See upcoming meetings on the [Project Calendar](https://telecominfraproject.github.io/oopt-gnpy/calendar.html). The calendar is embedded from Google Calendar and updates automatically.

260
README.rst Normal file
View File

@@ -0,0 +1,260 @@
.. image:: docs/images/GNPy-banner.png
:width: 100%
:align: left
:alt: GNPy with an OLS system
====================================================================
`gnpy`: mesh optical network route planning and optimization library
====================================================================
|docs| |travis| |doi| |contributors| |codacy-quality| |codecov|
**`gnpy` is an open-source, community-developed library for building route
planning and optimization tools in real-world mesh optical networks.**
`gnpy <http://github.com/telecominfraproject/oopt-gnpy>`__ is:
--------------------------------------------------------------
- a sponsored project of the `OOPT/PSE <https://telecominfraproject.com/open-optical-packet-transport/>`_ working group of the `Telecom Infra Project <http://telecominfraproject.com>`_
- fully community-driven, fully open source library
- driven by a consortium of operators, vendors, and academic researchers
- intended for rapid development of production-grade route planning tools
- easily extensible to include custom network elements
- performant to the scale of real-world mesh optical networks
Documentation: https://gnpy.readthedocs.io
Get In Touch
~~~~~~~~~~~~
There are `weekly calls <https://telecominfraproject.workplace.com/events/702894886867547/>`__ about our progress.
Newcomers, users and telecom operators are especially welcome there.
We encourage all interested people outside the TIP to `join the project <https://telecominfraproject.com/apply-for-membership/>`__.
How to Install
--------------
Install either via `Docker <docs/install.rst#install-docker>`__, or as a `Python package <docs/install.rst#install-pip>`__.
Instructions for First Use
--------------------------
``gnpy`` is a library for building route planning and optimization tools.
It ships with a number of example programs. Release versions will ship with
fully-functional programs.
**Note**: *If you are a network operator or involved in route planning and
optimization for your organization, please contact project maintainer Jan
Kundrát <jan.kundrat@telecominfraproject.com>. gnpy is looking for users with
specific, delineated use cases to drive requirements for future
development.*
This example demonstrates how GNPy can be used to check the expected SNR at the end of the line by varying the channel input power:
.. image:: https://telecominfraproject.github.io/oopt-gnpy/docs/images/transmission_main_example.svg
:width: 100%
:align: left
:alt: Running a simple simulation example
:target: https://asciinema.org/a/252295
By default, this script operates on a single span network defined in
`gnpy/example-data/edfa_example_network.json <gnpy/example-data/edfa_example_network.json>`_
You can specify a different network at the command line as follows. For
example, to use the CORONET Global network defined in
`gnpy/example-data/CORONET_Global_Topology.json <gnpy/example-data/CORONET_Global_Topology.json>`_:
.. code-block:: shell-session
$ gnpy-transmission-example $(gnpy-example-data)/CORONET_Global_Topology.json
It is also possible to use an Excel file input (for example
`gnpy/example-data/CORONET_Global_Topology.xlsx <gnpy/example-data/CORONET_Global_Topology.xlsx>`_).
The Excel file will be processed into a JSON file with the same prefix.
Further details about the Excel data structure are available `in the documentation <docs/excel.rst>`__.
The main transmission example will calculate the average signal OSNR and SNR
across network elements (transceiver, ROADMs, fibers, and amplifiers)
between two transceivers selected by the user. Additional details are provided by doing ``gnpy-transmission-example -h``. (By default, for the CORONET Global
network, it will show the transmission of spectral information between Abilene and Albany)
This script calculates the average signal OSNR = |OSNR| and SNR = |SNR|.
.. |OSNR| replace:: P\ :sub:`ch`\ /P\ :sub:`ase`
.. |SNR| replace:: P\ :sub:`ch`\ /(P\ :sub:`nli`\ +\ P\ :sub:`ase`)
|Pase| is the amplified spontaneous emission noise, and |Pnli| the non-linear
interference noise.
.. |Pase| replace:: P\ :sub:`ase`
.. |Pnli| replace:: P\ :sub:`nli`
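Both ratios are simple to evaluate in linear units; here is a small illustrative sketch (the power values are placeholders, not GNPy defaults):

.. code-block:: python

   from math import log10

   def db(x):
       return 10 * log10(x)

   p_ch, p_ase, p_nli = 1e-3, 1e-6, 5e-7   # Watts, placeholder values
   osnr = db(p_ch / p_ase)                 # P_ch / P_ase
   snr = db(p_ch / (p_nli + p_ase))        # P_ch / (P_nli + P_ase)
   print(f"OSNR = {osnr:.2f} dB, SNR = {snr:.2f} dB")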
Further Instructions for Use
----------------------------
Simulations are driven by a set of `JSON <docs/json.rst>`__ or `XLS <docs/excel.rst>`__ files.
The ``gnpy-transmission-example`` script propagates a spectrum of channels at 32 Gbaud, 50 GHz spacing and 0 dBm/channel.
Launch power can be overridden by using the ``--power`` argument.
Spectrum information is not yet parametrized but can be modified directly in the ``eqpt_config.json`` (via the ``SpectralInformation`` -SI- structure) to accommodate any baud rate or spacing.
The number of channels is computed from the ``spacing``, ``f_min`` and ``f_max`` values.
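As a rough illustration (a simplified sketch, not the exact library code; the exact rounding convention used by GNPy may differ):

.. code-block:: python

   from math import floor

   def nb_channels(f_min, f_max, spacing):
       """Approximate channel count for a fixed grid between f_min and f_max (Hz)."""
       return floor((f_max - f_min) / spacing) + 1

   # e.g. a C-band-like grid from 191.35 THz to 196.1 THz with 50 GHz spacing
   print(nb_channels(191.35e12, 196.1e12, 50e9))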
Experimental support for Raman amplification is available:
.. code-block:: shell-session
$ gnpy-transmission-example \
$(gnpy-example-data)/raman_edfa_example_network.json \
--sim $(gnpy-example-data)/sim_params.json --show-channels
Configuration of Raman pumps (their frequencies, power and pumping direction) is done via the `RamanFiber element in the network topology <gnpy/example-data/raman_edfa_example_network.json>`_.
General numeric parameters for simulation control are provided in the `gnpy/example-data/sim_params.json <gnpy/example-data/sim_params.json>`_.
Use ``gnpy-path-request`` to request several paths at once:
.. code-block:: shell-session
$ cd $(gnpy-example-data)
$ gnpy-path-request -o output_file.json \
meshTopologyExampleV2.xls meshTopologyExampleV2_services.json
This program operates on a network topology (`JSON <docs/json.rst>`__ or `Excel <docs/excel.rst>`__ format), processing the list of service requests (JSON or XLS again).
The service requests and reply formats are based on the `draft-ietf-teas-yang-path-computation-01 <https://tools.ietf.org/html/draft-ietf-teas-yang-path-computation-01>`__ with custom extensions (e.g., for transponder modes).
An example of the JSON input is provided in file `service-template.json`, while results are shown in `path_result_template.json`.
Important note: ``gnpy-path-request`` is not a network dimensioning tool: each service does not reserve spectrum or occupy resources such as transponders. It only computes path feasibility assuming the spectrum (between defined frequencies) is loaded with "nb of channels" spaced by "spacing" values as specified in the system parameters input in the service file, each channel having the same characteristics (baud rate, format, ...) as the service transponder. The transceiver element acts as a "logical starting/stopping point" for the spectral information propagation. At that point it is not meant to represent the capacity of add/drop ports.
As a result, the transponder type is not part of the network info; it is related to the list of service requests.
The current version includes a spectrum assignment feature that computes a candidate spectrum assignment for each service based on a first-fit policy. Spectrum is assigned based on the service's specified spacing value, its path_bandwidth value and the selected mode for the transceiver. This spectrum assignment includes a basic capacity-planning capability, so that the spectrum resource is limited by the frequency min and max values defined for the links. If the requested services reach the link spectrum capacity, the feasibility of additional services is still computed, but they are marked as blocked for spectrum reasons.
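As a simplified sketch of the first-fit idea (illustrative only, not the actual GNPy code, which works on the {N, M} slot grid):

.. code-block:: python

   def first_fit(occupied, needed):
       """Index of the first run of `needed` free slots, or None (simplified sketch)."""
       run_start, run_len = 0, 0
       for i, busy in enumerate(occupied):
           if busy:
               run_start, run_len = i + 1, 0
           else:
               run_len += 1
               if run_len == needed:
                   return run_start
       return None

   grid = [False] * 16
   grid[3:6] = [True] * 3       # slots taken by an already-assigned service
   print(first_fit(grid, 4))    # 6: the first gap wide enough after the occupied block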
REST API (experimental)
-----------------------
``gnpy`` provides an experimental API for requesting several paths at once. It is based on a Flask server.
You can run it from the command line or through Docker.
.. code-block:: shell-session
$ gnpy-rest
.. code-block:: shell-session
$ docker run -p 8080:8080 -dit xxxx gnpy-rest
After starting the API server, you can launch a request:
.. code-block:: shell-session
$ curl -v -X POST -H "Content-Type: application/json" -d @<PATH_TO_JSON_REQUEST_FILE> http://localhost:8080/api/v1/path-computation
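The same request can also be issued from Python; a minimal sketch assuming the third-party ``requests`` package is installed and that ``request.json`` holds a valid service request:

.. code-block:: python

   import json
   import requests

   with open('request.json', encoding='utf-8') as f:
       payload = json.load(f)

   resp = requests.post('http://localhost:8080/api/v1/path-computation',
                        json=payload, timeout=60)
   resp.raise_for_status()
   print(json.dumps(resp.json(), indent=2))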
Contributing
------------
``gnpy`` is looking for additional contributors, especially those with experience
planning and maintaining large-scale, real-world mesh optical networks.
To get involved, please contact Jan Kundrát
<jan.kundrat@telecominfraproject.com> or Gert Grammel <ggrammel@juniper.net>.
``gnpy`` contributions are currently limited to members of `TIP
<http://telecominfraproject.com>`_. Membership is free and open to all.
See the `Onboarding Guide
<https://github.com/Telecominfraproject/gnpy/wiki/Onboarding-Guide>`_ for
specific details on code contributions.
See `AUTHORS.rst <AUTHORS.rst>`_ for past and present contributors.
Project Background
------------------
Data Centers are built upon interchangeable, highly standardized node and
network architectures rather than a sum of isolated solutions. This also
translates to optical networking. It leads to a push in enabling multi-vendor
optical network by disaggregating HW and SW functions and focusing on
interoperability. In this paradigm, the burden of responsibility for ensuring
the performance of such disaggregated open optical systems falls on the
operators. Consequently, operators and vendors are collaborating in defining
control models that can be readily used by off-the-shelf controllers. However,
node and network models are only part of the answer. To take reasonable
decisions, controllers need to incorporate logic to simulate and assess optical
performance. Hence, a vendor-independent optical quality estimator is required.
Given its vendor-agnostic nature, such an estimator needs to be driven by a
consortium of operators, system and component suppliers.
Founded in February 2016, the Telecom Infra Project (TIP) is an
engineering-focused initiative which is operator driven, but features
collaboration across operators, suppliers, developers, integrators, and
startups with the goal of disaggregating the traditional network deployment
approach. The group's ultimate goal is to help provide better connectivity for
communities all over the world as more people come on-line and demand more
bandwidth-intensive experiences like video, virtual reality and augmented
reality.
Within TIP, the Open Optical Packet Transport (OOPT) project group is chartered
with unbundling monolithic packet-optical network technologies in order to
unlock innovation and support new, more flexible connectivity paradigms.
The key to unbundling is the ability to accurately plan and predict the
performance of optical line systems based on an accurate simulation of optical
parameters. Under that OOPT umbrella, the Physical Simulation Environment (PSE)
working group set out to disrupt the planning landscape by providing an open
source simulation model which can be used freely across multiple vendor
implementations.
.. |docs| image:: https://readthedocs.org/projects/gnpy/badge/?version=master
:target: http://gnpy.readthedocs.io/en/master/?badge=master
:alt: Documentation Status
:scale: 100%
.. |travis| image:: https://travis-ci.com/Telecominfraproject/oopt-gnpy.svg?branch=master
:target: https://travis-ci.com/Telecominfraproject/oopt-gnpy
:alt: Build Status via Travis CI
:scale: 100%
.. |doi| image:: https://zenodo.org/badge/96894149.svg
:target: https://zenodo.org/badge/latestdoi/96894149
:alt: DOI
:scale: 100%
.. |contributors| image:: https://img.shields.io/github/contributors-anon/Telecominfraproject/oopt-gnpy
:target: https://github.com/Telecominfraproject/oopt-gnpy/graphs/contributors
:alt: Code Contributors via GitHub
:scale: 100%
.. |codacy-quality| image:: https://img.shields.io/lgtm/grade/python/github/Telecominfraproject/oopt-gnpy
:target: https://lgtm.com/projects/g/Telecominfraproject/oopt-gnpy/
:alt: Code Quality via LGTM.com
:scale: 100%
.. |codecov| image:: https://img.shields.io/codecov/c/github/Telecominfraproject/oopt-gnpy
:target: https://codecov.io/gh/Telecominfraproject/oopt-gnpy
:alt: Code Coverage via codecov
:scale: 100%
TIP OOPT/PSE & PSE WG Charter
-----------------------------
We believe that openly sharing ideas, specifications, and other intellectual
property is the key to maximizing innovation and reducing complexity.
TIP OOPT/PSE's goal is to build an end-to-end simulation environment which
defines the network models of the optical device transfer functions and their
parameters. This environment will provide validation of the optical
performance requirements for the TIP OLS building blocks.
- The model may be approximate or complete depending on the network complexity.
Each model shall be validated against the proposed network scenario.
- The environment must be able to process network models from multiple vendors,
and also allow users to pick any implementation in an open source framework.
- The PSE will influence and benefit from the innovation of the DTC, API, and
OLS working groups.
- The PSE represents a step along the journey towards multi-layer optimization.
License
-------
``gnpy`` is distributed under a standard BSD 3-Clause License.
See `LICENSE <LICENSE>`__ for more details.

View File

@@ -1 +0,0 @@
graphviz

View File

@@ -1,4 +0,0 @@
.wy-table-responsive table td, .wy-table-responsive table th {
white-space: normal;
}

View File

@@ -1,60 +0,0 @@
(about-gnpy)=
# About the project
GNPy is a sponsored project of the [OOPT/PSE](https://telecominfraproject.com/open-optical-packet-transport/) working group of the [Telecom Infra Project](http://telecominfraproject.com).
There are weekly calls about our progress.
Newcomers, users and telecom operators are especially welcome there.
We encourage all interested people outside the TIP to [join the project](https://telecominfraproject.com/apply-for-membership/) and especially to [get in touch with us](https://github.com/Telecominfraproject/oopt-gnpy/discussions).
(contributing)=
## Contributing
`gnpy` is looking for additional contributors, especially those with experience planning and maintaining large-scale, real-world mesh optical networks.
To get involved, please contact [Esther Le Rouzic](mailto:esther.lerouzic@orange.com) or
[Andrea d'Amico](mailto:adamico@nec-labs.com) or [Gert Grammel](mailto:ggrammel@juniper.net).
`gnpy` contributions are currently limited to members of [TIP](http://telecominfraproject.com).
Membership is free and open to all.
See the [Onboarding Guide](https://github.com/Telecominfraproject/gnpy/wiki/Onboarding-Guide) for specific details on code contributions, or just [upload patches to our Gerrit](https://review.gerrithub.io/Documentation/intro-gerrit-walkthrough-github.html).
Here is [what we are currently working on](https://review.gerrithub.io/q/project:Telecominfraproject/oopt-gnpy+status:open).
## Project Background
Data Centers are built upon interchangeable, highly standardized node and network architectures rather than a sum of isolated solutions.
This also translates to optical networking.
It leads to a push in enabling multi-vendor optical network by disaggregating HW and SW functions and focusing on interoperability.
In this paradigm, the burden of responsibility for ensuring the performance of such disaggregated open optical systems falls on the operators.
Consequently, operators and vendors are collaborating in defining control models that can be readily used by off-the-shelf controllers.
However, node and network models are only part of the answer.
To take reasonable decisions, controllers need to incorporate logic to simulate and assess optical performance.
Hence, a vendor-independent optical quality estimator is required.
Given its vendor-agnostic nature, such an estimator needs to be driven by a consortium of operators, system and component suppliers.
Founded in February 2016, the Telecom Infra Project (TIP) is an engineering-focused initiative which is operator driven, but features collaboration across operators, suppliers, developers, integrators, and startups with the goal of disaggregating the traditional network deployment approach.
The group's ultimate goal is to help provide better connectivity for communities all over the world as more people come on-line and demand more bandwidth-intensive experiences like video, virtual reality and augmented reality.
Within TIP, the Open Optical Packet Transport (OOPT) project group is chartered with unbundling monolithic packet-optical network technologies in order to unlock innovation and support new, more flexible connectivity paradigms.
The key to unbundling is the ability to accurately plan and predict the performance of optical line systems based on an accurate simulation of optical parameters.
Under that OOPT umbrella, the Physical Simulation Environment (PSE) working group set out to disrupt the planning landscape by providing an open source simulation model which can be used freely across multiple vendor implementations.
## TIP OOPT/PSE & PSE WG Charter
We believe that openly sharing ideas, specifications, and other intellectual property is the key to maximizing innovation and reducing complexity.
TIP OOPT/PSE's goal is to build an end-to-end simulation environment which defines the network models of the optical device transfer functions and their parameters.
This environment will provide validation of the optical performance requirements for the TIP OLS building blocks.
- The model may be approximate or complete depending on the network complexity.
Each model shall be validated against the proposed network scenario.
- The environment must be able to process network models from multiple vendors, and also allow users to pick any implementation in an open source framework.
- The PSE will influence and benefit from the innovation of the DTC, API, and OLS working groups.
- The PSE represents a step along the journey towards multi-layer optimization.
License
-------
GNPy is distributed under a standard BSD 3-Clause License.

View File

@@ -1848,177 +1848,3 @@ month={Sept},}
title = {Telecom Infra Project},
url = {https://www.telecominfraproject.com},
}
@ARTICLE{DAmicoJLT2022,
author={DAmico, Andrea and Correia, Bruno and London, Elliot and Virgillito,
Emanuele and Borraccini, Giacomo and Napoli, Antonio and Curri, Vittorio},
journal={Journal of Lightwave Technology},
title={Scalable and Disaggregated GGN Approximation Applied to a C+L+S Optical Network},
year={2022},
volume={40},
number={11},
pages={3499-3511},
doi={10.1109/JLT.2022.3162134}
}
@inproceedings{grammel2018physical,
title={Physical simulation environment of the telecommunications infrastructure project (TIP)},
author={Grammel, Gert and Curri, Vittorio and Auge, Jean-Luc},
booktitle={Optical Fiber Communication Conference},
pages={M1D--3},
year={2018},
organization={Optica Publishing Group}
}
@inproceedings{taylor2018towards,
title={Towards a route planning tool for open optical networks in the telecom infrastructure project},
author={Taylor, Brian D and Goldfarb, Gilad and Bandyopadhyay, Saumil and Curri, Vittorio and Schmidtke, Hans-Juergen},
booktitle={Optical Fiber Communication Conference},
pages={Tu3E--4},
year={2018},
organization={Optica Publishing Group}
}
@article{filer2018multi,
title={Multi-vendor experimental validation of an open source QoT estimator for optical networks},
author={Filer, Mark and Cantono, Mattia and Ferrari, Alessio and Grammel, Gert and Galimberti, Gabriele and Curri, Vittorio},
journal={Journal of Lightwave Technology},
volume={36},
number={15},
pages={3073--3082},
year={2018},
publisher={IEEE}
}
@inproceedings{auge2019open,
title={Open optical network planning demonstration},
author={Auge, Jean-Luc and Grammel, Gert and Le Rouzic, Esther and Curri, Vittorio and Galimberti, Gabriele and Powell, James},
booktitle={Optical Fiber Communication Conference},
pages={M3Z--9},
year={2019},
organization={Optica Publishing Group}
}
@inproceedings{kundrat2020physical,
title={Physical-layer awareness: GNPy and ONOS for end-to-end circuits in disaggregated networks},
author={Kundr{\'a}t, Jan and Campanella, Andrea and Le Rouzic, Esther and Ferrari, Alessio and Havli{\v{s}}, Ond{\v{r}}ej and Ha{\v{z}}linsk{\`y}, Michal and Grammel, Gert and Galimberti, Gabriele and Curri, Vittorio},
booktitle={2020 Optical Fiber Communications Conference and Exhibition (OFC)},
pages={1--3},
year={2020},
organization={IEEE}
}
@inproceedings{ferrari2020experimental,
title={Experimental validation of an open source quality of transmission estimator for open optical networks},
author={Ferrari, Alessio and Filer, Mark and Balasubramanian, Karthikeyan and Yin, Yawei and Le Rouzic, Esther and Kundr{\'a}t, Jan and Grammel, Gert and Galimberti, Gabriele and Curri, Vittorio},
booktitle={2020 Optical Fiber Communications Conference and Exhibition (OFC)},
pages={1--3},
year={2020},
organization={IEEE}
}
@article{ferrari2020gnpy,
title={GNPy: an open source application for physical layer aware open optical networks},
author={Ferrari, Alessio and Filer, Mark and Balasubramanian, Karthikeyan and Yin, Yawei and Le Rouzic, Esther and Kundr{\'a}t, Jan and Grammel, Gert and Galimberti, Gabriele and Curri, Vittorio},
journal={Journal of Optical Communications and Networking},
volume={12},
number={6},
pages={C31--C40},
year={2020},
publisher={Optica Publishing Group}
}
@inproceedings{ferrari2020softwarized,
title={Softwarized optical transport QoT in production optical network: a Brownfield validation},
author={Ferrari, Alessio and Balasubramanian, Karthikeyan and Filer, Mark and Yin, Yawei and Le Rouzic, Esther and Kundr{\'a}t, Jan and Grammel, Gert and Galimberti, Gabriele and Curri, Vittorio},
booktitle={2020 European Conference on Optical Communications (ECOC)},
pages={1--4},
year={2020},
organization={IEEE}
}
@article{ferrari2021assessment,
title={Assessment on the in-field lightpath QoT computation including connector loss uncertainties},
author={Ferrari, Alessio and Balasubramanian, Karthikeyan and Filer, Mark and Yin, Yawei and Le Rouzic, Esther and Kundr{\'a}t, Jan and Grammel, Gert and Galimberti, Gabriele and Curri, Vittorio},
journal={Journal of Optical Communications and Networking},
volume={13},
number={2},
pages={A156--A164},
year={2021},
publisher={Optica Publishing Group}
}
@inproceedings{kundrat2021gnpy,
title={GNPy \& YANG: open APIs for end-to-end service provisioning in optical networks},
author={Kundr{\'a}t, Jan and Le Rouzic, Esther and M{\aa}rtensson, Jonas and Campanella, Andrea and Havli{\v{s}}, Ond{\v{r}}ej and DAmico, Andrea and Grammel, Gert and Galimberti, Gabriele and Curri, Vittorio and Vojt{\v{e}}ch, Josef},
booktitle={Optical Fiber Communication Conference},
pages={M1B--6},
year={2021},
organization={Optica Publishing Group}
}
@inproceedings{d2021gnpy,
title={GNPy experimental validation on flex-grid, flex-rate WDM optical transport scenarios},
author={DAmico, Andrea and London, Elliot and Le Guyader, Bertrand and Frank, Florian and Le Rouzic, Esther and Pincemin, Erwan and Brochier, Nicolas and Curri, Vittorio},
booktitle={Optical fiber communication conference},
pages={W1G--2},
year={2021},
organization={Optica Publishing Group}
}
@inproceedings{virgillito2021testing,
title={Testing TIP open source solutions in deployed optical networks},
author={Virgillito, Emanuele and Braun, Ralf-Peter and Breuer, Dirk and Gladisch, Andreas and Curri, Vittorio and Grammel, Gert},
booktitle={Optical Fiber Communication Conference},
pages={F1C--3},
year={2021},
organization={Optica Publishing Group}
}
@article{d2022experimental,
title={Experimental validation of GNPy in a multi-vendor flex-grid flex-rate WDM optical transport scenario},
author={DAmico, Andrea and London, Elliot and Le Guyader, Bertrand and Frank, Florian and Le Rouzic, Esther and Pincemin, Erwan and Brochier, Nicolas and Curri, Vittorio},
journal={Journal of Optical Communications and Networking},
volume={14},
number={3},
pages={79--88},
year={2022},
publisher={Optica Publishing Group}
}
@inproceedings{mano2022accuracy,
title={Accuracy of nonlinear interference estimation on launch power optimization in short-reach systems with field trial},
author={Mano, Toru and DAmico, Andrea and Virgillito, Emanuele and Borraccini, Giacomo and Huang, Yue-Kai and Kitamura, Kei and Anazawa, Kazuya and Masuda, Akira and Nishizawa, Hideki and Wang, Ting and others},
booktitle={European Conference and Exhibition on Optical Communication},
pages={We3B--1},
year={2022},
organization={Optica Publishing Group}
}
@inproceedings{kundrat2022gnpy,
title={GNPy: Lessons learned and future plans},
author={Kundr{\'a}t, Jan and Le Rouzic, Esther and M{\aa}rtensson, Jonas and Melin, Stefan and DAmico, Andrea and Grammel, Gert and Galimberti, Gabriele and Curri, Vittorio},
booktitle={European Conference and Exhibition on Optical Communication},
pages={We3B--6},
year={2022},
organization={Optica Publishing Group}
}
@inproceedings{grammel2023open,
title={Open Optical Networks: the good, the bad and the ugly},
author={Grammel, Gert and Kundrat, Jan and Le Rouzic, Esther and Melin, Stefan and Curri, Vittorio and d'Amico, Andrea and Manzotti, Roberto},
booktitle={49th European Conference on Optical Communications (ECOC 2023)},
volume={2023},
pages={1585--1588},
year={2023},
organization={IET}
}
@inproceedings{d2024gnpy,
title={GNPy Experimental Validation in a C+ L Multiband Optical Multiplex Section},
author={DAmico, Andrea and Gatto, Vittorio and Nespola, Antonino and Borraccini, Giacomo and Jiang, Yanchao and Poggiolini, Pierluigi and Le Rouzic, Esther and de Lerma, Arturo Mayoral L{\'o}pez and Grammel, Gert and Manzotti, Roberto and others},
booktitle={2024 24th International Conference on Transparent Optical Networks (ICTON)},
pages={1--4},
year={2024},
organization={IEEE}
}

View File

@@ -1,29 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Project Calendar</title>
<style>
body { font-family: system-ui, -apple-system, Segoe UI, Roboto, Helvetica, Arial, sans-serif; margin: 20px; }
.container { max-width: 1000px; margin: 0 auto; }
h1 { font-size: 1.8rem; margin-bottom: 1rem; }
iframe { border: 0; width: 100%; height: 800px; }
.note { color: #555; margin-top: 1rem; font-size: 0.9rem; }
</style>
</head>
<body>
<div class="container">
<h1>Project Calendar</h1>
<p>This page embeds the public project calendar. It updates automatically when events change in Google Calendar.</p>
<iframe
src="https://calendar.google.com/calendar/embed?src=c_0895d13d880537c3e54db61ba95e9df167db19a49b96d41e42e2c6d842f30a6a%40group.calendar.google.com&ctz=Europe%2FMadrid"
frameborder="0"
scrolling="no"
></iframe>
<p class="note">Timezone: Europe/Madrid. If you prefer your local timezone, add <code>&amp;ctz=Your%2FTimezone</code> to the URL.</p>
</div>
</body>
</html>

View File

@@ -1,297 +0,0 @@
.. _cli-options:
***********************************************************
`gnpy-path-request` and `gnpy-transmission-example` scripts
***********************************************************
Common options
==============
**Option**: `--no-insert-edfas`
-------------------------------
**Purpose**: Disables the automatic insertion of EDFAs after ROADMs and fibers, as well as the splitting
of fibers during the auto-design process.
The `--no-insert-edfas` option is a command-line argument available in GNPy that allows users to control the
automatic insertion of amplifiers during the network design process. This option provides flexibility for
users who may want to manually manage amplifier placements or who have specific design requirements that
do not necessitate automatic amplification.
To use the `--no-insert-edfas` option, simply include it in the command line when running your GNPy program. For example:
.. code-block:: shell-session
gnpy-transmission-example my_network.json --no-insert-edfas
When the `--no-insert-edfas` option is specified:
1. **No Automatic Amplifiers**: The program will not automatically add EDFAs to the network topology after
ROADMs or fiber elements. This means that if the network design requires amplification, users must ensure
that amplifiers are manually defined in the network topology file. Users should be aware that disabling
automatic amplifier insertion may lead to insufficient amplification in the network if not managed properly.
It is essential to ensure that the network topology includes the necessary amplifiers to meet performance requirements.
2. **No Fiber Splitting**: The option also prevents the automatic splitting of fibers during the design process.
This is particularly useful for users who want to maintain specific fiber lengths or configurations without
the program altering them.
**Option**: `--equipment`, `-e`
-------------------------------
**Description**: Specifies the equipment library file.
**Usage**:
.. code-block:: shell-session
gnpy-transmission-example my_network.json --equipment <FILE.json>
**Default**: Uses the default equipment configuration in the example-data folder if not specified.
**Functionality**: This option allows users to load a specific equipment configuration that defines the characteristics of the network elements.
**Option**: `--extra-equipment` and `--extra-config`
----------------------------------------------------
The `--extra-equipment` and `--extra-config` options allow users to extend the default equipment library and configuration
settings used by the GNPy program. This feature is particularly useful for users who need to incorporate additional
equipment types or specific configurations that are not included in the standard equipment library (such as third party pluggables).
**Usage**:
.. code-block:: shell-session
--extra-equipment <file1.json> [<file2.json> ...]
**Parameters**:
- `<file1.json>`: Path to the first additional equipment file.
- `<file2.json>`: Path to any subsequent additional equipment files (optional).
**Functionality**:
- The program will merge the equipment definitions from the specified files into the main equipment library.
- If an equipment type defined in the additional files has the same name as one in the main library, the program
will issue a warning about the duplicate entry and include only the last definition (see the sketch after this list).
- This allows for flexibility in defining equipment that may be specific to certain use cases or vendor-specific models.
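A simplified sketch of that merge behaviour (hypothetical helper, not the actual GNPy code):

.. code-block:: python

   import logging

   logger = logging.getLogger(__name__)

   def merge_equipment(main: dict, extra: dict) -> dict:
       """Merge extra equipment definitions into the main library; the last definition wins."""
       merged = dict(main)
       for name, definition in extra.items():
           if name in merged:
               logger.warning('duplicate equipment entry %s, keeping the last definition', name)
           merged[name] = definition
       return merged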
**`--extra-config`**:
**Description**: This option allows users to specify additional configuration files that can override
or extend the default configuration settings used by the program. This is useful for customizing simulation
parameters or equipment settings. To give an amplifier such a specific config, it must be defined in the
library with the keyword "default_config_from_json" (for a "variable_gain" amplifier) or "advanced_config_from_json"
(for an "advanced_model" amplifier), filled with the name of the file containing the config.
**Usage**:
.. code-block:: shell-session
--extra-config <file1.json> [<file2.json> ...]
**Parameters**:
- `<file1.json>`: Path to the first additional configuration file.
- `<file2.json>`: Path to any subsequent additional configuration files (optional).
**Functionality**:
The program will load the configurations from the specified files and consider them instead of the
default configurations for the amplifiers that use the "default_config_from_json" or "advanced_config_from_json" keywords.
To run the program with additional equipment and configuration files, you can use the following command:
.. code-block:: shell-session
gnpy-transmission-example --equipment main_equipment.json \
--extra-equipment additional_equipment1.json additional_equipment2.json \
--extra-config additional_config1.json
In this example:
- `main_equipment.json` is the primary equipment file.
- `additional_equipment1.json` and `additional_equipment2.json` are additional equipment files that will be merged into the main library.
- `additional_config1.json` is an additional configuration file that will override the default settings for the amplifiers pointing to it.
**Option**: `--save-network`
----------------------------
**Description**: Saves the final network configuration to a specified JSON file.
**Usage**:
.. code-block:: shell-session
--save-network <FILE.json>
**Functionality**: This option allows users to save the network state after the simulation, which can be useful for future reference or analysis.
**Option**: `--save-network-before-autodesign`
----------------------------------------------
**Description**: Dumps the network into a JSON file prior to autodesign.
**Usage**:
.. code-block:: shell-session
gnpy-path-request my_network.json my_services.json --save-network-before-autodesign <FILE.json>
**Functionality**: This option is useful for users who want to inspect the network configuration before any automatic design adjustments are made.
**Option**: `--sim-params`
--------------------------
**Description**: Path to the JSON file containing simulation parameters.
**Usage**:
.. code-block:: shell-session
gnpy-transmission-example my_network.json --sim-params <FILE.json>
**Functionality**: The `--sim-params` option is a command-line argument available in GNPy that allows users to specify a
JSON file containing simulation parameters. This option is crucial for customizing the behavior of the simulation:
the file ``sim_params.json`` contains the tuning parameters used within both the ``gnpy.science_utils.RamanSolver`` and
the ``gnpy.science_utils.NliSolver`` for the evaluation of the Raman profile and the NLI generation, respectively.
The tuning of the parameters is detailed here: :ref:`json input sim-params<sim-params>`.
`gnpy-transmission-example` options
===================================
**Option**: `--show-channels`
-----------------------------
**Description**: Displays the final per-channel OSNR and GSNR summary.
**Usage**:
.. code-block:: shell-session
gnpy-transmission-example my_network.json --show-channels
**Functionality**: This option provides a summary of the optical signal-to-noise ratio (OSNR)
and generalized signal-to-noise ratio (GSNR) for each channel after the simulation.
**Option**: `-pl`, `--plot`
---------------------------
**Description**: Generates plots of the results.
**Usage**:
.. code-block:: shell-session
gnpy-transmission-example my_network.json -pl
**Functionality**: This option allows users to visualize the results of the simulation through graphical plots.
**Option**: `-l`, `--list-nodes`
--------------------------------
**Description**: Lists all transceiver nodes in the network.
**Usage**:
.. code-block:: shell-session
gnpy-transmission-example my_network.json -l
**Functionality**: This option provides a quick way to view all transceiver nodes present in the network topology.
**Option**: `-po`, `--power`
----------------------------
**Description**: Specifies the reference channel power in the span, in dBm.
**Usage**:
.. code-block:: shell-session
gnpy-transmission-example my_network.json -po <value>
**Functionality**: This option allows users to set the input power level for the reference channel used in the simulation.
It replaces the value specified in the `SI` section of the equipment library (:ref:`power_dbm<spectral_info>`).
**Option**: `--spectrum`
------------------------
**Description**: Specifies a user-defined mixed rate spectrum JSON file for propagation.
**Usage**:
.. code-block:: shell-session
gnpy-transmission-example my_network.json --spectrum <FILE.json>
**Functionality**: This option allows users to define a custom spectrum for the simulation, which can
include varying channel rates and configurations. More details here: :ref:`mixed-rate<mixed-rate>`.
Options for `path_requests_run`
===============================
The `gnpy-path-request` script provides a simple path computation function that supports routing, transceiver mode selection, and spectrum assignment.
It supports include and disjoint constraints for the path computation, but does not provide any optimisation.
It requires two mandatory arguments: network file and service file (see :ref:`XLS files<excel-service-sheet>` or :ref:`JSON files<legacy-json>`).
The `gnpy-path-request` script:
- designs the network once and propagates the service requests on this design,
- computes the performance of each request defined in the service file independently from the others, considering full load (based on the request settings),
- assigns spectrum to each request according to the remaining spectrum, on a first-come, first-served basis.
Lack of spectrum leads to blocking, but a performance estimate is still returned for information.
**Option**: `-bi`, `--bidir`
----------------------------
**Description**: Indicates that all demands are bidirectional.
**Usage**:
.. code-block:: shell-session
gnpy-path-request my_network.json my_service.json -e my_equipment.json -bi
**Functionality**: This option allows users to specify that the performance of the service requests should be
computed in both directions (source to destination and destination to source). This forces the 'bidirectional'
attribute to true in the service file, possibly affecting feasibility if one direction is not feasible.
**Option**: `-o`, `--output`
----------------------------
**Description**: Stores computation results requests into a JSON or CSV file.
**Usage**:
.. code-block:: shell-session
gnpy-path-request my_network.json my_service.json -o <FILE.json|FILE.csv>
**Functionality**: This option allows users to save the results of the path requests into a specified output file
for further analysis.
**Option**: `--redesign-per-request`
------------------------------------
**Description**: Redesigns the network for each request using the request as the reference channel
(replaces the `SI` section of the equipment library with the request specifications).
**Usage**:
.. code-block:: shell-session
gnpy-path-request my_network.json my_services.json --redesign-per-request
**Functionality**: This option enables checking different scenarios for design.

View File

@@ -1,271 +0,0 @@
.. _concepts:
*****************************
Simulating networks with GNPy
*****************************
Running simulations with GNPy requires three pieces of information:
- the :ref:`network topology<concepts-topology>`, which describes what the network looks like: the fiber lengths, the amplifiers used, etc.,
- the :ref:`equipment library<concepts-equipment>`, which holds machine-readable datasheets of the equipment used in the network,
- the :ref:`simulation options<concepts-simulation>` holding instructions about what to simulate, and under which conditions.
.. _concepts-topology:
Network Topology
================
The *topology* acts as a "digital self" of the simulated network.
When given a network topology, GNPy can either run a specific simulation as-is, or it can *optimize* the topology before performing the simulation.
A network topology for GNPy is often a generic, mesh network.
This enables GNPy to take into consideration the current spectrum allocation as well as availability and resiliency considerations.
When the time comes to run a particular *propagation* of a signal and its impairments are computed, though, a linear path through the network is used.
For this purpose, the *path* through the network refers to an ordered, acyclic sequence of *nodes* that are processed.
This path is directional, and all "GNPy elements" along the path match the unidirectional part of a real-world network equipment.
.. note::
In practical terms, an amplifier in GNPy refers to an entity with a single input port and a single output port.
A real-world inline EDFA enclosed in a single chassis will be therefore represented as two GNPy-level amplifiers.
The network topology contains not just the physical topology of the network, but also references to the :ref:`equipment library<concepts-equipment>` and a set of *operating parameters* for each entity.
These parameters include the **fiber length** of each fiber, the connector **attenuation losses**, or an amplifier's specific **gain setting**.
The topology is specified via :ref:`XLS files<excel>` or via :ref:`JSON<legacy-json>`.
.. _complete-vs-incomplete:
Fully Specified vs. Partially Designed Networks
-----------------------------------------------
Let's consider a simple triangle topology with three :abbr:`PoPs (Points of Presence)` covering three cities:
.. graphviz::
:layout: neato
:align: center
graph "High-level topology with three PoPs" {
A -- B
B -- C
C -- A
}
In the real world, each city would probably host a ROADM and some transponders:
.. graphviz::
:layout: neato
:align: center
graph "Simplified topology with transponders" {
"ROADM A" [pos="2,2!"]
"ROADM B" [pos="4,2!"]
"ROADM C" [pos="3,1!"]
"Transponder A" [shape=box, pos="0,2!"]
"Transponder B" [shape=box, pos="6,2!"]
"Transponder C" [shape=box, pos="3,0!"]
"ROADM A" -- "ROADM B"
"ROADM B" -- "ROADM C"
"ROADM C" -- "ROADM A"
"Transponder A" -- "ROADM A"
"Transponder B" -- "ROADM B"
"Transponder C" -- "ROADM C"
}
GNPy simulation works by propagating the optical signal over a sequence of elements, which means that one has to add some preamplifiers and boosters.
The amplifiers are, by definition, unidirectional, so the graph becomes quite complex:
.. _topo-roadm-preamp-booster:
.. graphviz::
:layout: neato
:align: center
digraph "Preamps and boosters are explicitly modeled in GNPy" {
"ROADM A" [pos="2,4!"]
"ROADM B" [pos="6,4!"]
"ROADM C" [pos="4,0!"]
"Transponder A" [shape=box, pos="1,5!"]
"Transponder B" [shape=box, pos="7,5!"]
"Transponder C" [shape=box, pos="4,-1!"]
"Transponder A" -> "ROADM A"
"Transponder B" -> "ROADM B"
"Transponder C" -> "ROADM C"
"ROADM A" -> "Transponder A"
"ROADM B" -> "Transponder B"
"ROADM C" -> "Transponder C"
"Booster A C" [shape=triangle, orientation=-150, fixedsize=true, width=0.5, height=0.5, pos="2.2,3.2!", color=red, label=""]
"Preamp A C" [shape=triangle, orientation=0, fixedsize=true, width=0.5, height=0.5, pos="1.5,3.0!", color=red, label=""]
"ROADM A" -> "Booster A C"
"Preamp A C" -> "ROADM A"
"Booster A B" [shape=triangle, orientation=-90, fixedsize=true, width=0.5, height=0.5, pos="3,4.3!", color=red, fontcolor=red, labelloc=b, label="\N\n\n"]
"Preamp A B" [shape=triangle, orientation=90, fixedsize=true, width=0.5, height=0.5, pos="3,3.6!", color=red, fontcolor=red, labelloc=t, label="\n \N"]
"ROADM A" -> "Booster A B"
"Preamp A B" -> "ROADM A"
"Booster C B" [shape=triangle, orientation=-30, fixedsize=true, width=0.5, height=0.5, pos="4.7,0.9!", color=red, label=""]
"Preamp C B" [shape=triangle, orientation=120, fixedsize=true, width=0.5, height=0.5, pos="5.4,0.7!", color=red, label=""]
"ROADM C" -> "Booster C B"
"Preamp C B" -> "ROADM C"
"Booster C A" [shape=triangle, orientation=30, fixedsize=true, width=0.5, height=0.5, pos="2.6,0.7!", color=red, label=""]
"Preamp C A" [shape=triangle, orientation=-30, fixedsize=true, width=0.5, height=0.5, pos="3.3,0.9!", color=red, label=""]
"ROADM C" -> "Booster C A"
"Preamp C A" -> "ROADM C"
"Booster B A" [shape=triangle, orientation=90, fixedsize=true, width=0.5, height=0.5, pos="5,3.6!", labelloc=t, color=red, fontcolor=red, label="\n\N "]
"Preamp B A" [shape=triangle, orientation=-90, fixedsize=true, width=0.5, height=0.5, pos="5,4.3!", labelloc=b, color=red, fontcolor=red, label="\N\n\n"]
"ROADM B" -> "Booster B A"
"Preamp B A" -> "ROADM B"
"Booster B C" [shape=triangle, orientation=-180, fixedsize=true, width=0.5, height=0.5, pos="6.5,3.0!", color=red, label=""]
"Preamp B C" [shape=triangle, orientation=-20, fixedsize=true, width=0.5, height=0.5, pos="5.8,3.2!", color=red, label=""]
"ROADM B" -> "Booster B C"
"Preamp B C" -> "ROADM B"
"Booster A C" -> "Preamp C A"
"Booster A B" -> "Preamp B A"
"Booster C A" -> "Preamp A C"
"Booster C B" -> "Preamp B C"
"Booster B C" -> "Preamp C B"
"Booster B A" -> "Preamp A B"
}
In many regions, the ROADMs are not placed physically close to each other, so the long-haul fiber links (:abbr:`OMS (Optical Multiplex Section)`) are split into individual spans (:abbr:`OTS (Optical Transport Section)`) by in-line amplifiers, resulting in an even more complicated topology graph:
.. graphviz::
:layout: neato
:align: center
digraph "A subset of a real topology with inline amplifiers" {
"ROADM A" [pos="2,4!"]
"ROADM B" [pos="6,4!"]
"ROADM C" [pos="4,-3!"]
"Transponder A" [shape=box, pos="1,5!"]
"Transponder B" [shape=box, pos="7,5!"]
"Transponder C" [shape=box, pos="4,-4!"]
"Transponder A" -> "ROADM A"
"Transponder B" -> "ROADM B"
"Transponder C" -> "ROADM C"
"ROADM A" -> "Transponder A"
"ROADM B" -> "Transponder B"
"ROADM C" -> "Transponder C"
"Booster A C" [shape=triangle, orientation=-166, fixedsize=true, width=0.5, height=0.5, pos="2.2,3.2!", label=""]
"Preamp A C" [shape=triangle, orientation=0, fixedsize=true, width=0.5, height=0.5, pos="1.5,3.0!", label=""]
"ROADM A" -> "Booster A C"
"Preamp A C" -> "ROADM A"
"Booster A B" [shape=triangle, orientation=-90, fixedsize=true, width=0.5, height=0.5, pos="3,4.3!", label=""]
"Preamp A B" [shape=triangle, orientation=90, fixedsize=true, width=0.5, height=0.5, pos="3,3.6!", label=""]
"ROADM A" -> "Booster A B"
"Preamp A B" -> "ROADM A"
"Booster C B" [shape=triangle, orientation=-30, fixedsize=true, width=0.5, height=0.5, pos="4.7,-2.1!", label=""]
"Preamp C B" [shape=triangle, orientation=10, fixedsize=true, width=0.5, height=0.5, pos="5.4,-2.3!", label=""]
"ROADM C" -> "Booster C B"
"Preamp C B" -> "ROADM C"
"Booster C A" [shape=triangle, orientation=20, fixedsize=true, width=0.5, height=0.5, pos="2.6,-2.3!", label=""]
"Preamp C A" [shape=triangle, orientation=-30, fixedsize=true, width=0.5, height=0.5, pos="3.3,-2.1!", label=""]
"ROADM C" -> "Booster C A"
"Preamp C A" -> "ROADM C"
"Booster B A" [shape=triangle, orientation=90, fixedsize=true, width=0.5, height=0.5, pos="5,3.6!", label=""]
"Preamp B A" [shape=triangle, orientation=-90, fixedsize=true, width=0.5, height=0.5, pos="5,4.3!", label=""]
"ROADM B" -> "Booster B A"
"Preamp B A" -> "ROADM B"
"Booster B C" [shape=triangle, orientation=-180, fixedsize=true, width=0.5, height=0.5, pos="6.5,3.0!", label=""]
"Preamp B C" [shape=triangle, orientation=-20, fixedsize=true, width=0.5, height=0.5, pos="5.8,3.2!", label=""]
"ROADM B" -> "Booster B C"
"Preamp B C" -> "ROADM B"
"Inline A C 1" [shape=triangle, orientation=-166, fixedsize=true, width=0.5, pos="2.4,2.2!", label=" \N", color=red, fontcolor=red]
"Inline A C 2" [shape=triangle, orientation=-166, fixedsize=true, width=0.5, pos="2.6,1.2!", label=" \N", color=red, fontcolor=red]
"Inline A C 3" [shape=triangle, orientation=-166, fixedsize=true, width=0.5, pos="2.8,0.2!", label=" \N", color=red, fontcolor=red]
"Inline A C n" [shape=triangle, orientation=-166, fixedsize=true, width=0.5, pos="3.0,-1.1!", label=" \N", color=red, fontcolor=red]
"Booster A C" -> "Inline A C 1"
"Inline A C 1" -> "Inline A C 2"
"Inline A C 2" -> "Inline A C 3"
"Inline A C 3" -> "Inline A C n" [style=dotted]
"Inline A C n" -> "Preamp C A"
"Booster A B" -> "Preamp B A" [style=dotted]
"Booster C A" -> "Preamp A C" [style=dotted]
"Booster C B" -> "Preamp B C" [style=dotted]
"Booster B C" -> "Preamp C B" [style=dotted]
"Booster B A" -> "Preamp A B" [style=dotted]
}
In such networks, GNPy's autodesign feature becomes very useful.
It is possible to connect ROADMs via "tentative links" which will be replaced by a sequence of actual fibers and specific amplifiers.
In other cases where the location of amplifier huts is already known, but the specific EDFA models have not yet been decided, one can put in amplifier placeholders and let GNPy assign the best amplifier.
.. _concepts-equipment:
The Equipment Library
=====================
In order to produce an accurate simulation, GNPy needs to know the physical properties of each entity which affects the optical signal.
Entries in the equipment library correspond to actual real-world, tangible entities.
Unlike a typical :abbr:`NMS (Network Management System)`, GNPy considers not just the active :abbr:`NEs (Network Elements)` such as amplifiers and :abbr:`ROADMs (Reconfigurable Optical Add/Drop Multiplexers)`, but also the passive ones, such as the optical fiber.
As the signal propagates through the network, the largest source of optical impairments is the noise introduced by amplifiers.
An accurate description of the :abbr:`EDFA (Erbium-Doped Fiber Amplifier)` and especially its noise characteristics is required.
GNPy describes this property in terms of the **Noise Figure (NF)** of an amplifier model as a function of its operating point.
The amplifiers compensate power losses induced on the signal in the optical fiber.
The linear losses, however, are just one phenomenon of a multitude of effects that affect the signals in a long fiber run.
While a more detailed description is available :ref:`in the literature<physical-model>`, for the purpose of the equipment library, the description of the *optical fiber* comprises its **linear attenuation coefficient**, a set of parameters for the **Raman effect**, optical **dispersion**, etc.
Signals are introduced into the network via *transponders*.
The required set of parameters describes the physical properties of each supported *mode* of the transponder, including its **symbol rate**, spectral **width**, etc.
In the junctions of the network, *ROADMs* are used for spectrum routing.
GNPy currently does not take into consideration the spectrum filtering penalties of the :abbr:`WSSes (Wavelength Selective Switches)`, but the equipment library nonetheless contains a list of required parameters, such as the attenuation options, so that the network can be properly simulated.
.. _concepts-nf-model:
Amplifier Noise Figure Models
-----------------------------
One of the key parameters of an amplifier is the method to use for computing the Noise Figure (NF).
GNPy supports several different noise models with varying levels of accuracy.
When in doubt, contact your vendor's technical support and ask them to :ref:`contribute their equipment descriptions<extending-edfa>` to GNPy.
The most accurate noise models describe the resulting NF of an EDFA as a third-degree polynomial.
GNPy understands polynomials as a NF-yielding function of the :ref:`gain difference from the optimal gain<ext-nf-model-polynomial-NF>`, or as a function of the input power resulting in an incremental OSNR as used in :ref:`OpenROADM inline amplifiers<ext-nf-model-polynomial-OSNR-OpenROADM>` and :ref:`OpenROADM booster/preamps in the ROADMs<ext-nf-model-noise-mask-OpenROADM>`.
For scenarios where the vendor has not yet contributed an accurate EDFA NF description to GNPy, it is possible to approximate the characteristics via an operator-focused, min-max NF model.
.. _nf-model-min-max-NF:
Min-max NF
^^^^^^^^^^
This is an operator-focused model where performance is defined by the *minimal* and *maximal NF*.
This model is especially suited to modeling a dual-coil EDFA with a VOA in between.
In these amplifiers, the minimal NF is achieved when the EDFA operates at its maximal (and usually optimal, in terms of flatness) gain.
The worst (maximal) NF applies when the EDFA operates at its minimal gain.
This model is suitable for use when the vendor has not provided a more accurate performance description of the EDFA.
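
As an illustration, a minimal sketch of how such an amplifier could be declared in the equipment library, using the ``variable_gain`` type with ``nf_min``/``nf_max`` fields (the ``type_variety`` name and all values below are purely illustrative):

.. code-block:: json

   {
     "type_variety": "vendor_dual_coil_example",
     "type_def": "variable_gain",
     "gain_flatmax": 26,
     "gain_min": 15,
     "p_max": 21,
     "nf_min": 6,
     "nf_max": 10,
     "out_voa_auto": false,
     "allowed_for_design": true
   }

Here ``nf_min`` applies at ``gain_flatmax`` and ``nf_max`` at ``gain_min``, matching the behaviour described above.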
Raman Approximation
^^^^^^^^^^^^^^^^^^^
While GNPy is fully Raman-aware, under certain scenarios it is useful to be able to run a simulation without an accurate Raman description.
For these purposes the :ref:`polynomial NF<ext-nf-model-polynomial-NF>` model with :math:`\text{a} = \text{b} = \text{c} = 0`, and :math:`\text{d} = NF` can be used.
.. _concepts-simulation:
Simulation
==========
When the network model has been instantiated and the physical properties and operational settings of the actual physical devices are known, GNPy can start simulating how the signals propagate through the optical fiber.
This set of input parameters includes options such as the *spectrum allocation*, i.e., the number of channels and their spacing.
Various strategies for network optimization can be provided as well.

View File

@@ -19,8 +19,6 @@
#
import os
import sys
sys.path.insert(0, os.path.abspath('../'))
# -- General configuration ------------------------------------------------
@@ -33,18 +31,10 @@ sys.path.insert(0, os.path.abspath('../'))
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.githubpages',
'sphinxcontrib.bibtex',
'sphinx.ext.graphviz',
'myst_parser',
'sphinx_rtd_theme',
]
myst_enable_extensions = [
"deflist",
"dollarmath",
]
'sphinx.ext.mathjax',
'sphinx.ext.githubpages',
'sphinxcontrib.bibtex',
'pbr.sphinxext',]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -60,15 +50,15 @@ master_doc = 'index'
# General information about the project.
project = 'gnpy'
copyright = '2018 - 2021, Telecom Infra Project - OOPT PSE Group'
author = 'Telecom Infra Project - OOPT PSE Group'
copyright = '2018, Telecom InfraProject - OOPT PSE Group'
author = 'Telecom InfraProject - OOPT PSE Group'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
@@ -87,22 +77,18 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_theme_options = {
'logo': 'images/GNPy-logo.png',
'logo_name': False,
'prev_next_buttons_location': 'bottom',
# Toc options
'collapse_navigation': True,
'sticky_navigation': True,
'navigation_depth': 4,
'includehidden': True,
'titles_only': False
}
html_theme_options = {
'navigation_depth': 4,
}
html_favicon = 'images/GNPy-logo.png'
on_rtd = os.environ.get('READTHEDOCS') == 'True'
if on_rtd:
html_theme = 'default'
html_theme_options = {
'logo_only': True,
}
else:
html_theme = 'alabaster'
html_theme_options = {
'logo': 'images/GNPy-logo.png',
'logo_name': False,
}
html_logo = 'images/GNPy-logo.png'
@@ -115,10 +101,7 @@ html_logo = 'images/GNPy-logo.png'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_css_files = [
'custom.css', # include your custom CSS file
]
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
@@ -135,7 +118,6 @@ html_sidebars = {
]
}
html_secnum_depth = 4
# -- Options for HTMLHelp output ------------------------------------------
@@ -168,7 +150,7 @@ latex_elements = {
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'gnpy.tex', 'gnpy Documentation',
'Telecom Infra Project - OOPT PSE Group', 'manual'),
'Telecom InfraProject - OOPT PSE Group', 'manual'),
]
@@ -199,7 +181,3 @@ autodoc_default_options = {
'private-members': True,
'show-inheritance': True,
}
graphviz_output_format = 'svg'
bibtex_bibfiles = ['biblio.bib']

View File

@@ -1,8 +1,5 @@
.. _excel:
*****************************
Excel (XLS, XLSX) input files
*****************************
=============================
``gnpy-transmission-example`` gives the possibility to use an Excel input file instead of a JSON file. The program will then generate the corresponding JSON file for you.
@@ -10,22 +7,21 @@ The file named 'meshTopologyExampleV2.xls' is an example.
In order to work the excel file MUST contain at least 2 sheets:
- `Nodes`
- `Links`
- Nodes
- Links
(In progress) The File MAY contain additional sheets:
(In progress) The File MAY contain an additional sheet:
- `Eqpt`
- `Service`
- `Roadms`
- Eqt
- Service
.. _excel-nodes-sheet:
`Nodes` sheet
=============
Nodes sheet
-----------
`Nodes` sheet contains nine columns.
Each line represents a 'node' (`ROADM` site or an in line amplifier site `ILA` or a `Fused`)::
Nodes sheet contains nine columns.
Each line represents a 'node' (ROADM site or an in line amplifier site ILA or a Fused)::
City (Mandatory) ; State ; Country ; Region ; Latitude ; Longitude ; Type
@@ -52,15 +48,15 @@ Each line represents a 'node' (`ROADM` site or an in line amplifier site `ILA` o
.. _excel-links-sheet:
Links sheet
===========
-----------
Links sheet must contain sixteen columns::
<-- east cable from a to z --> <-- west from z to a -->
<-- east cable from a to z --> <-- west from z to -->
NodeA ; NodeZ ; Distance km ; Fiber type ; Lineic att ; Con_in ; Con_out ; PMD ; Cable Id ; Distance km ; Fiber type ; Lineic att ; Con_in ; Con_out ; PMD ; Cable Id
`Links` sheet MUST contain all links between nodes defined in Nodes sheet.
Links sheets MUST contain all links between nodes defined in Nodes sheet.
Each line represents a 'bidir link' between two nodes. The two directions are represented on a single line with "east cable from a to z" fields and "west from z to a" fields. Values for 'a to z' may be different from values from 'z to a'.
Since both directions of a bidir 'a-z' link are described on the same line (east and west), the 'z to a' direction MUST NOT be repeated on a different line. If repeated, it will generate another parallel bidir link between the same end nodes.
@@ -87,54 +83,53 @@ and a fiber span from node3 to node6::
- If filled it MUST contain numbers. If empty it is replaced by a default "80" km value.
- If value is below 150 km, it is considered as a single (bidirectional) fiber span.
- If value is over 150 km or if the loss is greater than 28 dB, the autodesign program
will automatically split the span with "_1","_2", ... trailing strings in names.
Splitting threshold can be tuned in ["Span"]["max_length"] and ["Span"]["max_loss"] in
equipment library.
- If value is over 150 km the `gnpy-transmission-example`` program will automatically suppose that intermediate span description are required and will generate fiber spans elements with "_1","_2", ... trailing strings which are not visible in the json output. The reason for the splitting is that current edfa usually do not support large span loss. The current assumption is that links larger than 150km will require intermediate amplification. This value will be revisited when Raman amplification is added”
- **Fiber type** is not mandatory.
If filled it must contain types listed in `eqpt_config.json <gnpy/example-data/eqpt_config.json>`_ in "Fiber" list "type_variety".
If not filled it takes "SSMF" as default value.
- **Lineic att** is not mandatory.
- **Lineic att** is not mandatory.
It is the lineic attenuation expressed in dB/km.
If filled it must contain positive numbers.
If not filled it takes "0.2" dB/km value
- **Con_in**, **Con_out** are not mandatory.
- *Con_in*, *Con_out* are not mandatory.
They are the connector loss in dB at ingress and egress of the fiber spans.
If filled they must contain positive numbers.
If not filled they take "0.5" dB default value.
- **PMD** is not mandatory.
- *PMD* is not mandatory and and is not used yet.
It is the PMD value of the link in ps.
If filled they must contain positive numbers.
If not filled, it takes "0.1" ps value.
- **Cable Id** is not mandatory.
- *Cable Id* is not mandatory.
If filled, it must contain strings with the same constraint as "City" names. Its value is used to differentiate links having the same end points; in this case, different Ids should be used. Cable Ids are not meant to be unique in general.
(in progress)
.. _excel-equipment-sheet:
Eqpt sheet
==========
----------
The equipment sheet (named "Eqpt") is optional.
If provided, it specifies types of boosters and preamplifiers for all ROADM degrees of all ROADM nodes, and for all ILA nodes.
This sheet contains twelve columns::
Eqt sheet is optional. It lists the amplifiers types and characteristics on each degree of the *Node A* line.
Eqpt sheet must contain twelve columns::
<-- east cable from a to z --> <-- west from z to a -->
Node A ; Node Z ; amp type ; att_in ; amp gain ; tilt ; att_out ; delta_p ; amp type ; att_in ; amp gain ; tilt ; att_out ; delta_p
If the sheet is present, it MUST have as many lines as there are egress directions of ROADMs defined in Links Sheet, and all ILAs.
If the sheet is present, it MUST have as many lines as egress directions of ROADMs defined in Links Sheet.
For example, consider the following list of links (A, B and C being a ROADM and amp# ILAs):
For example, consider the following list of links (A,B and C being a ROADM and amp# ILAs)
::
@@ -146,8 +141,8 @@ For example, consider the following list of links (A, B and C being a ROADM and
then Eqpt sheet should contain:
- one line for each ILAs: amp1, amp2, amp3
- one line for each one-degree ROADM (B and C in this example)
- two lines for each two-degree ROADM (just the ROADM A)
- one line for each degree 1 ROADMs B and C
- two lines for ROADM A which is a degree 2 ROADM
::
@@ -177,7 +172,7 @@ This generates a text file meshTopologyExampleV2_eqt_sheet.txt whose content ca
- **Node Z** is mandatory. It is the egress direction from the *Node A* site. Multiple Links between the same Node A and NodeZ is not supported.
- **amp type** is not mandatory.
If filled it must contain types listed in the equipment library, like in the example `eqpt_config.json <gnpy/example-data/eqpt_config.json>`_ in "Edfa" list "type_variety".
If filled it must contain types listed in `eqpt_config.json <gnpy/example-data/eqpt_config.json>`_ in "Edfa" list "type_variety".
If not filled it takes "std_medium_gain" as default value.
If filled with fused, a fused element with 0.0 dB loss will be placed instead of an amplifier. This might be used to avoid booster amplifier on a ROADM direction.
@@ -185,59 +180,23 @@ This generates a text file meshTopologyExampleV2_eqt_sheet.txt whose content ca
If not filled, it will be determined with design rules in the convert.py file.
If filled, it must contain positive numbers.
- **att_in** and **att_out** are not mandatory. They are the value of the attenuator at input and output of amplifier (in dB).
- *att_in* and *att_out* are not mandatory and are not used yet. They are the value of the attenuator at input and output of amplifier (in dB).
If filled they must contain positive numbers.
- **tilt**, in dB, is not mandatory. It is the target gain tilt over the full amplifier bandwidth and is defined with regard to wavelength, i.e. negative tilt means lower gain
for higher wavelengths (lower frequencies). If not filled, the default value is 0.
- *tilt* --TODO--
- **delta_p**, in dB, is not mandatory. If filled it is used to set the output target power per channel at the output of the amplifier, if power_mode is True. The output power is then set to power_dbm + delta_power.
- **delta_p**, in dBm, is not mandatory. If filled it is used to set the output target power per channel at the output of the amplifier, if power_mode is True. The output power is then set to power_dbm + delta_power.
.. _excel-roadms-sheet:
Roadms sheet
============
The ROADM sheet (named "Roadms") is optional.
If provided, it can be used to specify:
- per channel power target on a specific ROADM degree (*per_degree_pch_out_db*),
- ROADM type variety,
- impairment ID (identifier) on a particular ROADM path (from degree - to degree).
This sheet contains six columns::

   Node A ; Node Z ; per degree target power (dBm) ; type_variety ; from degrees ; from degree to degree impairment id
- **Node A** is mandatory. Name of the ROADM node (as listed in Nodes sheet).
Must be a 'ROADM' (Type attribute in the Nodes sheet); its number of occurrences may equal its degree.
- **Node Z** is mandatory. Egress direction from the *Node A* ROADM site. Multiple links between the same Node A
and Node Z are not supported.
- **per degree target power (dBm)** (optional).
If filled it must contain a value in dBm corresponding to :ref:`per_degree_pch_out_db<roadm_json_instance>` on the **Node Z** degree.
Defaults to equipment library value if not filled.
- **type_variety** (optional). Must be the same for all ROADM entries if filled,
and defined in the :ref:`equipment library<roadm>`. Defaults to 'default' if not filled.
- **from degrees** (optional): list of node names separated by ' | '. Names must be present in the Nodes sheet.
Together with Node Z, they define a list of internal paths in the ROADM to which the impairment IDs apply.
- **from degree to degree impairment id** (optional): list of impairment IDs separated by ' | '. Must be filled
if **from degrees** is defined.
The impairment IDs must be defined in the equipment library and be of "express" type.
# to be completed #
(in progress)
.. _excel-service-sheet:
Service sheet
=============
-------------
Service sheet is optional. It lists the services for which path and feasibility must be computed with ``gnpy-path-request``.
Service sheet is optional. It lists the services for which path and feasibility must be computed with ``gnpy-path_request``.
Service sheet must contain 11 columns::
@@ -249,7 +208,7 @@ Service sheet must contain 11 columns::
- **Destination** is mandatory. It is the name of the destination node (as listed in Nodes sheet). Source MUST be a ROADM node. (TODO: relax this and accept trx entries)
- **TRX type** is mandatory. It is the variety type of the transceiver to be used for the propagation simulation. These modes MUST be defined in the equipment library. The format of the mode is used as the name of the mode. (TODO: maybe add another mode id on Transceiver library ?). In particular the mode selection defines the channel baudrate to be used for the propagation simulation.
- **TRX type** is mandatory. They are the variety type and selected mode of the transceiver to be used for the propagation simulation. These modes MUST be defined in the equipment library. The format of the mode is used as the name of the mode. (TODO: maybe add another mode id on Transceiver library ?). In particular the mode selection defines the channel baudrate to be used for the propagation simulation.
- **mode** is optional. If not specified, the program will search for the mode of the defined transponder with the highest baudrate fitting within the spacing value.

View File

@@ -1,173 +0,0 @@
.. _extending:
****************************************
Extending GNPy with vendor-specific data
****************************************
GNPy ships with an :ref:`equipment library<concepts-equipment>` containing machine-readable datasheets of networking equipment.
Vendors who are willing to contribute descriptions of their supported products are encouraged to `submit a patch <https://review.gerrithub.io/Documentation/intro-gerrit-walkthrough-github.html>`__ -- or just :ref:`get in touch with us directly<contributing>`.
This chapter discusses options for modeling the performance of :ref:`EDFA amplifiers<extending-edfa>`, :ref:`Raman amplifiers<extending-raman>`, :ref:`transponders<extending-transponder>` and :ref:`ROADMs<extending-roadm>`.
.. _extending-edfa:
EDFAs
=====
An accurate description of the :abbr:`EDFA (Erbium-Doped Fiber Amplifier)` and especially its noise characteristics is required.
GNPy describes this property in terms of the **Noise Figure (NF)** of an amplifier model as a function of its operating point.
GNPy supports several different :ref:`noise models<concepts-nf-model>`, and vendors are encouraged to pick one which describes performance of their equipment most accurately.
.. _ext-nf-model-polynomial-NF:
Polynomial NF
-------------
This model computes the NF as a function of the difference between the optimal gain and the current gain.
The NF is expressed as a third-degree polynomial:
.. math::
f(x) &= \text{a}x^3 + \text{b}x^2 + \text{c}x + \text{d}
\text{NF} &= f(G - G_\text{max})
This model can be also used for fixed-gain fixed-NF amplifiers.
In that case, use:
.. math::
a = b = c &= 0
d &= \text{NF}
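
As a hedged sketch only: in the example equipment library, such a polynomial is typically carried by an ``advanced_model`` amplifier entry whose detailed configuration (including an ``nf_fit_coeff`` list holding the a/b/c/d coefficients) lives in a separate file referenced by ``advanced_config_from_json``; the names and values below are assumptions to be checked against ``eqpt_config.json``:

.. code-block:: json

   {
     "type_variety": "vendor_high_detail_example",
     "type_def": "advanced_model",
     "gain_flatmax": 25,
     "gain_min": 15,
     "p_max": 21,
     "advanced_config_from_json": "vendor_advanced_config.json",
     "allowed_for_design": false
   }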
.. _ext-nf-model-polynomial-OSNR-OpenROADM:
Polynomial OSNR (OpenROADM-style for inline amplifier)
------------------------------------------------------
This model is useful for amplifiers compliant to the OpenROADM specification for ILA (an in-line amplifier).
The amplifier performance is evaluated via its incremental OSNR, which is a function of the input power.
.. math::
\text{OSNR}_\text{inc}(P_\text{in}) = \text{a}P_\text{in}^3 + \text{b}P_\text{in}^2 + \text{c}P_\text{in} + \text{d}
.. _ext-nf-model-noise-mask-OpenROADM:
Noise mask (OpenROADM-style for combined preamp and booster)
------------------------------------------------------------
Unlike GNPy, which simulates the preamplifier and the booster separately as two amplifiers for best accuracy, the OpenROADM specification mandates a certain performance level for a combination of these two amplifiers.
For the express path, the effective noise mask comprises the preamplifier and the booster.
When terminating a channel, the same effective noise mask is mandated for a combination of the preamplifier and the drop stage.
GNPy emulates this specification via two special NF models:
- The ``openroadm_preamp`` NF model for preamplifiers.
This NF model provides all of the linear impairments to the signal, including those which are incurred by the booster in a real network.
- The ``openroadm_booster`` NF model is a special "zero noise" faux amplifier in place of the booster.
.. _ext-nf-model-min-max-NF:
Min-max NF
----------
When the vendor prefers not to share the amplifier description in full detail, GNPy also supports describing the NF characteristics via the *minimal* and *maximal NF*.
This approximates a more accurate polynomial description reasonably well for some models of a dual-coil EDFA with a VOA in between.
In these amplifiers, the minimal NF is achieved when the EDFA operates at its maximal (and usually optimal, in terms of flatness) gain.
The worst (maximal) NF applies when the EDFA operates at the minimal gain.
.. _ext-nf-model-dual-stage-amplifier:
Dual-stage
----------
A dual-stage amplifier combines two distinct amplifiers.
Vendors which provide an accurate description of their preamp and booster stages separately can use the dual-stage model for an aggregate description of the whole amplifier.
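
A hedged sketch of what a dual-stage entry might look like, assuming a ``dual_stage`` type whose ``preamp_variety`` and ``booster_variety`` fields reference amplifiers defined elsewhere in the library (all names and values below are illustrative):

.. code-block:: json

   {
     "type_variety": "vendor_dual_stage_example",
     "type_def": "dual_stage",
     "gain_min": 25,
     "preamp_variety": "vendor_low_gain_preamp",
     "booster_variety": "vendor_medium_gain_booster",
     "allowed_for_design": true
   }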
.. _ext-nf-model-advanced:
Advanced Specification
----------------------
The amplifier performance can be further described in terms of gain ripple, NF ripple, and the dynamic gain tilt.
When provided, the amplifier characteristic is fine-tuned as a function of carrier frequency. Note that in this advanced
specification, tilt is defined vs. frequency, while the tilt_target specified in EDFA instances is defined vs. wavelength.
.. _extending-raman:
Raman Amplifiers
================
An accurate simulation of Raman amplification requires knowledge of:
* the *power* and *wavelength* of all Raman pumping lasers,
* the *direction*, whether it is co-propagating or counter-propagating,
* the Raman efficiency of the fiber,
* the fiber temperature.
Under certain scenarios it is useful to be able to run a simulation without an accurate Raman description.
For these purposes, it is possible to approximate a Raman amplifier via a fixed-gain EDFA with the :ref:`polynomial NF<ext-nf-model-polynomial-NF>` model using :math:`\text{a} = \text{b} = \text{c} = 0`, and a desired effective :math:`\text{d} = NF`.
This is also useful to quickly approximate a hybrid EDFA+Raman amplifier.
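
In practice this approximation can be written as a ``fixed_gain`` amplifier entry whose ``nf0`` carries the desired effective NF, following the ``fixed_gain`` format used elsewhere in this documentation (the name and values below are illustrative):

.. code-block:: json

   {
     "type_variety": "approx_hybrid_raman_edfa",
     "type_def": "fixed_gain",
     "gain_flatmax": 25,
     "gain_min": 20,
     "p_max": 21,
     "nf0": 5.5,
     "allowed_for_design": false
   }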
.. _extending-transponder:
Transponders
============
Since transponders are usually capable of operating in a variety of modes, these are described separately.
A *mode* usually refers to a particular performance point that is defined by a combination of the symbol rate, modulation format, and :abbr:`FEC (Forward Error Correction)`.
The following data are required for each mode:
``bit_rate``
Data bit rate, in :math:`\text{bits}\times s^{-1}`.
``baud_rate``
Symbol modulation rate, in :math:`\text{baud}`.
``OSNR``
Minimal required OSNR for the receiver. In :math:`\text{dB}`
``tx-osnr``
Initial OSNR at the transmitter's output. In :math:`\text{dB}`
``min-spacing``
Minimal grid spacing, i.e., an effective channel spectral bandwidth.
In :math:`\text{Hz}`.
``roll-off``
Roll-off parameter (:math:`\beta`) of the TX pulse shaping filter.
This assumes a raised-cosine filter.
``rx-power-min`` and ``rx-power-max``
(work in progress) The allowed range of power at the receiver.
In :math:`\text{dBm}`.
``penalties``
Impairments such as Chromatic Dispersion (CD), Polarization Mode Dispersion (PMD), and Polarization Dispersion Loss (PDL)
result in penalties at the receiver. The receiver's ability to handle these impairments can be defined for each mode as
a list of {impairment: in defined units, 'penalty_value' in dB} (see `transceiver section here <json.rst#_transceiver>`).
Maximum allowed CD, maximum allowed PMD, and maximum allowed PDL should be listed there with corresponding penalties.
Impairments experienced during propagation are linearly interpolated between given points to obtain the corresponding penalty.
The accumulated penalties are subtracted from the path GSNR before comparing with the minimum required OSNR.
Impairments: PMD in :math:`\text{ps}`, CD in :math:`\text{ps/nm}`, PDL in :math:`\text{dB}`, penalty_value in :math:`\text{dB}`
GNPy does not directly track the FEC performance, so the type of chosen FEC is likely indicated in the *name* of the selected transponder mode alone.
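
As a hedged illustration only (key spellings should be verified against the example equipment library, e.g. ``eqpt_config.json``), a single transponder mode could be described along these lines:

.. code-block:: json

   {
     "format": "example 100G mode",
     "baud_rate": 32e9,
     "bit_rate": 100e9,
     "OSNR": 11,
     "tx_osnr": 45,
     "min_spacing": 50e9,
     "roll_off": 0.15,
     "penalties": [
       {"chromatic_dispersion": 80000, "penalty_value": 0.5},
       {"pmd": 120, "penalty_value": 0.5},
       {"pdl": 6, "penalty_value": 1.0}
     ],
     "cost": 1
   }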
.. _extending-roadm:
ROADMs
======
In a :abbr:`ROADM (Reconfigurable Add/Drop Multiplexer)`, GNPy simulates the impairments of the preamplifiers and boosters of line degrees :ref:`separately<topo-roadm-preamp-booster>`.
The set of parameters for each ROADM model therefore includes:
``add-drop-osnr``
OSNR penalty introduced by the Add and Drop stages of this ROADM type.
``target-channel-out-power``
Per-channel target TX power towards the egress amplifier.
Within GNPy, a ROADM is expected to attenuate any signal that enters the ROADM node to this level.
This can be overridden on a per-link in the network topology.
Targets can be set using power or power spectral density (see `roadm section here <json.rst#__roadm>`)
``pmd``
Polarization mode dispersion (PMD) penalty of the express path.
In :math:`\text{ps}`.
Provisions are in place to define the list of all allowed booster and preamplifier types.
This is useful for specifying constraints on what amplifier modules fit into ROADM chassis, and when using fully disaggregated ROADM topologies as well.
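
For illustration, a hedged sketch of a ROADM model entry in the equipment library; field names such as ``add_drop_osnr``, ``target_pch_out_db`` and the ``restrictions`` lists follow the example library but should be treated as assumptions here:

.. code-block:: json

   {
     "type_variety": "example_roadm",
     "add_drop_osnr": 38,
     "pmd": 0,
     "target_pch_out_db": -20,
     "restrictions": {
       "preamp_variety_list": [],
       "booster_variety_list": []
     }
   }

Empty ``restrictions`` lists would mean that any amplifier from the library may be used as a booster or preamplifier for this ROADM type.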

View File

@@ -7,4 +7,3 @@
.. automodule:: gnpy.tools.json_io
.. automodule:: gnpy.tools.plots
.. automodule:: gnpy.tools.service_sheet
.. automodule:: gnpy.tools.worker_utils

View File

@@ -2,8 +2,8 @@
API Reference Documentation
***************************
GNPy package
============
``gnpy`` package
================
.. automodule:: gnpy

Binary file not shown.

Image preview not shown (288 KiB).
File diff suppressed because one or more lines are too long.
Image preview not shown (478 KiB).

View File

@@ -1,6 +1,5 @@
************************************
GNPy: Optical Route Planning Library
************************************
=====================================================================
`GNPy <http://github.com/telecominfraproject/gnpy>`_ is an open-source,
community-developed library for building route planning and optimization tools
@@ -8,27 +7,15 @@ in real-world mesh optical networks. It is based on the Gaussian Noise Model.
.. toctree::
:maxdepth: 4
:caption: Contents
intro
concepts
install
cli_options
amplifier_models_description
json
json_instance_examples
excel
extending
about-project
model
gnpy-api
release-notes
publications
genindex
modindex
Indices and tables
------------------
==================
* :ref:`genindex`
* :ref:`modindex`

View File

@@ -1,6 +1,5 @@
***************
Installing GNPy
***************
---------------
There are several methods on how to obtain GNPy.
The easiest option for a non-developer is probably going via our :ref:`Docker images<install-docker>`.
@@ -10,7 +9,7 @@ Note that this needs a :ref:`working installation of Python<install-python>`, fo
.. _install-docker:
Using prebuilt Docker images
============================
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Our `Docker images <https://hub.docker.com/r/telecominfraproject/oopt-gnpy>`_ contain everything needed to run all examples from this guide.
Docker transparently fetches the image over the network upon first use.
@@ -36,10 +35,10 @@ Remove that directory if you want to start from scratch.
.. _install-python:
Using Python on your computer
=============================
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
**Note**: `gnpy` supports Python 3 only. Python 2 is not supported.
`gnpy` requires Python ≥3.8
`gnpy` requires Python ≥3.6
**Note**: the `gnpy` maintainers strongly recommend the use of Anaconda for
managing dependencies.
@@ -85,12 +84,12 @@ exact version of Python you are using.
$ which python # check which Python executable is used
/path/to/anaconda/bin/python
$ python -V # check your Python version
Python 3.8.0 :: Anaconda, Inc.
Python 3.6.5 :: Anaconda, Inc.
.. _install-pip:
Installing the Python package
-----------------------------
*****************************
From within your Anaconda Python 3 environment, you can clone the master branch
of the `gnpy` repo and install it with:
@@ -99,7 +98,7 @@ of the `gnpy` repo and install it with:
$ git clone https://github.com/Telecominfraproject/oopt-gnpy # clone the repo
$ cd oopt-gnpy
$ pip install --editable . # note the trailing dot
$ python setup.py develop
To test that `gnpy` was successfully installed, you can run this command. If it
executes without a ``ModuleNotFoundError``, you have successfully installed

View File

@@ -1,97 +0,0 @@
.. _intro:
************
Introduction
************
``gnpy`` is a library for building route planning and optimization tools.
It ships with a number of example programs. Release versions will ship with
fully-functional programs.
**Note**: *If you are a network operator or involved in route planning and
optimization for your organization, please contact project maintainers
Esther Le Rouzic <esther.lerouzic@orange.com>, Andrea D'Amico <adamico@nec-labs.com>.
gnpy is looking for users with
specific, delineated use cases to drive requirements for future
development.*
This example demonstrates how GNPy can be used to check the expected SNR at the end of the line by varying the channel input power,
or to run a planning script to check SNR of several services:
.. image:: images/gnpy-transmission-example.svg
:width: 100%
:align: left
:alt: Running a simple simulation example
By default, the gnpy-transmission-example script operates on a single span network defined in
`gnpy/example-data/edfa_example_network.json <https://github.com/Telecominfraproject/oopt-gnpy/blob/master/gnpy/example-data/edfa_example_network.json>`_
You can specify a different network at the command line as follows. For
example, to use the CORONET Global network defined in
`gnpy/example-data/CORONET_Global_Topology.json <https://github.com/Telecominfraproject/oopt-gnpy/blob/master/gnpy/example-data/CORONET_Global_Topology.json>`_:
.. code-block:: shell-session
$ gnpy-transmission-example $(gnpy-example-data)/CORONET_Global_Topology.json
It is also possible to use an Excel file input (for example
`gnpy/example-data/CORONET_Global_Topology.xls <https://github.com/Telecominfraproject/oopt-gnpy/blob/master/gnpy/example-data/CORONET_Global_Topology.xls>`_).
The Excel file will be processed into a JSON file with the same prefix.
Further details about the Excel data structure are available `in the documentation <excel.rst>`__.
The main transmission example will calculate the average signal OSNR and SNR
across network elements (transceiver, ROADMs, fibers, and amplifiers)
between two transceivers selected by the user. Additional details are provided by doing ``gnpy-transmission-example -h``. (By default, for the CORONET Global
network, it will show the transmission of spectral information between Abilene and Albany)
This script calculates the average signal OSNR = |OSNR| and SNR = |SNR|.
.. |OSNR| replace:: P\ :sub:`ch`\ /P\ :sub:`ase`
.. |SNR| replace:: P\ :sub:`ch`\ /(P\ :sub:`nli`\ +\ P\ :sub:`ase`)
|Pase| is the amplified spontaneous emission noise, and |Pnli| the non-linear
interference noise.
.. |Pase| replace:: P\ :sub:`ase`
.. |Pnli| replace:: P\ :sub:`nli`
Further Instructions for Use
============================
Simulations are driven by a set of `JSON <json.rst>`__ or `XLS <excel.rst>`__ files.
The ``gnpy-transmission-example`` script propagates a spectrum of channels at 32 Gbaud, 50 GHz spacing and 0 dBm/channel.
Launch power in fiber spans can be overridden by using the ``--power`` argument.
Spectrum information is not yet parametrized but can be modified directly in the ``eqpt_config.json`` (via the ``SpectralInformation`` -SI- structure) to accommodate any baud rate or spacing.
The number of channels is computed based on the ``spacing`` and ``f_min``, ``f_max`` values.
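
A hedged sketch of such an ``SI`` block (the key names follow the example ``eqpt_config.json`` but should be treated as assumptions here; values are illustrative):

.. code-block:: json

   "SI": [{
     "f_min": 191.35e12,
     "f_max": 196.1e12,
     "baud_rate": 32e9,
     "spacing": 50e9,
     "power_dbm": 0,
     "roll_off": 0.15,
     "tx_osnr": 40,
     "sys_margins": 2
   }]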
Experimental support for Raman amplification is available:
.. code-block:: shell-session
$ gnpy-transmission-example \
$(gnpy-example-data)/raman_edfa_example_network.json \
--sim $(gnpy-example-data)/sim_params.json --show-channels
Configuration of Raman pumps (their frequencies, power and pumping direction) is done via the `RamanFiber element in the network topology <https://github.com/Telecominfraproject/oopt-gnpy/blob/master/gnpy/example-data/raman_edfa_example_network.json>`_.
General numeric parameters for simulation control are provided in the `gnpy/example-data/sim_params.json <https://github.com/Telecominfraproject/oopt-gnpy/blob/master/gnpy/example-data/sim_params.json>`_.
Use ``gnpy-path-request`` to request several paths at once:
.. code-block:: shell-session
$ cd $(gnpy-example-data)
$ gnpy-path-request -o output_file.json \
meshTopologyExampleV2.xls meshTopologyExampleV2_services.json
This program operates on a network topology (`JSON <json.rst>`__ or `Excel <excel.rst>`__ format), processing the list of service requests (JSON or XLS again).
The service requests and reply formats are based on the `draft-ietf-teas-yang-path-computation-01 <https://tools.ietf.org/html/draft-ietf-teas-yang-path-computation-01>`__ with custom extensions (e.g., for transponder modes).
An example of the JSON input is provided in file `service-template.json`, while results are shown in `path_result_template.json`.
Important note: ``gnpy-path-request`` is not a network dimensioning tool: each service does not reserve spectrum or occupy resources such as transponders. It only computes path feasibility, assuming the spectrum (between defined frequencies) is loaded with "nb of channels" spaced by the "spacing" value specified in the system parameters of the service file, each channel having the same characteristics (baud rate, format, ...) as the service transponder. The transceiver element acts as a "logical starting/stopping point" for the spectral information propagation. At that point it is not meant to represent the capacity of add/drop ports.
As a result, the transponder type is not part of the network info; it belongs to the list of service requests.
The current version includes a spectrum assignment feature that computes a candidate spectrum assignment for each service based on a first-fit policy. Spectrum is assigned based on the service's spacing value, path_bandwidth value and the selected transceiver mode. This spectrum assignment includes a basic capacity planning capability, so the spectrum resource is limited by the frequency min and max values defined for the links. If the requested services exceed the link spectrum capacity, the feasibility of additional services is still computed, but they are marked as blocked for spectrum reasons.
OpenROADM networks can be simulated via ``gnpy/example-data/eqpt_config_openroadm_*.json`` -- see ``gnpy/example-data/Sweden_OpenROADM*_example_network.json`` as an example.

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@@ -1,11 +1,8 @@
.. _physical-model:
***************************
Physical Model used in GNPy
***************************
===========================
QoT-E including ASE noise and NLI accumulation
==============================================
----------------------------------------------
The operations of PSE simulative framework are based on the capability to
estimate the QoT of one or more channels operating lightpaths over a given
@@ -84,7 +81,7 @@ ps/nm/km, the analytical approximation ensures an excellent accuracy
with a computational time compatible with real-time operations.
The Gaussian Noise Model to evaluate the NLI
============================================
--------------------------------------------
As previously stated, fiber propagation of multilevel modulation formats
relying on the polarization-division-multiplexing generates impairments that
@@ -127,9 +124,9 @@ that can be easily evaluated extending the FWM theory from a set of discrete
tones - the standard FWM theory introduced back in the 90s by Inoue
:cite:`Innoue-FWM`- to a continuity of tones, possibly spectrally shaped.
Signals propagating in the fiber are not equivalent to Gaussian noise, but
thanks to the absence of in-line compensation for chromatic dispersion, the
thanks to the absence of in-line compensation for choromatic dispersion, the
become so, over short distances. So, the Gaussian noise model with incoherent
accumulation of NLI has extensively proved to be a quick yet accurate and
accumulation of NLI has estensively proved to be a quick yet accurate and
conservative tool to estimate propagation impairments of fiber propagation.
Note that the GN-model has not been derived with the aim of an *exact*
performance estimation, but to pursue a conservative performance prediction.
@@ -146,4 +143,4 @@ Raman Scattering in order to give a proper estimation for all channels
:cite:`cantono2018modeling`. This will be the main upgrade required within the
PSE framework.
.. bibliography::
.. bibliography:: biblio.bib

View File

@@ -1,25 +0,0 @@
.. _publications:
************
Publications
************
Below is a chronological list of notable publications that emerged from the PSE group's collaborative work.
These articles detail the evolution of GNPy and confirm its performance through experimental trials:
- `G. Grammel, V. Curri, and J. Auge, "Physical Simulation Environment of The Telecommunications Infrastructure Project (TIP)," in Optical Fiber Communication Conference, OSA Technical Digest (online) (Optica Publishing Group, 2018), paper M1D.3. <https://opg.optica.org/abstract.cfm?uri=OFC-2018-M1D.3>`_
- `B. D. Taylor, G. Goldfarb, S. Bandyopadhyay, V. Curri, and H. Schmidtke, "Towards a Route Planning Tool for Open Optical Networks in the Telecom Infrastructure Project," in Optical Fiber Communication Conference, OSA Technical Digest (online) (Optica Publishing Group, 2018), paper Tu3E.4. <https://opg.optica.org/abstract.cfm?uri=OFC-2018-Tu3E.4>`_
- `M. Filer, M. Cantono, A. Ferrari, G. Grammel, G. Galimberti, and V. Curri, "Multi-Vendor Experimental Validation of an Open Source QoT Estimator for Optical Networks," J. Lightwave Technol. 36, 3073-3082 (2018). <https://opg.optica.org/jlt/abstract.cfm?uri=jlt-36-15-3073>`_
- `J. Auge, G. Grammel, E. le Rouzic, V. Curri, G. Galimberti, and J. Powell, "Open optical network planning demonstration," in Optical Fiber Communication Conference (OFC) 2019, OSA Technical Digest (Optica Publishing Group, 2019), paper M3Z.9. <https://opg.optica.org/abstract.cfm?uri=OFC-2019-M3Z.9>`_
- `J. Kundrát, A. Campanella, E. Le Rouzic, A. Ferrari, O. Havliš, M. Hažlinský, G. Grammel, G. Galimberti, and V. Curri, "Physical-Layer Awareness: GNPy and ONOS for End-to-End Circuits in Disaggregated Networks," in Optical Fiber Communication Conference (OFC) 2020, OSA Technical Digest (Optica Publishing Group, 2020), paper M3Z.17. <https://opg.optica.org/abstract.cfm?uri=ofc-2020-m3z.17>`_
- `A. Ferrari, M. Filer, K. Balasubramanian, Y. Yin, E. Le Rouzic, J. Kundrát, G. Grammel, G. Galimberti, and V. Curri, "Experimental Validation of an Open Source Quality of Transmission Estimator for Open Optical Networks," in Optical Fiber Communication Conference (OFC) 2020, OSA Technical Digest (Optica Publishing Group, 2020), paper W3C.2. <https://opg.optica.org/abstract.cfm?uri=ofc-2020-W3C.2>`_
- `A. Ferrari, M. Filer, K. Balasubramanian, Y. Yin, E. Le Rouzic, J. Kundrát, G. Grammel, G. Galimberti, and V. Curri, "GNPy: an open source application for physical layer aware open optical networks," J. Opt. Commun. Netw. 12, C31-C40 (2020). <https://opg.optica.org/jocn/fulltext.cfm?uri=jocn-12-6-C31&id=429003>`_
- `A. Ferrari, K. Balasubramanian, M. Filer, Y. Yin, E. Le Rouzic, J. Kundrát, G. Grammel, G. Galimberti, and V. Curri, "Softwarized Optical Transport QoT in Production Optical Network: a Brownfield Validation," 2020 European Conference on Optical Communications (ECOC), Brussels, Belgium, 2020. <https://ieeexplore.ieee.org/document/9333280>`_
- `A. Ferrari, K. Balasubramanian, M. Filer, Y. Yin, E. Le Rouzic, J. Kundrát, G. Grammel, G. Galimberti, and V. Curri, "Assessment on the in-field lightpath QoT computation including connector loss uncertainties," in Journal of Optical Communications and Networking, vol. 13, no. 2, pp. A156-A164, February 2021. <https://ieeexplore.ieee.org/document/9308057>`_
- `J. Kundrát, E. Le Rouzic, J. Mårtensson, A. Campanella, O. Havliš, A. DAmico, G. Grammel, G. Galimberti, V. Curri, and J. Vojtěch, "GNPy & YANG: Open APIs for End-to-End Service Provisioning in Optical Networks," in Optical Fiber Communication Conference (OFC) 2021, P. Dong, J. Kani, C. Xie, R. Casellas, C. Cole, and M. Li, eds., OSA Technical Digest (Optica Publishing Group, 2021), paper M1B.6. <https://opg.optica.org/abstract.cfm?uri=ofc-2021-M1B.6>`_
- `A. DAmico, E. London, B. Le Guyader, F. Frank, E. Le Rouzic, E. Pincemin, N. Brochier, and V. Curri, "GNPy experimental validation on flex-grid, flex-rate WDM optical transport scenarios," in Optical Fiber Communication Conference (OFC) 2021, P. Dong, J. Kani, C. Xie, R. Casellas, C. Cole, and M. Li, eds., OSA Technical Digest (Optica Publishing Group, 2021), paper W1G.2. <https://opg.optica.org/abstract.cfm?uri=ofc-2021-W1G.2>`_
- `E. Virgillito, R. Braun, D. Breuer, A. Gladisch, V. Curri, and G. Grammel, "Testing TIP Open Source Solutions in Deployed Optical Networks," in Optical Fiber Communication Conference (OFC) 2021, P. Dong, J. Kani, C. Xie, R. Casellas, C. Cole, and M. Li, eds., OSA Technical Digest (Optica Publishing Group, 2021), paper F1C.3. <https://opg.optica.org/abstract.cfm?uri=ofc-2021-F1C.3>`_
- `A. DAmico, E. London, B. Le Guyader, F. Frank, E. Le Rouzic, E. Pincemin, N. Brochier, and V. Curri, "Experimental validation of GNPy in a multi-vendor flex-grid flex-rate WDM optical transport scenario," J. Opt. Commun. Netw. 14, 79-88 (2022). <https://opg.optica.org/jocn/fulltext.cfm?uri=jocn-14-3-79&id=466355>`_
- `J. Kundrát, E. Le Rouzic, J. Mårtensson, S. Melin, A. DAmico, G. Grammel, G. Galimberti, and V. Curri, "GNPy: Lessons Learned and Future Plans [Invited]," in European Conference on Optical Communication (ECOC) 2022, J. Leuthold, C. Harder, B. Offrein, and H. Limberger, eds., Technical Digest Series (Optica Publishing Group, 2022), paper We3B.6. <https://opg.optica.org/abstract.cfm?uri=ECEOC-2022-We3B.6>`_
- `G. Grammel, J. Kundrat, E. Le Rouzic, S. Melin, V. Curri, A. D'Amico, R. Manzotti, "Open Optical Networks: the good, the bad and the ugly," 49th European Conference on Optical Communications (ECOC 2023), Hybrid Conference, Glasgow, UK, 2023. <https://ieeexplore.ieee.org/document/10484723>`_
- `A. DAmico, V. Gatto, A. Nespola, G. Borraccini, Y. Jiang, P. Poggiolini, E. Le Rouzic, A. M. L. de Lerma, G. Grammel, R. Manzotti, V. Curri, "GNPy Experimental Validation in a C+L Multiband Optical Multiplex Section," 2024 24th International Conference on Transparent Optical Networks (ICTON), Bari, Italy, 2024. <https://ieeexplore.ieee.org/document/10648172>`_

View File

@@ -1,527 +0,0 @@
.. _release-notes:
******************
Release change log
******************
Each release introduces some changes and new features.
(prepare text for next release)
v2.13
=====
**Environment**
The windows-2019 environment is no longer supported.
**Yang Conversion Utilities**
This release introduces new conversion utilities to facilitate conversion between YANG and legacy formats,
ensuring full compatibility with GNPy. The "legacy" format also benefits from the YANG validation for
a stricter verification of input files.
Console Script for Yang Conversion: Added a new command-line script to perform Yang format conversions easily.
**Design Enhancements**
This release adds the ability to parametrize power target calculations, allowing customization of reference
span loss and deviation ratios. It implements the use of a reference channel per OMS (Optical Multiplex Section)
instead of total power for design calculations, improving accuracy and performance.
It also includes spacing information in design band data to assist in maximum power computation for EDFA
target computation during autodesign.
**Excel handling**
XLSX files are now read with the openpyxl library (while XLS files are still read with the xlrd library). The latest release of
xlrd is supported, which solves compatibility issues with the Anaconda install.
v2.12
=====
**Important Changes:**
The default values for the EDFA configuration, including frequency range, gain ripple, noise figure ripple, and dynamic gain tilt,
are now hardcoded in parameters.py and are no longer read from the default_edfa_config.json file (the file has been removed).
However, users can define their own custom parameters using the default_config_from_json variable, which should be populated with a file name containing the desired parameter description. This applies to both variable_gain and fixed_gain amplifier types.
This change streamlines the configuration process but requires users to explicitly set parameters through the new
mechanism (via the --extra-config option) if the default values do not suit their needs.
v2.11.1
-------
**Environment**
The macOS-12 environment is no longer supported.
**per degree impairment enabled in xls input**
This release now reads per-degree roadm-path impairments from the Roadms sheet.
Several optional columns are added: 'type_variety', 'from degrees',
and 'from degree to degree impairment id'.
- 'from degrees' can contain a list of degrees separated with ' | '; in that case the
'from degree to degree impairment id' must contain a list of ids of the same
length.
Impairment ids are expected to be defined in the ROADM equipment library, and
each 'from degree' must be among the previous nodes of this ROADM.
**optimizing computation speed**
Path computation is skipped if the provided include nodes already form
a complete explicit path (this speeds up simulation).
v2.11
=====
**New feature**
A new type_def for amplifiers has been introduced: multi_band. This allows the definition of a
multiband amplifier site composed of several amplifiers, one per band (a typical application is C+L transmission). The
release also includes autodesign for links (Optical Multiplex Section, OMS) composed of multi_band amplifiers.
Multi_band autodesign includes basic tilt and tilt_target calculation when the Raman flag is enabled with the
--sim-params option. The spectrum is demultiplexed before propagation in the amplifier and multiplexed back into the output
fiber at the amplifier output.
In the library:
.. code-block:: json
{
"type_variety": "std_medium_gain_C",
"f_min": 191.225e12,
"f_max": 196.125e12,
"type_def": "variable_gain",
"gain_flatmax": 26,
"gain_min": 15,
"p_max": 21,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": false
},
{
"type_variety": "std_medium_gain_L",
"f_min": 186.5e12,
"f_max": 190.1e12,
"type_def": "variable_gain",
"gain_flatmax": 26,
"gain_min": 15,
"p_max": 21,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_medium_gain_multiband",
"type_def": "multi_band",
"amplifiers": [
"std_medium_gain_C",
"std_medium_gain_L"
],
"allowed_for_design": false
},
In the network topology:
.. code-block:: json
{
"uid": "east edfa in Site_A to Site_B",
"type": "Multiband_amplifier",
"type_variety": "std_medium_gain_multiband",
"amplifiers": [{
"type_variety": "std_medium_gain_C",
"operational": {
"gain_target": 22.55,
"delta_p": 0.9,
"out_voa": 3.0,
"tilt_target": 0.0
}
}, {
"type_variety": "std_medium_gain_L",
"operational": {
"gain_target": 21,
"delta_p": 3.0,
"out_voa": 3.0,
"tilt_target": 0.0
}
}
]
}
**Network design**
Optionally, users can define a design target per OMS (single or multi-band), with specific frequency ranges.
Default design bands are defined in the SI.
.. code-block:: json
{
"uid": "roadm Site_A",
"type": "Roadm",
"params": {
"target_pch_out_db": -20,
"design_bands": [{"f_min": 191.3e12, "f_max": 195.1e12}]
}
}
It is possible to define a set of bands in the SI block instead of a single Spectrum Information.
In this case type_variety must be used.
Each set defines a reference channel used for design functions and autodesign.
The default design settings for the gnpy-path-request script have been modified.
Design is now performed once, for the reference channel defined in the SI block of the eqpt_config,
and requests are propagated based on this design.
The --redesign-per-request option can be used to restore the previous behaviour
(design using the request channel types).
The autodesign function has been updated to insert multiband boosters, preamps or inline amplifiers based on the OMS
nature. If nothing is stated (no amplifier defined in the OMS, no design_bands attribute in the ROADM), then
it uses single-band Edfas.
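A per-degree design band definition is also possible; below is a sketch of a possible layout, assuming the
per_degree_design_bands attribute read by the parameters module shown further down this page (the degree name and
frequency values are illustrative):
.. code-block:: json
{
    "uid": "roadm Site_A",
    "type": "Roadm",
    "params": {
        "target_pch_out_db": -20,
        "per_degree_design_bands": {
            "east edfa in Site_A to Site_B": [
                {"f_min": 191.3e12, "f_max": 195.1e12},
                {"f_min": 186.5e12, "f_max": 190.1e12}
            ]
        }
    }
}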
**Propagation**
Only carriers within the amplifier bandwidth are propagated, which makes the simulation more consistent. This more
rigorous checking of the spectrum to be propagated against the amplifier bandwidth may lead to changes in the total
number of channels compared to previous releases. The range can be adjusted by changing the values of ``f_min`` and
``f_max`` in the amplifier library.
``f_min`` and ``f_max`` represent the boundary frequencies of the amplification bandwidth (the entire channel must fit
within this range).
In the example below, a signal with a 190.05 THz center frequency and a 50 GHz width cannot fit within the amplifier
band: its spectrum spans 190.025 THz to 190.075 THz, while the amplifier ``f_max`` is 190.05 THz.
Note that this has a different meaning in the SI or Transceiver blocks, where ``f_min`` and ``f_max`` refer to the
minimum / maximum values of the carrier center frequency.
.. code-block:: json
{
"type_variety": "std_booster_L",
"f_min": 186.55e12,
"f_max": 190.05e12,
"type_def": "fixed_gain",
"gain_flatmax": 21,
"gain_min": 20,
"p_max": 21,
"nf0": 5,
"allowed_for_design": false
}
**Display**
The CLI output for the transmission_main_example now displays the channels used for design and simulation,
as well as the tilt target of amplifiers.
.. code-block:: text
Reference used for design: (Input optical power reference in span = 0.00dBm,
spacing = 50.00GHz
nb_channels = 76)
Channels propagating: (Input optical power deviation in span = 0.00dB,
spacing = 50.00GHz,
transceiver output power = 0.00dBm,
nb_channels = 76)
The CLI output displays the settings of each amplifier:
.. code-block:: text
Multiband_amplifier east edfa in Site_A to Site_B
type_variety: std_medium_gain_multiband
type_variety: std_medium_gain_C type_variety: std_medium_gain_L
effective gain(dB): 20.90 effective gain(dB): 22.19
(before att_in and before output VOA) (before att_in and before output VOA)
tilt-target(dB) 0.00 tilt-target(dB) 0.00
noise figure (dB): 6.38 noise figure (dB): 6.19
(including att_in) (including att_in)
pad att_in (dB): 0.00 pad att_in (dB): 0.00
Power In (dBm): -1.08 Power In (dBm): -1.49
Power Out (dBm): 19.83 Power Out (dBm): 20.71
Delta_P (dB): 0.90 Delta_P (dB): 2.19
target pch (dBm): 0.90 target pch (dBm): 3.00
actual pch out (dBm): -2.09 actual pch out (dBm): -0.80
output VOA (dB): 3.00 output VOA (dB): 3.00
**New feature**
The perturbative Raman and the approximated GGN models are introduced for a faster evaluation of the Raman and
Kerr effects, respectively.
These implementations are intended to reduce the computational effort required by multiband transmission scenarios.
Both novel models have been validated with extensive simulations
(see `arXiv:2304.11756 <https://arxiv.org/abs/2304.11756>`_ for the new Raman model and
`jlt:9741324 <https://ieeexplore.ieee.org/document/9741324>`_ for the new NLI model).
Additionally, they have been experimentally validated in a laboratory setup composed of commercial equipment
(see `icton:10648172 <https://ieeexplore.ieee.org/document/10648172>`_).
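To select these models, a --sim-params file could look like the sketch below; the parameter names follow the
RamanParams and NLIParams classes visible in the diff further down this page, while the "ggn_approx" method name and
the chosen values are assumptions for illustration only:
.. code-block:: json
{
    "raman_params": {
        "flag": true,
        "method": "perturbative",
        "order": 2,
        "result_spatial_resolution": 10e3,
        "solver_spatial_resolution": 10e3
    },
    "nli_params": {
        "method": "ggn_approx",
        "dispersion_tolerance": 1,
        "phase_shift_tolerance": 0.1,
        "computed_number_of_channels": 10
    }
}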
v2.10
=====
ROADM impairments can be defined per degree and per roadm-path type (add, drop or express).
The minimum loss when crossing a ROADM is no longer 0 dB; it can be set per ROADM degree with roadm-path-impairments.
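A minimal sketch of such a definition in the ROADM equipment library, assuming the IETF-style attribute names handled
by the parameters module shown further down this page (the ids, frequency ranges and loss values are illustrative):
.. code-block:: json
"roadm-path-impairments": [
    {
        "roadm-path-impairments-id": 0,
        "roadm-express-path": [{
            "frequency-range": {"lower-frequency": 191.3e12, "upper-frequency": 196.1e12},
            "roadm-maxloss": 16.5
        }]
    },
    {
        "roadm-path-impairments-id": 1,
        "roadm-add-path": [{
            "frequency-range": {"lower-frequency": 191.3e12, "upper-frequency": 196.1e12},
            "roadm-maxloss": 11.5,
            "roadm-osnr": 41
        }]
    }
]
The per-degree association is then made on the Roadm element (for example through its per_degree_impairments
attribute, also visible in the parameters module below).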
The transceiver output power, which was previously set using the same parameter as the input span power (power_dbm),
can now be set with a dedicated parameter. It can be set:
- for all channels, with ``tx_power_dbm`` in the SI block, similarly to ``tx_osnr`` (gnpy-transmission-example script)
.. code-block:: json
"SI": [{
"f_min": 191.35e12,
"baud_rate": 32e9,
"f_max": 196.1e12,
"spacing": 50e9,
"power_dbm": 3,
"power_range_db": [0, 0, 1],
"roll_off": 0.15,
"tx_osnr": 40,
"tx_power_dbm": -10,
"sys_margins": 2
}
]
- for certain channels, using the --spectrum option and the ``tx_power_dbm`` attribute (gnpy-transmission-example script).
.. code-block:: json
{
"spectrum": [
{
"f_min": 191.35e12,
"f_max":193.1e12,
"baud_rate": 32e9,
"slot_width": 50e9,
"power_dbm": 0,
"roll_off": 0.15,
"tx_osnr": 40
},
{
"f_min": 193.15e12,
"f_max":193.15e12,
"baud_rate": 32e9,
"slot_width": 50e9,
"power_dbm": 0,
"roll_off": 0.15,
"tx_osnr": 40,
"tx_power_dbm": -10
},
{
"f_min": 193.2e12,
"f_max":195.1e12,
"baud_rate": 32e9,
"slot_width": 50e9,
"power_dbm": 0,
"roll_off": 0.15,
"tx_osnr": 40
}
]
}
- per service, using the additional parameter ``tx_power``, which, similarly to ``power``, is expressed in Watts (gnpy-path-request script)
.. code-block:: json
{
"path-request": [
{
"request-id": "0",
"source": "trx SITE1",
"destination": "trx SITE2",
"src-tp-id": "trx SITE1",
"dst-tp-id": "trx SITE2",
"bidirectional": false,
"path-constraints": {
"te-bandwidth": {
"technology": "flexi-grid",
"trx_type": "Voyager",
"trx_mode": "mode 1",
"spacing": 50000000000.0,
"path_bandwidth": 100000000000.0
}
}
},
{
"request-id": "0 with tx_power",
"source": "trx SITE1",
"destination": "trx SITE2",
"src-tp-id": "trx SITE1",
"dst-tp-id": "trx SITE2",
"bidirectional": false,
"path-constraints": {
"te-bandwidth": {
"technology": "flexi-grid",
"trx_type": "Voyager",
"trx_mode": "mode 1",
"tx_power": 0.0001,
"spacing": 50000000000.0,
"path_bandwidth": 100000000000.0
}
}
}
]
}
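For reference, the ``tx_power`` value of 0.0001 W in the second request corresponds to 0.1 mW, i.e. -10 dBm.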
v2.9
====
The revision introduces a major refactor that separates design and propagation. Most of these changes have no impact
on the user experience, except the following ones:
**Network design - amplifiers**: amplifier saturation is checked during design in all cases, even if type_variety is
set; amplifier gain is no longer computed on the fly but only at the design phase.
Previously, the design did not consider amplifier power saturation if the amplifier type_variety was stated.
With this revision, the saturation is always applied:
if the design is made for a per-channel power that leads to saturation, the targets are properly reduced and the design
is frozen, so that when a new simulation is performed on the same network with lower per-channel power levels,
the same gain target is applied. Previously these were recomputed, changing the gain targets, so the simulation
did not consider the exact same amplifier working points in case of saturation.
Note that this case (working with saturation settings) is not recommended.
The gain of amplifiers was also estimated on the fly when preceded by RamanFiber elements. The refactor now
requires that the Raman gain of the RamanFiber is estimated during design in order to properly compute a gain target.
The Raman gain is estimated at design time for every RamanFiber span, and again during propagation, instead of being
estimated only at the propagation stage for the RamanFiber spans involved in the transmission. Autodesign is more
accurate for unpropagated spans, but this results in an increased overall computation time.
This will be improved in the future.
**Network design - ROADMs**: ROADM target power settings are verified during design.
Design checks that the expected power coming into the ROADM from every ingress direction is consistent with the output
power targets. The check only considers the adjacent previous hop. If the expected power at the input of the ROADM is
lower than the target power on an out-degree of the ROADM, a warning is displayed, and the user is asked to review the
input network to avoid this situation. This does not change the design or propagation behaviour.
**Propagation**: the amplifier gain target is no longer recomputed during propagation. It is now possible to freeze
the design and propagate without automatic changes.
In the previous release, gain was recomputed during propagation based on a hypothetical noiseless reference channel
propagation. It was not possible to "freeze" the autodesign and propagate without recomputing the gain targets
of the amplifiers.
With this new release, the design is frozen, so that performance can be compared on the same basis.
**Display**: "effective pch (dbm)" is removed. Display contains the target pch which is the target power per channel
in dBm, computed based on reference channel used for design and the amplifier delta_p in dB (and before out VOA
contribution). Note that "actual pch out (dBm)" is the actual propagated total power per channel averaged per spectrum
band definition at the output of the amplifier element, including noises and out VOA contribution.
v2.8
====
**Spectrum assignment**: requests can now support multiple slots.
The definition in the service file supports multiple assignments (unchanged syntax):
.. code-block:: json
"effective-freq-slot": [
{
"N": 0,
"M": 4
}, {
"N": 50,
"M": 4
}
],
But in the results, label-hop is now a list of slots and center frequency indices:
.. code-block:: json
{
"path-route-object": {
"index": 4,
"label-hop": [
{
"N": 0,
"M": 4
}, {
"N": 50,
"M": 4
}
]
}
},
instead of
.. code-block:: json
{
"path-route-object": {
"index": 4,
"label-hop": {
"N": 0,
"M": 4
}
}
},
**change in display**: only warnings are displayed; informational messages are disabled and need the -v (verbose)
option to be displayed on standard output.
**frequency scaling**: A more accurate description of fiber parameters is implemented, including frequency scaling of
chromatic dispersion, effective area, Raman gain coefficient, and nonlinear coefficient.
In particular:
1. Chromatic dispersion can be defined with ``'dispersion'`` and ``'dispersion_slope'``, as in previous versions, or
with ``'dispersion_per_frequency'``; the latter must be defined as a dictionary with two keys, ``'value'`` and
``'frequency'``, and it has higher priority than the entries ``'dispersion'`` and ``'dispersion_slope'``.
Essential change: in previous versions, when it was not provided, the ``'dispersion_slope'`` was calculated in a
convoluted manner to obtain a vanishing beta3, and this was a mere artifact for NLI evaluation purposes (namely to
evaluate beta2 and beta3, not for total dispersion accumulation). Now, the evaluation of beta2 and beta3 is performed
explicitly in the element.py module (the relations are given after this list).
2. The effective area is provided as a scalar value evaluated at the Fiber reference frequency and properly scaled
considering the Fiber refractive indices n1 and n2, and the core radius. These quantities are assumed to be fixed and
are hard coded in the parameters.py module. Essential change: the effective area is always scaled with frequency.
3. The Raman gain coefficient is properly scaled considering the overlap of the fiber effective area values scaled at
the interacting frequencies. Essential change: in previous versions the Raman gain coefficient depended only on
the frequency offset.
4. The nonlinear coefficient ``'gamma'`` is properly scaled considering the refractive index n2 and the scaled
effective area. Essential change: as with the effective area, the nonlinear coefficient is always scaled with
frequency.
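For reference, beta2 and beta3 relate to the dispersion D and the dispersion slope S at the reference wavelength as
follows (the same relations appear in the parameters module diff further down this page):
.. math::
    \beta_2 = -\frac{\lambda_{ref}^2\, D}{2 \pi c}, \qquad
    \beta_3 = \frac{S - \frac{4 \pi c}{\lambda_{ref}^{3}}\, \beta_2}{\left(\frac{2 \pi c}{\lambda_{ref}^{2}}\right)^{2}}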
**power offset**: Power equalization now enables defining a power offset in the transceiver library to represent
the deviation from the general equalization strategy defined in ROADMs.
.. code-block:: json
"mode": [{
"format": "100G",
"baud_rate": 32.0e9,
"tx_osnr": 35.0,
"min_spacing": 50.0e9,
"cost": 1,
"OSNR": 10.0,
"bit_rate": 100.0e9,
"roll_off": 0.2,
"equalization_offset_db": 0.0
}, {
"format": "200G",
"baud_rate": 64.0e9,
"tx_osnr": 35.0,
"min_spacing": 75.0e9,
"cost": 1,
"OSNR": 13.0,
"bit_rate": 200.0e9,
"roll_off": 0.2,
"equalization_offset_db": 1.76
}
]
v2.7
====
View File
@@ -1,8 +1,8 @@
"""
'''
GNPy is an open-source, community-developed library for building route planning and optimization tools in real-world mesh optical networks. It is based on the Gaussian Noise Model.
Signal propagation is implemented in :py:mod:`.core`.
Path finding and spectrum assignment is in :py:mod:`.topology`.
Various tools and auxiliary code, including the JSON I/O handling, is in
:py:mod:`.tools`.
"""
'''
View File
@@ -1,4 +1,4 @@
"""
'''
Simulation of signal propagation in the DWDM network
Optical signals, as defined via :class:`.info.SpectralInformation`, enter
@@ -6,4 +6,4 @@ Optical signals, as defined via :class:`.info.SpectralInformation`, enter
through the :py:mod:`.network`.
The simulation is controlled via :py:mod:`.parameters` and implemented mainly
via :py:mod:`.science_utils`.
"""
'''
View File
@@ -1,17 +1,12 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.core.ansi_escapes: A random subset of ANSI terminal escape codes for colored messages
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
'''
gnpy.core.ansi_escapes
======================
A random subset of ANSI terminal escape codes for colored messages
"""
'''
red = '\x1b[1;31;40m'
blue = '\x1b[1;34;40m'
File diff suppressed because it is too large
View File
@@ -1,137 +1,73 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.core.equipment: functionality for specifying equipment
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
'''
gnpy.core.equipment
===================
This module contains functionality for specifying equipment.
"""
from collections import defaultdict
from functools import reduce
from typing import List
'''
from gnpy.core.exceptions import EquipmentConfigError, ConfigurationError
from gnpy.core.utils import automatic_nch, db2lin
from gnpy.core.exceptions import EquipmentConfigError
def trx_mode_params(equipment, trx_type_variety='', trx_mode='', error_message=False):
"""return the trx and SI parameters from eqpt_config for a given type_variety and mode (ie format)
if the type or mode do no match an existing transceiver in the library, then the function
raises an error if error_message is True else returns a default mode based on equipment['SI']['default']
If trx_mode is None (but type is valid), it returns an undetermined mode whatever the error message:
this is a special case for automatic mode selection.
"""
"""return the trx and SI parameters from eqpt_config for a given type_variety and mode (ie format)"""
trx_params = {}
default_si_data = equipment['SI']['default']
# default transponder characteristics
# mainly used with transmission_main_example.py
default_trx_params = {
'f_min': default_si_data.f_min,
'f_max': default_si_data.f_max,
'baud_rate': default_si_data.baud_rate,
'spacing': default_si_data.spacing,
'OSNR': None,
'penalties': {},
'bit_rate': None,
'cost': None,
'roll_off': default_si_data.roll_off,
'tx_osnr': default_si_data.tx_osnr,
'min_spacing': None,
'equalization_offset_db': 0
}
# Undetermined transponder characteristics
# mainly used with path_request_run.py for the automatic mode computation case
undetermined_trx_params = {
"format": "undetermined",
"baud_rate": None,
"OSNR": None,
"penalties": None,
"bit_rate": None,
"roll_off": None,
"tx_osnr": None,
"min_spacing": None,
"cost": None,
"equalization_offset_db": 0
}
trxs = equipment['Transceiver']
if trx_type_variety in trxs:
modes = {mode['format']: mode for mode in trxs[trx_type_variety].mode}
trx_frequencies = {'f_min': trxs[trx_type_variety].frequency['min'],
'f_max': trxs[trx_type_variety].frequency['max']}
if trx_mode in modes:
# if called from transmission_main.py, trx_mode is ''
trx_params = {**modes[trx_mode], **trx_frequencies}
try:
trxs = equipment['Transceiver']
# if called from path_requests_run.py, trx_mode is filled with None when not specified by user
# if called from transmission_main.py, trx_mode is ''
if trx_mode is not None:
mode_params = next(mode for trx in trxs
if trx == trx_type_variety
for mode in trxs[trx].mode
if mode['format'] == trx_mode)
trx_params = {**mode_params}
# sanity check: spacing baudrate must be smaller than min spacing
if trx_params['baud_rate'] > trx_params['min_spacing']:
# sanity check: baudrate must be smaller than min spacing
raise EquipmentConfigError(f'Inconsistency in equipment library:\n Transponder "{trx_type_variety}" '
+ f'mode "{trx_params["format"]}" has baud rate '
+ f'{trx_params["baud_rate"] * 1e-9:.2f} GHz greater than min_spacing '
+ f'{trx_params["min_spacing"] * 1e-9:.2f}.')
trx_params['equalization_offset_db'] = trx_params.get('equalization_offset_db', 0)
return trx_params
if trx_mode is None:
# if called from path_requests_run.py, trx_mode is filled with None when not specified by user
trx_params = {**undetermined_trx_params, **trx_frequencies}
return trx_params
if trx_type_variety in trxs and error_message:
raise EquipmentConfigError(f'Could not find transponder "{trx_type_variety}" with mode "{trx_mode}" '
+ 'in equipment library')
if error_message:
raise EquipmentConfigError(f'Could not find transponder "{trx_type_variety}" in equipment library')
raise EquipmentConfigError(f'Inconsistency in equipment library:\n Transpoder "{trx_type_variety}" mode "{trx_params["format"]}" ' +
f'has baud rate {trx_params["baud_rate"]*1e-9} GHz greater than min_spacing {trx_params["min_spacing"]*1e-9}.')
else:
mode_params = {"format": "undetermined",
"baud_rate": None,
"OSNR": None,
"bit_rate": None,
"roll_off": None,
"tx_osnr": None,
"min_spacing": None,
"cost": None}
trx_params = {**mode_params}
trx_params['f_min'] = equipment['Transceiver'][trx_type_variety].frequency['min']
trx_params['f_max'] = equipment['Transceiver'][trx_type_variety].frequency['max']
# TODO: novel automatic feature maybe unwanted if spacing is specified
# trx_params['spacing'] = _automatic_spacing(trx_params['baud_rate'])
# temp = trx_params['spacing']
# print(f'spacing {temp}')
except StopIteration:
if error_message:
raise EquipmentConfigError(f'Could not find transponder "{trx_type_variety}" with mode "{trx_mode}" in equipment library')
else:
# default transponder charcteristics
# mainly used with transmission_main_example.py
trx_params['f_min'] = default_si_data.f_min
trx_params['f_max'] = default_si_data.f_max
trx_params['baud_rate'] = default_si_data.baud_rate
trx_params['spacing'] = default_si_data.spacing
trx_params['OSNR'] = None
trx_params['bit_rate'] = None
trx_params['cost'] = None
trx_params['roll_off'] = default_si_data.roll_off
trx_params['tx_osnr'] = default_si_data.tx_osnr
trx_params['min_spacing'] = None
nch = automatic_nch(trx_params['f_min'], trx_params['f_max'], trx_params['spacing'])
trx_params['nb_channel'] = nch
print(f'There are {nch} channels propagating')
trx_params['power'] = db2lin(default_si_data.power_dbm) * 1e-3
trx_params = {**default_trx_params}
return trx_params
def find_type_variety(amps: List[str], equipment: dict) -> List[str]:
"""Returns the multiband type_variety associated with a list of single band type_varieties
Args:
amps (List[str]): A list of single band type_varieties.
equipment (dict): A dictionary containing equipment information.
Returns:
str: an amplifier type variety
"""
listes = find_type_varieties(amps, equipment)
_found_type = list(reduce(lambda x, y: set(x) & set(y), listes))
# Given a list of single band amplifiers, find the multiband amplifier whose multi_band group
# matches. For example, if amps list contains ["a1_LBAND", "a2_CBAND"], with a1.multi_band = [a1_LBAND, a1_CBAND]
# and a2.multi_band = [a1_LBAND, a2_CBAND], then:
# possible_type_varieties = {"a1_LBAND": ["a1", "a2"], "a2_CBAND": ["a2"]}
# listes = [["a1", "a2"], ["a2"]]
# and _found_type = [a2]
if not _found_type:
msg = f'{amps} amps do not belong to the same amp type {listes}'
raise ConfigurationError(msg)
return _found_type
def find_type_varieties(amps: List[str], equipment: dict) -> List[List[str]]:
"""Returns the multiband list of type_varieties associated with a list of single band type_varieties
Args:
amps (List[str]): A list of single band type_varieties.
equipment (dict): A dictionary containing equipment information.
Returns:
List[List[str]]: A list of lists containing the multiband type_varieties
associated with each single band type_variety.
"""
possible_type_varieties = defaultdict(list)
for amp_name, amp in equipment['Edfa'].items():
if amp.multi_band is not None:
for elem in amp.multi_band:
# possible_type_varieties stores the list of multiband amp names that list this elem as
# a possible amplifier of the multiband group. For example, if "std_medium_gain_multiband"
# and "std_medium_gain_multiband_new" contain "std_medium_gain_C" in their "multi_band" list, then:
# possible_type_varieties["std_medium_gain_C"] =
# ["std_medium_gain_multiband", "std_medium_gain_multiband_new"]
possible_type_varieties[elem].append(amp_name)
return [possible_type_varieties[a] for a in amps]
View File
@@ -1,42 +1,37 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.core.exceptions: Exceptions thrown by other gnpy modules
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
'''
gnpy.core.exceptions
====================
Exceptions thrown by other gnpy modules
"""
'''
class ConfigurationError(Exception):
"""User-provided configuration contains an error"""
'''User-provided configuration contains an error'''
class EquipmentConfigError(ConfigurationError):
"""Incomplete or wrong configuration within the equipment library"""
'''Incomplete or wrong configuration within the equipment library'''
class NetworkTopologyError(ConfigurationError):
"""Topology of user-provided network is wrong"""
'''Topology of user-provided network is wrong'''
class ServiceError(Exception):
"""Service of user-provided request is wrong"""
'''Service of user-provided request is wrong'''
class DisjunctionError(ServiceError):
"""Disjunction of user-provided request can not be satisfied"""
'''Disjunction of user-provided request can not be satisfied'''
class SpectrumError(Exception):
"""Spectrum errors of the program"""
'''Spectrum errors of the program'''
class ParametersError(ConfigurationError):
"""Incomplete or wrong configurations within parameters json"""
'''Incomplete or wrong configurations within parameters json'''
View File
@@ -1,433 +1,57 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.core.info: classes for modelling Spectral Information
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
'''
gnpy.core.info
==============
This module contains classes for modelling :class:`SpectralInformation`.
"""
'''
from __future__ import annotations
from collections import namedtuple
from collections.abc import Iterable
from typing import Union, List, Optional
from dataclasses import dataclass
from numpy import argsort, mean, array, append, ones, ceil, any, zeros, outer, full, ndarray, asarray
from gnpy.core.utils import automatic_nch, db2lin, watt2dbm
from gnpy.core.exceptions import SpectrumError
DEFAULT_SLOT_WIDTH_STEP = 12.5e9 # Hz
"""Channels with unspecified slot width will have their slot width evaluated as the baud rate rounded up to the minimum
multiple of the DEFAULT_SLOT_WIDTH_STEP (the baud rate is extended including the roll off in this evaluation)"""
from gnpy.core.utils import automatic_nch, lin2db
class Power(namedtuple('Power', 'signal nli ase')):
"""carriers power in W"""
class Channel(
namedtuple('Channel',
'channel_number frequency baud_rate slot_width roll_off power chromatic_dispersion pmd pdl latency')):
"""Class containing the parameters of a WDM signal.
class Channel(namedtuple('Channel', 'channel_number frequency baud_rate roll_off power chromatic_dispersion pmd')):
""" Class containing the parameters of a WDM signal.
:param channel_number: channel number in the WDM grid
:param frequency: central frequency of the signal (Hz)
:param baud_rate: the symbol rate of the signal (Baud)
:param slot_width: the slot width (Hz)
:param roll_off: the roll off of the signal. It is a pure number between 0 and 1
:param power (gnpy.core.info.Power): power of signal, ASE noise and NLI (W)
:param chromatic_dispersion: chromatic dispersion (s/m)
:param pmd: polarization mode dispersion (s)
:param pdl: polarization dependent loss (dB)
:param latency: propagation latency (s)
:param channel_number: channel number in the WDM grid
:param frequency: central frequency of the signal (Hz)
:param baud_rate: the symbol rate of the signal (Baud)
:param roll_off: the roll off of the signal. It is a pure number between 0 and 1
:param power (gnpy.core.info.Power): power of signal, ASE noise and NLI (W)
:param chromatic_dispersion: chromatic dispersion (s/m)
:param pmd: polarization mode dispersion (s)
"""
class SpectralInformation(object):
"""Class containing the parameters of the entire WDM comb.
delta_pdb_per_channel: (per frequency) per channel delta power in dbm for the actual mix of channels"""
def __init__(self, frequency: array, baud_rate: array, slot_width: array, signal: array, nli: array, ase: array,
roll_off: array, chromatic_dispersion: array, pmd: array, pdl: array, latency: array,
delta_pdb_per_channel: array, tx_osnr: array, tx_power: array, label: array):
indices = argsort(frequency)
self._frequency = frequency[indices]
self._df = outer(ones(frequency.shape), frequency) - outer(frequency, ones(frequency.shape))
self._number_of_channels = len(self._frequency)
self._channel_number = [*range(1, self._number_of_channels + 1)]
self._slot_width = slot_width[indices]
self._baud_rate = baud_rate[indices]
overlap = self._frequency[:-1] + self._slot_width[:-1] / 2 > self._frequency[1:] - self._slot_width[1:] / 2
if any(overlap):
overlap = [pair for pair in zip(overlap * self._channel_number[:-1], overlap * self._channel_number[1:])
if pair != (0, 0)]
raise SpectrumError(f'Spectrum required slot widths larger than the frequency spectral distances '
f'between channels: {overlap}.')
exceed = self._baud_rate > self._slot_width
if any(exceed):
raise SpectrumError(f'Spectrum baud rate, including the roll off, larger than the slot width for channels: '
f'{[ch for ch in exceed * self._channel_number if ch]}.')
self._signal = signal[indices]
self._nli = nli[indices]
self._ase = ase[indices]
self._roll_off = roll_off[indices]
self._chromatic_dispersion = chromatic_dispersion[indices]
self._pmd = pmd[indices]
self._pdl = pdl[indices]
self._latency = latency[indices]
self._delta_pdb_per_channel = delta_pdb_per_channel[indices]
self._tx_osnr = tx_osnr[indices]
self._tx_power = tx_power[indices]
self._label = label[indices]
@property
def frequency(self):
return self._frequency
@property
def df(self):
"""Matrix of relative frequency distances between all channels. Positive elements in the upper right side."""
return self._df
@property
def slot_width(self):
return self._slot_width
@property
def baud_rate(self):
return self._baud_rate
@property
def number_of_channels(self):
return self._number_of_channels
@property
def powers(self):
powers = zip(self.signal, self.nli, self.ase)
return [Power(*p) for p in powers]
@property
def signal(self):
return self._signal
@signal.setter
def signal(self, signal):
self._signal = signal
@property
def nli(self):
return self._nli
@nli.setter
def nli(self, nli):
self._nli = nli
@property
def ase(self):
return self._ase
@ase.setter
def ase(self, ase):
self._ase = ase
@property
def roll_off(self):
return self._roll_off
@property
def chromatic_dispersion(self):
return self._chromatic_dispersion
@chromatic_dispersion.setter
def chromatic_dispersion(self, chromatic_dispersion):
self._chromatic_dispersion = chromatic_dispersion
@property
def pmd(self):
return self._pmd
@property
def label(self):
return self._label
@pmd.setter
def pmd(self, pmd):
self._pmd = pmd
@property
def pdl(self):
return self._pdl
@pdl.setter
def pdl(self, pdl):
self._pdl = pdl
@property
def latency(self):
return self._latency
@latency.setter
def latency(self, latency):
self._latency = latency
@property
def delta_pdb_per_channel(self):
return self._delta_pdb_per_channel
@delta_pdb_per_channel.setter
def delta_pdb_per_channel(self, delta_pdb_per_channel):
self._delta_pdb_per_channel = delta_pdb_per_channel
@property
def tx_osnr(self):
return self._tx_osnr
@tx_osnr.setter
def tx_osnr(self, tx_osnr):
self._tx_osnr = tx_osnr
@property
def tx_power(self):
return self._tx_power
@tx_power.setter
def tx_power(self, tx_power):
self._tx_power = tx_power
@property
def channel_number(self):
return self._channel_number
@property
def carriers(self):
entries = zip(self.channel_number, self.frequency, self.baud_rate, self.slot_width,
self.roll_off, self.powers, self.chromatic_dispersion, self.pmd, self.pdl, self.latency)
return [Channel(*entry) for entry in entries]
def apply_attenuation_lin(self, attenuation_lin):
self.signal *= attenuation_lin
self.nli *= attenuation_lin
self.ase *= attenuation_lin
def apply_attenuation_db(self, attenuation_db):
attenuation_lin = 1 / db2lin(attenuation_db)
self.apply_attenuation_lin(attenuation_lin)
def apply_gain_lin(self, gain_lin):
self.signal *= gain_lin
self.nli *= gain_lin
self.ase *= gain_lin
def apply_gain_db(self, gain_db):
gain_lin = db2lin(gain_db)
self.apply_gain_lin(gain_lin)
def __add__(self, other: SpectralInformation):
try:
return SpectralInformation(frequency=append(self.frequency, other.frequency),
slot_width=append(self.slot_width, other.slot_width),
signal=append(self.signal, other.signal), nli=append(self.nli, other.nli),
ase=append(self.ase, other.ase),
baud_rate=append(self.baud_rate, other.baud_rate),
roll_off=append(self.roll_off, other.roll_off),
chromatic_dispersion=append(self.chromatic_dispersion,
other.chromatic_dispersion),
pmd=append(self.pmd, other.pmd),
pdl=append(self.pdl, other.pdl),
latency=append(self.latency, other.latency),
delta_pdb_per_channel=append(self.delta_pdb_per_channel,
other.delta_pdb_per_channel),
tx_osnr=append(self.tx_osnr, other.tx_osnr),
tx_power=append(self.tx_power, other.tx_power),
label=append(self.label, other.label))
except SpectrumError:
raise SpectrumError('Spectra cannot be summed: channels overlapping.')
def _replace(self, carriers):
self.chromatic_dispersion = array([c.chromatic_dispersion for c in carriers])
self.pmd = array([c.pmd for c in carriers])
self.pdl = array([c.pdl for c in carriers])
self.latency = array([c.latency for c in carriers])
self.signal = array([c.power.signal for c in carriers])
self.nli = array([c.power.nli for c in carriers])
self.ase = array([c.power.ase for c in carriers])
return self
class Pref(namedtuple('Pref', 'p_span0, p_spani, neq_ch ')):
"""noiseless reference power in dBm:
p_span0: inital target carrier power
p_spani: carrier power after element i
neq_ch: equivalent channel count in dB"""
def create_arbitrary_spectral_information(frequency: Union[ndarray, Iterable, float],
signal: Union[float, ndarray, Iterable],
baud_rate: Union[float, ndarray, Iterable],
tx_osnr: Union[float, ndarray, Iterable],
tx_power: Union[float, ndarray, Iterable] = None,
delta_pdb_per_channel: Union[float, ndarray, Iterable] = 0.,
slot_width: Union[float, ndarray, Iterable] = None,
roll_off: Union[float, ndarray, Iterable] = 0.,
chromatic_dispersion: Union[float, ndarray, Iterable] = 0.,
pmd: Union[float, ndarray, Iterable] = 0.,
pdl: Union[float, ndarray, Iterable] = 0.,
latency: Union[float, ndarray, Iterable] = 0.,
label: Union[str, ndarray, Iterable] = None):
"""This is just a wrapper around the SpectralInformation.__init__() that simplifies the creation of
a non-uniform spectral information with NLI and ASE powers set to zero."""
frequency = asarray(frequency)
number_of_channels = frequency.size
try:
signal = full(number_of_channels, signal)
baud_rate = full(number_of_channels, baud_rate)
roll_off = full(number_of_channels, roll_off)
slot_width = full(number_of_channels, slot_width) if slot_width is not None else \
ceil((1 + roll_off) * baud_rate / DEFAULT_SLOT_WIDTH_STEP) * DEFAULT_SLOT_WIDTH_STEP
chromatic_dispersion = full(number_of_channels, chromatic_dispersion)
pmd = full(number_of_channels, pmd)
pdl = full(number_of_channels, pdl)
latency = full(number_of_channels, latency)
nli = zeros(number_of_channels)
ase = zeros(number_of_channels)
delta_pdb_per_channel = full(number_of_channels, delta_pdb_per_channel)
tx_osnr = full(number_of_channels, tx_osnr)
tx_power = full(number_of_channels, tx_power)
label = full(number_of_channels, label)
return SpectralInformation(frequency=frequency, slot_width=slot_width,
signal=signal, nli=nli, ase=ase,
baud_rate=baud_rate, roll_off=roll_off,
chromatic_dispersion=chromatic_dispersion,
pmd=pmd, pdl=pdl, latency=latency,
delta_pdb_per_channel=delta_pdb_per_channel,
tx_osnr=tx_osnr, tx_power=tx_power, label=label)
except ValueError as e:
if 'could not broadcast' in str(e):
raise SpectrumError('Dimension mismatch in input fields.')
else:
raise
class SpectralInformation(namedtuple('SpectralInformation', 'pref carriers')):
def __new__(cls, pref, carriers):
return super().__new__(cls, pref, carriers)
def create_input_spectral_information(f_min, f_max, roll_off, baud_rate, spacing, tx_osnr, tx_power,
delta_pdb=0):
"""Creates a fixed slot width spectral information with flat power.
all arguments are scalar values"""
number_of_channels = automatic_nch(f_min, f_max, spacing)
frequency = [(f_min + spacing * i) for i in range(1, number_of_channels + 1)]
delta_pdb_per_channel = delta_pdb * ones(number_of_channels)
label = [f'{baud_rate * 1e-9 :.2f}G' for i in range(number_of_channels)]
return create_arbitrary_spectral_information(frequency, slot_width=spacing, signal=tx_power, baud_rate=baud_rate,
roll_off=roll_off, delta_pdb_per_channel=delta_pdb_per_channel,
tx_osnr=tx_osnr, tx_power=tx_power, label=label)
def is_in_band(frequency: float, band: dict) -> bool:
"""band has {"f_min": value, "f_max": value} format
"""
if frequency >= band['f_min'] and frequency <= band['f_max']:
return True
return False
def demuxed_spectral_information(input_si: SpectralInformation, band: dict) -> Optional[SpectralInformation]:
"""extract a si based on band
"""
filtered_indices = [i for i, f in enumerate(input_si.frequency)
if is_in_band(f - input_si.slot_width[i] / 2, band)
and is_in_band(f + input_si.slot_width[i] / 2, band)]
if filtered_indices:
frequency = input_si.frequency[filtered_indices]
baud_rate = input_si.baud_rate[filtered_indices]
slot_width = input_si.slot_width[filtered_indices]
signal = input_si.signal[filtered_indices]
nli = input_si.nli[filtered_indices]
ase = input_si.ase[filtered_indices]
roll_off = input_si.roll_off[filtered_indices]
chromatic_dispersion = input_si.chromatic_dispersion[filtered_indices]
pmd = input_si.pmd[filtered_indices]
pdl = input_si.pdl[filtered_indices]
latency = input_si.latency[filtered_indices]
delta_pdb_per_channel = input_si.delta_pdb_per_channel[filtered_indices]
tx_osnr = input_si.tx_osnr[filtered_indices]
tx_power = input_si.tx_power[filtered_indices]
label = input_si.label[filtered_indices]
return SpectralInformation(frequency=frequency, baud_rate=baud_rate, slot_width=slot_width, signal=signal,
nli=nli, ase=ase, roll_off=roll_off, chromatic_dispersion=chromatic_dispersion,
pmd=pmd, pdl=pdl, latency=latency, delta_pdb_per_channel=delta_pdb_per_channel,
tx_osnr=tx_osnr, tx_power=tx_power, label=label)
return None
def muxed_spectral_information(input_si_list: List[SpectralInformation]) -> SpectralInformation:
"""return the assembled spectrum
"""
if input_si_list and len(input_si_list) > 1:
si = input_si_list[0] + muxed_spectral_information(input_si_list[1:])
return si
elif input_si_list and len(input_si_list) == 1:
return input_si_list[0]
else:
raise ValueError('liste vide')
def carriers_to_spectral_information(initial_spectrum: dict[float, Carrier],
power: float) -> SpectralInformation:
"""Initial spectrum is a dict with key = carrier frequency, and value a Carrier object.
:param initial_spectrum: indexed by frequency in Hz, with power offset (delta_pdb), baudrate, slot width,
tx_osnr, tx_power and roll off.
:param power: power of the request
"""
frequency = list(initial_spectrum.keys())
signal = [c.tx_power for c in initial_spectrum.values()]
roll_off = [c.roll_off for c in initial_spectrum.values()]
baud_rate = [c.baud_rate for c in initial_spectrum.values()]
delta_pdb_per_channel = [c.delta_pdb for c in initial_spectrum.values()]
slot_width = [c.slot_width for c in initial_spectrum.values()]
tx_osnr = [c.tx_osnr for c in initial_spectrum.values()]
tx_power = [c.tx_power for c in initial_spectrum.values()]
label = [c.label for c in initial_spectrum.values()]
return create_arbitrary_spectral_information(frequency=frequency, signal=signal, baud_rate=baud_rate,
slot_width=slot_width, roll_off=roll_off,
delta_pdb_per_channel=delta_pdb_per_channel, tx_osnr=tx_osnr,
tx_power=tx_power, label=label)
@dataclass
class Carrier:
"""One channel in the initial mixed-type spectrum definition, each type being defined by
its delta_pdb (power offset with respect to reference power), baud rate, slot_width, roll_off
tx_power, and tx_osnr. delta_pdb offset is applied to target power out of Roadm.
Label is used to group carriers which belong to the same partition when printing results.
"""
delta_pdb: float
baud_rate: float
slot_width: float
roll_off: float
tx_osnr: float
tx_power: float
label: str
@dataclass
class ReferenceCarrier:
"""Reference channel type is used to determine target power out of ROADM for the reference channel when
constant power spectral density (PSD) equalization is set. Reference channel is the type that has been defined
in SI block and used for the initial design of the network.
Computing the power out of ROADM for the reference channel is required to correctly compute the loss
experienced by reference channel in Roadm element.
Baud rate is required to find the target power in constant PSD: power = PSD_target * baud_rate.
For example, if target PSD is 3.125e4mW/GHz and reference carrier type a 32 GBaud channel then
output power should be -20 dBm and for a 64 GBaud channel power target would need 3 dB more: -17 dBm.
Slot width is required to find the target power in constant PSW (constant power per slot width equalization):
power = PSW_target * slot_width.
For example, if target PSW is 2e4mW/GHz and reference carrier type a 32 GBaud channel in a 50GHz slot width then
output power should be -20 dBm and for a 64 GBaud channel in a 75 GHz slot width, power target would be -18.24 dBm.
Other attributes (like roll-off) may be added there for future equalization purpose.
"""
baud_rate: float
slot_width: float
def create_input_spectral_information(f_min, f_max, roll_off, baud_rate, power, spacing):
# pref in dB : convert power lin into power in dB
pref = lin2db(power * 1e3)
nb_channel = automatic_nch(f_min, f_max, spacing)
si = SpectralInformation(
pref=Pref(pref, pref, lin2db(nb_channel)),
carriers=[
Channel(f, (f_min + spacing * f),
baud_rate, roll_off, Power(power, 0, 0), 0, 0) for f in range(1, nb_channel + 1)
]
)
return si
File diff suppressed because it is too large
View File
@@ -1,24 +1,19 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.core.parameters: parameters to configure standard network elements
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
gnpy.core.parameters
====================
This module contains all parameters to configure standard network elements.
"""
from collections import namedtuple
from copy import deepcopy
from dataclasses import dataclass
from scipy.constants import c, pi
from numpy import asarray, array, exp, sqrt, log, outer, ones, squeeze, append, flip, linspace, full
from gnpy.core.utils import convert_length
"""
from scipy.constants import c, pi
from numpy import squeeze, log10, exp
from gnpy.core.utils import db2lin, convert_length
from gnpy.core.exceptions import ParametersError
@@ -35,235 +30,110 @@ class Parameters:
class PumpParams(Parameters):
def __init__(self, power, frequency, propagation_direction):
self.power = power
self.frequency = frequency
self.propagation_direction = propagation_direction.lower()
self._power = power
self._frequency = frequency
self._propagation_direction = propagation_direction
@property
def power(self):
return self._power
@property
def frequency(self):
return self._frequency
@property
def propagation_direction(self):
return self._propagation_direction
class RamanParams(Parameters):
def __init__(self, flag=False, method='perturbative', order=2, result_spatial_resolution=10e3,
solver_spatial_resolution=10e3):
"""Simulation parameters used within the Raman Solver
def __init__(self, **kwargs):
self._flag_raman = kwargs['flag_raman']
self._space_resolution = kwargs['space_resolution'] if 'space_resolution' in kwargs else None
self._tolerance = kwargs['tolerance'] if 'tolerance' in kwargs else None
:params flag: boolean for enabling/disable the evaluation of the Raman power profile in frequency and position
:params method: Raman solver method
:params order: solution order for perturbative method
:params result_spatial_resolution: spatial resolution of the evaluated Raman power profile
:params solver_spatial_resolution: spatial step for the iterative solution of the first order ode
"""
self.flag = flag
self.method = method
self.order = order
self.result_spatial_resolution = result_spatial_resolution # [m]
self.solver_spatial_resolution = solver_spatial_resolution # [m]
@property
def flag_raman(self):
return self._flag_raman
def to_json(self):
return {"flag": self.flag,
"method": self.method,
"order": self.order,
"result_spatial_resolution": self.result_spatial_resolution,
"solver_spatial_resolution": self.solver_spatial_resolution}
@property
def space_resolution(self):
return self._space_resolution
@property
def tolerance(self):
return self._tolerance
class NLIParams(Parameters):
def __init__(self, method='gn_model_analytic', dispersion_tolerance=4, phase_shift_tolerance=0.1,
computed_channels=None, computed_number_of_channels=None):
"""Simulation parameters used within the Nli Solver
def __init__(self, **kwargs):
self._nli_method_name = kwargs['nli_method_name']
self._wdm_grid_size = kwargs['wdm_grid_size']
self._dispersion_tolerance = kwargs['dispersion_tolerance']
self._phase_shift_tolerance = kwargs['phase_shift_tolerance']
self._f_cut_resolution = None
self._f_pump_resolution = None
self._computed_channels = kwargs['computed_channels'] if 'computed_channels' in kwargs else None
:params method: formula for NLI calculation
:params dispersion_tolerance: tuning parameter for ggn model solution
:params phase_shift_tolerance: tuning parameter for ggn model solution
:params computed_channels: the NLI is evaluated for these channels and extrapolated for the others
:params computed_number_of_channels: the NLI is evaluated for this number of channels equally distributed
in the spectrum and extrapolated for the others
"""
self.method = method.lower()
self.dispersion_tolerance = dispersion_tolerance
self.phase_shift_tolerance = phase_shift_tolerance
self.computed_channels = computed_channels
self.computed_number_of_channels = computed_number_of_channels
@property
def nli_method_name(self):
return self._nli_method_name
def to_json(self):
return {"method": self.method,
"dispersion_tolerance": self.dispersion_tolerance,
"phase_shift_tolerance": self.phase_shift_tolerance,
"computed_channels": self.computed_channels,
"computed_number_of_channels": self.computed_number_of_channels}
@property
def wdm_grid_size(self):
return self._wdm_grid_size
@property
def dispersion_tolerance(self):
return self._dispersion_tolerance
@property
def phase_shift_tolerance(self):
return self._phase_shift_tolerance
@property
def f_cut_resolution(self):
return self._f_cut_resolution
@f_cut_resolution.setter
def f_cut_resolution(self, f_cut_resolution):
self._f_cut_resolution = f_cut_resolution
@property
def f_pump_resolution(self):
return self._f_pump_resolution
@f_pump_resolution.setter
def f_pump_resolution(self, f_pump_resolution):
self._f_pump_resolution = f_pump_resolution
@property
def computed_channels(self):
return self._computed_channels
class SimParams(Parameters):
_shared_dict = {'nli_params': NLIParams(), 'raman_params': RamanParams()}
@classmethod
def set_params(cls, sim_params):
cls._shared_dict['nli_params'] = NLIParams(**sim_params.get('nli_params', {}))
cls._shared_dict['raman_params'] = RamanParams(**sim_params.get('raman_params', {}))
def __init__(self, **kwargs):
try:
if 'nli_parameters' in kwargs:
self._nli_params = NLIParams(**kwargs['nli_parameters'])
else:
self._nli_params = None
if 'raman_parameters' in kwargs:
self._raman_params = RamanParams(**kwargs['raman_parameters'])
else:
self._raman_params = None
except KeyError as e:
raise ParametersError(f'Simulation parameters must include {e}. Configuration: {kwargs}')
@property
def nli_params(self):
return self._shared_dict['nli_params']
return self._nli_params
@property
def raman_params(self):
return self._shared_dict['raman_params']
class RoadmParams(Parameters):
def __init__(self, **kwargs):
self.target_pch_out_db = kwargs.get('target_pch_out_db')
self.target_psd_out_mWperGHz = kwargs.get('target_psd_out_mWperGHz')
self.target_out_mWperSlotWidth = kwargs.get('target_out_mWperSlotWidth')
equalisation_type = ['target_pch_out_db', 'target_psd_out_mWperGHz', 'target_out_mWperSlotWidth']
temp = [kwargs.get(k) is not None for k in equalisation_type]
if sum(temp) > 1:
raise ParametersError('ROADM config contains more than one equalisation type.'
+ 'Please choose only one', kwargs)
self.per_degree_pch_out_db = kwargs.get('per_degree_pch_out_db', {})
self.per_degree_pch_psd = kwargs.get('per_degree_psd_out_mWperGHz', {})
self.per_degree_pch_psw = kwargs.get('per_degree_psd_out_mWperSlotWidth', {})
try:
self.add_drop_osnr = kwargs['add_drop_osnr']
self.pmd = kwargs['pmd']
self.pdl = kwargs['pdl']
self.restrictions = kwargs['restrictions']
self.roadm_path_impairments = self.get_roadm_path_impairments(kwargs['roadm-path-impairments'])
except KeyError as e:
raise ParametersError(f'ROADM configurations must include {e}. Configuration: {kwargs}')
self.per_degree_impairments = kwargs.get('per_degree_impairments', [])
self.design_bands = kwargs.get('design_bands', [])
self.per_degree_design_bands = kwargs.get('per_degree_design_bands', {})
def get_roadm_path_impairments(self, path_impairments_list):
"""Get the ROADM list of profiles for impairments definition
transform the ietf model into gnpy internal model: add a path-type in the attributes
"""
if not path_impairments_list:
return {}
authorized_path_types = {
'roadm-express-path': 'express',
'roadm-add-path': 'add',
'roadm-drop-path': 'drop',
}
roadm_path_impairments = {}
for path_impairment in path_impairments_list:
index = path_impairment['roadm-path-impairments-id']
path_type = next(key for key in path_impairment if key in authorized_path_types.keys())
impairment_dict = {'path-type': authorized_path_types[path_type], 'impairment': path_impairment[path_type]}
roadm_path_impairments[index] = RoadmImpairment(impairment_dict)
return roadm_path_impairments
class RoadmPath:
def __init__(self, from_degree, to_degree, path_type, impairment_id=None, impairment=None):
"""Records roadm internal paths, types and impairment
path_type must be in "express", "add", "drop"
impairment_id must be one of the id detailed in equipement
"""
self.from_degree = from_degree
self.to_degree = to_degree
self.path_type = path_type
self.impairment_id = impairment_id
self.impairment = impairment
class RoadmImpairment:
"""Generic definition of impairments for express, add and drop"""
default_values = {
'roadm-pmd': None,
'roadm-cd': None,
'roadm-pdl': None,
'roadm-inband-crosstalk': None,
'roadm-maxloss': 0,
'roadm-osnr': None,
'roadm-pmax': None,
'roadm-noise-figure': None,
'minloss': None,
'typloss': None,
'pmin': None,
'ptyp': None
}
def __init__(self, params):
self.path_type = params.get('path-type')
self.impairments = params['impairment']
class FusedParams(Parameters):
def __init__(self, **kwargs):
self.loss = kwargs['loss'] if 'loss' in kwargs else 1
DEFAULT_RAMAN_COEFFICIENT = {
# SSMF Raman coefficient profile in terms of mode intensity (g0 * A_ff_overlap)
'gamma_raman': array(
[0.0, 8.524419934705497e-16, 2.643567866245371e-15, 4.410548410941305e-15, 6.153422961291078e-15,
7.484924703044943e-15, 8.452060808349209e-15, 9.101549322698156e-15, 9.57837595158966e-15,
1.0008642675474562e-14, 1.0865773569905647e-14, 1.1300776305865833e-14, 1.2143238647099625e-14,
1.3231065750676068e-14, 1.4624900971525384e-14, 1.6013330554840492e-14, 1.7458119359310242e-14,
1.9320241330434762e-14, 2.1720395392873534e-14, 2.4137337406734775e-14, 2.628163218460466e-14,
2.8041019963285974e-14, 2.9723155447089933e-14, 3.129353531005888e-14, 3.251796163324624e-14,
3.3198839487612773e-14, 3.329527690685666e-14, 3.313155691238456e-14, 3.289013852154548e-14,
3.2458917188506916e-14, 3.060684277937575e-14, 3.2660349473783173e-14, 2.957419109657689e-14,
2.518894321396672e-14, 1.734560485857344e-14, 9.902860761605233e-15, 7.219176385099358e-15,
6.079565990401311e-15, 5.828373065963427e-15, 7.20580801091692e-15, 7.561924351387493e-15,
7.621152352332206e-15, 6.8859886780643254e-15, 5.629181047471162e-15, 3.679727598966185e-15,
2.7555869742500355e-15, 2.4810133942597675e-15, 2.2160080532403624e-15, 2.1440626024765557e-15,
2.33873070799544e-15, 2.557317929858713e-15, 3.039839048226572e-15, 4.8337165515610065e-15,
5.4647431818257436e-15, 5.229187813711269e-15, 4.510768525811313e-15, 3.3213473130607794e-15,
2.2602577027996455e-15, 1.969576495866441e-15, 1.5179853954188527e-15, 1.2953988551200156e-15,
1.1304672156251838e-15, 9.10004390675213e-16, 8.432919922183503e-16, 7.849224069008326e-16,
7.827568196032024e-16, 9.000514440646232e-16, 1.3025926460013665e-15, 1.5444108938497558e-15,
1.8795594063060786e-15, 1.7796130169921014e-15, 1.5938159865046653e-15, 1.1585522355108287e-15,
8.507044444633358e-16, 7.625404663756823e-16, 8.14510750925789e-16, 9.047944693473188e-16,
9.636431901702084e-16, 9.298633899602105e-16, 8.349739503637023e-16, 7.482901278066085e-16,
6.240794767134268e-16, 5.00652535687506e-16, 3.553373263685851e-16, 2.0344217706119682e-16,
1.4267522642294203e-16, 8.980016576743517e-17, 2.9829068181832594e-17, 1.4861959129014824e-17,
7.404482113326137e-18]
), # m/W
# SSMF Raman coefficient profile
'g0': array(
[0.00000000e+00, 1.12351610e-05, 3.47838074e-05, 5.79356636e-05, 8.06921680e-05, 9.79845709e-05, 1.10454361e-04,
1.18735302e-04, 1.24736889e-04, 1.30110053e-04, 1.41001273e-04, 1.46383247e-04, 1.57011792e-04, 1.70765865e-04,
1.88408911e-04, 2.05914127e-04, 2.24074028e-04, 2.47508283e-04, 2.77729174e-04, 3.08044243e-04, 3.34764439e-04,
3.56481704e-04, 3.77127256e-04, 3.96269124e-04, 4.10955175e-04, 4.18718761e-04, 4.19511263e-04, 4.17025384e-04,
4.13565369e-04, 4.07726048e-04, 3.83671291e-04, 4.08564283e-04, 3.69571936e-04, 3.14442090e-04, 2.16074535e-04,
1.23097823e-04, 8.95457457e-05, 7.52470400e-05, 7.19806145e-05, 8.87961158e-05, 9.30812065e-05, 9.37058268e-05,
8.45719619e-05, 6.90585286e-05, 4.50407159e-05, 3.36521245e-05, 3.02292475e-05, 2.69376939e-05, 2.60020897e-05,
2.82958958e-05, 3.08667558e-05, 3.66024657e-05, 5.80610307e-05, 6.54797937e-05, 6.25022715e-05, 5.37806442e-05,
3.94996621e-05, 2.68120644e-05, 2.33038554e-05, 1.79140757e-05, 1.52472424e-05, 1.32707565e-05, 1.06541760e-05,
9.84649374e-06, 9.13999627e-06, 9.08971012e-06, 1.04227525e-05, 1.50419271e-05, 1.77838232e-05, 2.15810815e-05,
2.03744008e-05, 1.81939341e-05, 1.31862121e-05, 9.65352116e-06, 8.62698322e-06, 9.18688016e-06, 1.01737784e-05,
1.08017817e-05, 1.03903588e-05, 9.30040333e-06, 8.30809173e-06, 6.90650401e-06, 5.52238029e-06, 3.90648708e-06,
2.22908227e-06, 1.55796177e-06, 9.77218716e-07, 3.23477236e-07, 1.60602454e-07, 7.97306386e-08]
), # [1 / (W m)]
# Note the non-uniform spacing of this range; this is required for properly capturing the Raman peak shape.
'frequency_offset': array([
0., 0.5, 1., 1.5, 2., 2.5, 3., 3.5, 4., 4.5, 5., 5.5, 6., 6.5, 7., 7.5, 8., 8.5, 9., 9.5, 10., 10.5, 11., 11.5,
12., 12.5, 12.75, 13., 13.25, 13.5, 14., 14.5, 14.75, 15., 15.5, 16., 16.5, 17., 17.5, 18., 18.25, 18.5, 18.75,
19., 19.5, 20., 20.5, 21., 21.5, 22., 22.5, 23., 23.5, 24., 24.5, 25., 25.5, 26., 26.5, 27., 27.5, 28., 28.5,
29., 29.5, 30., 30.5, 31., 31.5, 32., 32.5, 33., 33.5, 34., 34.5, 35., 35.5, 36., 36.5, 37., 37.5, 38., 38.5,
39., 39.5, 40., 40.5, 41., 41.5, 42.]) * 1e12, # [Hz]
# Raman profile reference frequency
'reference_frequency': 206.184634112792e12, # [Hz] (1454 nm)
# Raman profile reference effective area
'reference_effective_area': 75.74659443542413e-12 # [m^2] (@1454 nm)
}
class RamanGainCoefficient(namedtuple('RamanGainCoefficient', 'normalized_gamma_raman frequency_offset')):
""" Raman Gain Coefficient Parameters
Based on:
Andrea DAmico, Bruno Correia, Elliot London, Emanuele Virgillito, Giacomo Borraccini, Antonio Napoli,
and Vittorio Curri, "Scalable and Disaggregated GGN Approximation Applied to a C+L+S Optical Network,"
J. Lightwave Technol. 40, 3499-3511 (2022)
Section III.D
"""
return self._raman_params
class FiberParams(Parameters):
@@ -271,95 +141,45 @@ class FiberParams(Parameters):
try:
self._length = convert_length(kwargs['length'], kwargs['length_units'])
# fixed attenuator for padding
self._att_in = kwargs.get('att_in', 0)
self._att_in = kwargs['att_in'] if 'att_in' in kwargs else 0
# if not defined in the network json connector loss in/out
# the None value will be updated in network.py[build_network]
# with default values from eqpt_config.json[Spans]
self._con_in = kwargs.get('con_in')
self._con_out = kwargs.get('con_out')
# Reference frequency (unique for all parameters: beta2, beta3, gamma, effective_area)
self._con_in = kwargs['con_in'] if 'con_in' in kwargs else None
self._con_out = kwargs['con_out'] if 'con_out' in kwargs else None
if 'ref_wavelength' in kwargs:
self._ref_wavelength = kwargs['ref_wavelength']
self._ref_frequency = c / self._ref_wavelength
self._ref_frequency = c / self.ref_wavelength
elif 'ref_frequency' in kwargs:
self._ref_frequency = kwargs['ref_frequency']
self._ref_wavelength = c / self._ref_frequency
self._ref_wavelength = c / self.ref_frequency
else:
self._ref_wavelength = 1550e-9 # conventional central C band wavelength [m]
self._ref_frequency = c / self._ref_wavelength
# Chromatic Dispersion
if 'dispersion_per_frequency' in kwargs:
# Frequency-dependent dispersion
self._dispersion = asarray(kwargs['dispersion_per_frequency']['value']) # s/m/m
self._f_dispersion_ref = asarray(kwargs['dispersion_per_frequency']['frequency']) # Hz
self._dispersion_slope = None
elif 'dispersion' in kwargs:
# Single value dispersion
self._dispersion = asarray(kwargs['dispersion']) # s/m/m
self._dispersion_slope = kwargs.get('dispersion_slope') # s/m/m/m
self._f_dispersion_ref = asarray(self._ref_frequency) # Hz
else:
# Default single value dispersion
self._dispersion = asarray(1.67e-05) # s/m/m
self._dispersion_slope = None
self._f_dispersion_ref = asarray(self.ref_frequency) # Hz
# Effective Area and Nonlinear Coefficient
self._effective_area = kwargs.get('effective_area') # m^2
self._n1 = 1.468
self._core_radius = 4.2e-6 # m
self._n2 = 2.6e-20 # m^2/W
if self._effective_area is not None:
default_gamma = 2 * pi * self._n2 / (self._ref_wavelength * self._effective_area)
self._gamma = kwargs.get('gamma', default_gamma) # 1/W/m
elif 'gamma' in kwargs:
self._gamma = kwargs['gamma'] # 1/W/m
self._effective_area = 2 * pi * self._n2 / (self._ref_wavelength * self._gamma) # m^2
else:
self._effective_area = 83e-12 # m^2
self._gamma = 2 * pi * self._n2 / (self._ref_wavelength * self._effective_area) # 1/W/m
self._contrast = 0.5 * (c / (2 * pi * self._ref_frequency * self._core_radius * self._n1) * exp(
pi * self._core_radius ** 2 / self._effective_area)) ** 2
# Raman Gain Coefficient
raman_coefficient = kwargs.get('raman_coefficient')
if raman_coefficient is None:
self._raman_reference_frequency = DEFAULT_RAMAN_COEFFICIENT['reference_frequency']
frequency_offset = asarray(DEFAULT_RAMAN_COEFFICIENT['frequency_offset'])
gamma_raman = asarray(DEFAULT_RAMAN_COEFFICIENT['gamma_raman'])
stokes_wave = self._raman_reference_frequency - frequency_offset
normalized_gamma_raman = gamma_raman / self._raman_reference_frequency # 1 / m / W / Hz
self._g0 = gamma_raman / self.effective_area_overlap(stokes_wave, self._raman_reference_frequency)
else:
self._raman_reference_frequency = raman_coefficient['reference_frequency']
frequency_offset = asarray(raman_coefficient['frequency_offset'])
stokes_wave = self._raman_reference_frequency - frequency_offset
self._g0 = asarray(raman_coefficient['g0'])
gamma_raman = self._g0 * self.effective_area_overlap(stokes_wave, self._raman_reference_frequency)
normalized_gamma_raman = gamma_raman / self._raman_reference_frequency # 1 / m / W / Hz
# Raman gain coefficient array of the frequency offset constructed such that positive frequency values
# represent a positive power transfer from higher frequency and vice versa
frequency_offset = append(-flip(frequency_offset[1:]), frequency_offset)
normalized_gamma_raman = append(- flip(normalized_gamma_raman[1:]), normalized_gamma_raman)
self._raman_coefficient = RamanGainCoefficient(normalized_gamma_raman, frequency_offset)
# Chromatic dispersion slope fallback and beta coefficients at the reference wavelength
if self._dispersion_slope is None:
self._dispersion_slope = -2 * self._dispersion / self.ref_wavelength  # s/m/m/m
self._beta2 = -(self.ref_wavelength ** 2) * self.dispersion / (2 * pi * c)  # 1/(m * Hz^2)
# Eq. (3.23) in Abramczyk, Halina. "Dispersion phenomena in optical fibers." Virtual European University
# on Lasers. Available online: http://mitr.p.lodz.pl/evu/lectures/Abramczyk3.pdf
# (accessed on 25 March 2018) (2005).
self._beta3 = ((self.dispersion_slope - (4*pi*c/self.ref_wavelength**3) * self.beta2) /
(2*pi*c/self.ref_wavelength**2)**2)
# Polarization Mode Dispersion
self._pmd_coef = kwargs['pmd_coef']  # s/sqrt(m)
self._pmd_coef_defined = kwargs.get('pmd_coef_defined', kwargs['pmd_coef'] is True)
# Loss Coefficient
if isinstance(kwargs['loss_coef'], dict):
self._loss_coef = asarray(kwargs['loss_coef']['value']) * 1e-3  # lineic loss dB/m
self._f_loss_ref = asarray(kwargs['loss_coef']['frequency'])  # Hz
else:
self._loss_coef = asarray(kwargs['loss_coef']) * 1e-3  # lineic loss dB/m
self._f_loss_ref = asarray(self._ref_frequency)  # Hz
# Lumped Losses
self._lumped_losses = kwargs['lumped_losses'] if 'lumped_losses' in kwargs else array([])
self._latency = self._length / (c / self._n1) # s
self._lin_attenuation = db2lin(self.length * self.loss_coef)
self._lin_loss_exp = self.loss_coef / (10 * log10(exp(1))) # linear power exponent loss Neper/m
self._effective_length = (1 - exp(- self.lin_loss_exp * self.length)) / self.lin_loss_exp
self._asymptotic_length = 1 / self.lin_loss_exp
# raman parameters (not compulsory)
self._raman_efficiency = kwargs['raman_efficiency'] if 'raman_efficiency' in kwargs else None
self._pumps_loss_coef = kwargs['pumps_loss_coef'] if 'pumps_loss_coef' in kwargs else None
except KeyError as e:
raise ParametersError(f'Fiber configurations json must include {e}. Configuration: {kwargs}')
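# Worked numbers for the effective/asymptotic length formulas above (illustrative comment only,
# not part of the class): for an 80 km span with a 0.2 dB/km loss coefficient,
#   lin_loss_exp      = 0.2e-3 / (10 * log10(e))           ~ 4.6e-5 Neper/m
#   effective_length  = (1 - exp(-4.6e-5 * 80e3)) / 4.6e-5 ~ 21.2 km
#   asymptotic_length = 1 / 4.6e-5                          ~ 21.7 km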
@@ -392,10 +212,6 @@ class FiberParams(Parameters):
def con_out(self):
return self._con_out
@property
def lumped_losses(self):
return self._lumped_losses
@con_out.setter
def con_out(self, con_out):
self._con_out = con_out
@@ -404,10 +220,6 @@ class FiberParams(Parameters):
def dispersion(self):
return self._dispersion
@property
def f_dispersion_ref(self):
return self._f_dispersion_ref
@property
def dispersion_slope(self):
return self._dispersion_slope
@@ -416,28 +228,10 @@ class FiberParams(Parameters):
def gamma(self):
return self._gamma
def effective_area_scaling(self, frequency):
V = 2 * pi * frequency / c * self._core_radius * self._n1 * sqrt(2 * self._contrast)
w = self._core_radius / sqrt(log(V))
return asarray(pi * w ** 2)
def effective_area_overlap(self, frequency_stokes_wave, frequency_pump):
effective_area_stokes_wave = self.effective_area_scaling(frequency_stokes_wave)
effective_area_pump = self.effective_area_scaling(frequency_pump)
return squeeze(outer(effective_area_stokes_wave, ones(effective_area_pump.size)) + outer(
ones(effective_area_stokes_wave.size), effective_area_pump)) / 2
def gamma_scaling(self, frequency):
return asarray(2 * pi * self._n2 * frequency / (c * self.effective_area_scaling(frequency)))
@property
def pmd_coef(self):
return self._pmd_coef
@property
def pmd_coef_defined(self):
return self._pmd_coef_defined
@property
def ref_wavelength(self):
return self._ref_wavelength
@@ -446,6 +240,14 @@ class FiberParams(Parameters):
def ref_frequency(self):
return self._ref_frequency
@property
def beta2(self):
return self._beta2
@property
def beta3(self):
return self._beta3
@property
def loss_coef(self):
return self._loss_coef
@@ -455,277 +257,31 @@ class FiberParams(Parameters):
return self._f_loss_ref
@property
def raman_coefficient(self):
return self._raman_coefficient
def lin_loss_exp(self):
return self._lin_loss_exp
@property
def latency(self):
return self._latency
def lin_attenuation(self):
return self._lin_attenuation
@property
def effective_length(self):
return self._effective_length
@property
def asymptotic_length(self):
return self._asymptotic_length
@property
def raman_efficiency(self):
return self._raman_efficiency
@property
def pumps_loss_coef(self):
return self._pumps_loss_coef
def asdict(self):
dictionary = super().asdict()
dictionary['loss_coef'] = self.loss_coef * 1e3
dictionary['length_units'] = 'm'
if len(self.lumped_losses) == 0:
dictionary.pop('lumped_losses')
if not self.raman_coefficient:
dictionary.pop('raman_coefficient')
else:
raman_frequency_offset = \
self.raman_coefficient.frequency_offset[self.raman_coefficient.frequency_offset >= 0]
dictionary['raman_coefficient'] = {'g0': self._g0.tolist(),
'frequency_offset': raman_frequency_offset.tolist(),
'reference_frequency': self._raman_reference_frequency}
return dictionary
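# Quick cross-check of the gamma / effective_area relation used in __init__ (illustrative
# comment only): with n2 = 2.6e-20 m^2/W, Aeff = 83e-12 m^2 and ref_wavelength = 1550e-9 m,
#   gamma = 2 * pi * n2 / (ref_wavelength * Aeff) ~ 1.27e-3 1/W/m
# which matches the typical SSMF value of 0.00127 1/W/m.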
class EdfaParams:
default_values = {
'f_min': None,
'f_max': None,
'multi_band': None,
'bands': None,
'type_variety': '',
'type_def': '',
'gain_flatmax': None,
'gain_min': None,
'p_max': None,
'nf_model': None,
'dual_stage_model': None,
'preamp_variety': None,
'booster_variety': None,
'nf_min': None,
'nf_max': None,
'nf_coef': None,
'nf0': None,
'nf_fit_coeff': None,
'nf_ripple': 0,
'dgt': None,
'gain_ripple': 0,
'tilt_ripple': 0,
'f_ripple_ref': None,
'out_voa_auto': False,
'allowed_for_design': False,
'raman': False,
'pmd': 0,
'pdl': 0,
'advance_configurations_from_json': None
}
def __init__(self, **params):
try:
self.type_variety = params['type_variety']
self.type_def = params['type_def']
# Bandwidth
self.f_min = params['f_min']
self.f_max = params['f_max']
self.bandwidth = self.f_max - self.f_min if self.f_max and self.f_min else None
self.f_cent = (self.f_max + self.f_min) / 2 if self.f_max and self.f_min else None
self.f_ripple_ref = params['f_ripple_ref']
self.bands = [{'f_min': params['f_min'],
'f_max': params['f_max']}]
# Gain
self.gain_flatmax = params['gain_flatmax']
self.gain_min = params['gain_min']
gain_ripple = params['gain_ripple']
if gain_ripple == 0:
self.gain_ripple = asarray([0, 0])
self.f_ripple_ref = asarray([self.f_min, self.f_max])
else:
self.gain_ripple = asarray(gain_ripple)
if self.f_ripple_ref is not None:
if (self.f_ripple_ref[0] != self.f_min) or (self.f_ripple_ref[-1] != self.f_max):
raise ParametersError("The reference ripple frequency maximum and minimum have to coincide "
"with the EDFA frequency maximum and minimum.")
elif self.gain_ripple.size != self.f_ripple_ref.size:
raise ParametersError("The reference ripple frequency and the gain ripple must have the same "
"size.")
else:
self.f_ripple_ref = linspace(self.f_min, self.f_max, self.gain_ripple.size)
tilt_ripple = params['tilt_ripple']
if tilt_ripple == 0:
self.tilt_ripple = full(self.gain_ripple.size, 0)
else:
self.tilt_ripple = asarray(tilt_ripple)
if self.tilt_ripple.size != self.gain_ripple.size:
raise ParametersError("The tilt ripple and the gain ripple must have the same size.")
# Power
self.p_max = params['p_max']
# Noise Figure
self.nf_model = params['nf_model']
self.nf_min = params['nf_min']
self.nf_max = params['nf_max']
self.nf_coef = params['nf_coef']
self.nf0 = params['nf0']
self.nf_fit_coeff = params['nf_fit_coeff']
nf_ripple = params['nf_ripple']
if nf_ripple == 0:
self.nf_ripple = full(self.gain_ripple.size, 0)
else:
self.nf_ripple = asarray(nf_ripple)
if self.nf_ripple.size != self.gain_ripple.size:
raise ParametersError(
f"The noise figure ripple and the gain ripple must have the same size. "
f"{self.nf_ripple.size}, {self.gain_ripple.size}")
# VOA
self.out_voa_auto = params['out_voa_auto']
# Dual Stage
self.dual_stage_model = params['dual_stage_model']
if self.dual_stage_model is not None:
# Preamp
self.preamp_variety = params['preamp_variety']
self.preamp_type_def = params['preamp_type_def']
self.preamp_nf_model = params['preamp_nf_model']
self.preamp_nf_fit_coeff = params['preamp_nf_fit_coeff']
self.preamp_gain_min = params['preamp_gain_min']
self.preamp_gain_flatmax = params['preamp_gain_flatmax']
# Booster
self.booster_variety = params['booster_variety']
self.booster_type_def = params['booster_type_def']
self.booster_nf_model = params['booster_nf_model']
self.booster_nf_fit_coeff = params['booster_nf_fit_coeff']
self.booster_gain_min = params['booster_gain_min']
self.booster_gain_flatmax = params['booster_gain_flatmax']
# Others
self.pmd = params['pmd']
self.pdl = params['pdl']
self.raman = params['raman']
self.dgt = params['dgt']
self.advance_configurations_from_json = params['advance_configurations_from_json']
# Design
self.allowed_for_design = params['allowed_for_design']
except KeyError as e:
raise ParametersError(f'Edfa configurations json must include {e}. Configuration: {params}')
def update_params(self, kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class EdfaOperational:
default_values = {
'gain_target': None,
'delta_p': None,
'out_voa': None,
'in_voa': 0,
'tilt_target': None
}
def __init__(self, **operational):
self.update_attr(operational)
def update_attr(self, kwargs):
clean_kwargs = {k: v for k, v in kwargs.items() if v != ''}
for k, v in self.default_values.items():
setattr(self, k, clean_kwargs.get(k, v))
def __repr__(self):
return (f'{type(self).__name__}('
f'gain_target={self.gain_target!r}, '
f'tilt_target={self.tilt_target!r})')
DEFAULT_EDFA_CONFIG = {
"nf_ripple": [
0.0
],
"gain_ripple": [
0.0
],
"f_min": 191.275e12,
"f_max": 196.125e12,
"dgt": [
1.0, 1.017807767853702, 1.0356155337864215, 1.0534217504465226, 1.0712204022764056, 1.0895983485572227,
1.108555289615659, 1.1280891949729075, 1.1476135933863398, 1.1672278304018044, 1.1869318618366975,
1.2067249615595257, 1.2264996957264114, 1.2428104897182262, 1.2556591482982988, 1.2650555289898042,
1.2744470198196236, 1.2838336236692311, 1.2932153453410835, 1.3040618749785347, 1.316383926863083,
1.3301807335621048, 1.3439818461440451, 1.3598972673004606, 1.3779439775587023, 1.3981208704326855,
1.418273806730323, 1.4340878115214444, 1.445565137158368, 1.45273959485914, 1.4599103316162523,
1.4670307626366115, 1.474100442252211, 1.48111939735681, 1.488134243479226, 1.495145456062699,
1.502153039909686, 1.5097346239790443, 1.5178910621476225, 1.5266220576235803, 1.5353620432989845,
1.545374152761467, 1.5566577309558969, 1.569199764184379, 1.5817353179379183, 1.5986915141218316,
1.6201194134191075, 1.6460167077689267, 1.6719047669939942, 1.6918150918099673, 1.7057507692361864,
1.7137640932265894, 1.7217732861435076, 1.7297783508684146, 1.737780757913635, 1.7459181197626403,
1.7541903672600494, 1.7625959636196327, 1.7709972329654864, 1.7793941781790852, 1.7877868031023945,
1.7961751115773796, 1.8045606557581335, 1.8139629377087627, 1.824381436842932, 1.835814081380705,
1.847275503201129, 1.862235672444246, 1.8806927939516411, 1.9026104247588487, 1.9245345552113182,
1.9482128147680253, 1.9736443063300082, 2.0008103857988204, 2.0279625371819305, 2.055100772005235,
2.082225099873648, 2.1183028432496016, 2.16337565384239, 2.2174389328192197, 2.271520771371253,
2.322373696229342, 2.3699990328716107, 2.414398437185221, 2.4587748041127506, 2.499446286796604,
2.5364027376452056, 2.5696460593920065, 2.602860350286428, 2.630396440815385, 2.6521732021128046,
2.6681935771243177, 2.6841217449620203, 2.6947834587664494, 2.705443819238505, 2.714526681131686
]
}
class MultiBandParams:
default_values = {
'bands': [],
'type_variety': '',
'type_def': None,
'allowed_for_design': False
}
def __init__(self, **params):
try:
self.update_attr(params)
except KeyError as e:
raise ParametersError(f'Multiband configurations json must include {e}. Configuration: {params}')
def update_attr(self, kwargs):
clean_kwargs = {k: v for k, v in kwargs.items() if v != ''}
for k, v in self.default_values.items():
# use deepcopy to avoid sharing the same object amongst all instances when v is a list or a dict!
if isinstance(v, (list, dict)):
setattr(self, k, clean_kwargs.get(k, deepcopy(v)))
else:
setattr(self, k, clean_kwargs.get(k, v))
class TransceiverParams:
def __init__(self, **params):
self.design_bands = params.get('design_bands', [])
self.per_degree_design_bands = params.get('per_degree_design_bands', {})
@dataclass
class FrequencyBand:
"""Frequency band
"""
f_min: float
f_max: float
DEFAULT_BANDS_DEFINITION = {
"LBAND": FrequencyBand(f_min=187e12, f_max=189e12),
"CBAND": FrequencyBand(f_min=191.3e12, f_max=196.0e12)
}
# Use this definition to index the amplifier elements of a multiband amplifier.
# This is not the design band.
def find_band_name(band: FrequencyBand) -> str:
"""return the default band name (CBAND, LBAND, ...) that corresponds to the band frequency range
Use the band center frequency: if center frequency is inside the band then returns CBAND.
This is to flexibly encompass all kind of bands definitions.
returns the first matching band name.
"""
for band_name, frequency_range in DEFAULT_BANDS_DEFINITION.items():
center_frequency = (band.f_min + band.f_max) / 2
if center_frequency >= frequency_range.f_min and center_frequency <= frequency_range.f_max:
return band_name
return 'unknown_band'
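# Usage sketch for find_band_name (illustrative only):
#   find_band_name(FrequencyBand(f_min=191.3e12, f_max=196.0e12))  # -> 'CBAND'
#   find_band_name(FrequencyBand(f_min=187e12, f_max=189e12))      # -> 'LBAND'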

File diff suppressed because it is too large

View File

@@ -1,24 +1,18 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.core.utils: utility functions that are used with gnpy
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
'''
gnpy.core.utils
===============
This module contains utility functions that are used with gnpy.
"""
'''
from csv import writer
from numpy import pi, cos, sqrt, log10, linspace, zeros, shape, where, logical_and, mean, array
from scipy import constants
from copy import deepcopy
from typing import List, Union, Dict
from gnpy.core.exceptions import ConfigurationError
@@ -76,7 +70,7 @@ def arrange_frequencies(length, start, stop):
:return: an array of frequencies determined by the spacing parameter
:rtype: numpy.ndarray
"""
return linspace(start, stop, length)
def lin2db(value):
@@ -113,99 +107,7 @@ def db2lin(value):
return 10**(value / 10)
def watt2dbm(value):
"""Convert Watt units to dBm
>>> round(watt2dbm(0.001), 1)
0.0
>>> round(watt2dbm(0.02), 1)
13.0
"""
return lin2db(value * 1e3)
def dbm2watt(value):
"""Convert dBm units to Watt
>>> round(dbm2watt(0), 4)
0.001
>>> round(dbm2watt(-3), 4)
0.0005
>>> round(dbm2watt(13), 4)
0.02
"""
return db2lin(value) * 1e-3
def psd2powerdbm(psd_mwperghz, baudrate_baud):
"""computes power in dBm based on baudrate in bauds and psd in mW/GHz
>>> round(psd2powerdbm(0.031176, 64e9),3)
3.0
>>> round(psd2powerdbm(0.062352, 32e9),3)
3.0
>>> round(psd2powerdbm(0.015625, 64e9),3)
0.0
"""
return lin2db(baudrate_baud * psd_mwperghz * 1e-9)
def power_dbm_to_psd_mw_ghz(power_dbm, baudrate_baud):
"""computes power spectral density in mW/GHz based on baudrate in bauds and power in dBm
>>> power_dbm_to_psd_mw_ghz(0, 64e9)
0.015625
>>> round(power_dbm_to_psd_mw_ghz(3, 64e9), 6)
0.031176
>>> round(power_dbm_to_psd_mw_ghz(3, 32e9), 6)
0.062352
"""
return db2lin(power_dbm) / (baudrate_baud * 1e-9)
def psd_mw_per_ghz(power_watt, baudrate_baud):
"""computes power spectral density in mW/GHz based on baudrate in bauds and power in W
>>> psd_mw_per_ghz(2e-3, 32e9)
0.0625
>>> psd_mw_per_ghz(1e-3, 64e9)
0.015625
>>> psd_mw_per_ghz(0.5e-3, 32e9)
0.015625
"""
return power_watt * 1e3 / (baudrate_baud * 1e-9)
def round2float(number, step):
"""Round a floating point number so that its "resolution" is not bigger than 'step'
The finest step is fixed at 0.01; smaller values are silently changed to 0.01.
>>> round2float(123.456, 1000)
0.0
>>> round2float(123.456, 100)
100.0
>>> round2float(123.456, 10)
120.0
>>> round2float(123.456, 1)
123.0
>>> round2float(123.456, 0.1)
123.5
>>> round2float(123.456, 0.01)
123.46
>>> round2float(123.456, 0.001)
123.46
>>> round2float(123.249, 0.5)
123.0
>>> round2float(123.250, 0.5)
123.0
>>> round2float(123.251, 0.5)
123.5
>>> round2float(123.300, 0.2)
123.2
>>> round2float(123.301, 0.2)
123.4
"""
step = round(step, 1)
if step >= 0.01:
number = round(number / step, 0)
@@ -219,39 +121,25 @@ wavelength2freq = constants.lambda2nu
freq2wavelength = constants.nu2lambda
def freq2wavelength(value):
""" Converts frequency units to wavelength units.
>>> round(freq2wavelength(191.35e12) * 1e9, 3)
1566.723
>>> round(freq2wavelength(196.1e12) * 1e9, 3)
1528.773
"""
return constants.c / value
def snr_sum(snr, bw, snr_added, bw_added=12.5e9):
snr_added = snr_added - lin2db(bw / bw_added)
snr = -lin2db(db2lin(-snr) + db2lin(-snr_added))
return snr
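# Illustrative use of snr_sum (values are made up): adding a 40 dB contribution defined in the
# 12.5 GHz reference bandwidth to a 25 dB SNR measured over a 32 GHz signal bandwidth first
# rescales the contribution to 40 - lin2db(32e9 / 12.5e9) ~ 35.9 dB, then sums the two
# linear noise-to-signal ratios, giving snr_sum(25, 32e9, 40) ~ 24.7 dB.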
def per_label_average(values, labels):
"""computes the average per defined spectrum band, using labels
>>> labels = ['A', 'A', 'A', 'A', 'A', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'B', 'C', 'D', 'D', 'D', 'D']
>>> values = [28.51, 28.23, 28.15, 28.17, 28.36, 28.53, 28.64, 28.68, 28.7, 28.71, 28.72, 28.73, 28.74, 28.91, 27.96, 27.85, 27.87, 28.02]
>>> per_label_average(values, labels)
{'A': 28.28, 'B': 28.68, 'C': 28.91, 'D': 27.92}
"""
label_set = sorted(set(labels))
summary = {}
for label in label_set:
vals = [val for val, lab in zip(values, labels) if lab == label]
summary[label] = round(mean(vals), 2)
return summary
def pretty_summary_print(summary):
"""Build a prettty string that shows the summary dict values per label with 2 digits"""
if len(summary) == 1:
return f'{list(summary.values())[0]:.2f}'
text = ', '.join([f'{label}: {value:.2f}' for label, value in summary.items()])
return text
def deltawl2deltaf(delta_wl, wavelength):
"""deltawl2deltaf(delta_wl, wavelength):
""" deltawl2deltaf(delta_wl, wavelength):
delta_wl is BW in wavelength units
wavelength is the center wl
units for delta_wl and wavelength must be same
@@ -269,9 +157,9 @@ def deltawl2deltaf(delta_wl, wavelength):
def deltaf2deltawl(delta_f, frequency):
"""convert delta frequency to delta wavelength
Units for delta_wl and wavelength must be same.
""" deltawl2deltaf(delta_f, frequency):
converts delta frequency to delta wavelength
units for delta_wl and wavelength must be same
:param delta_f: delta frequency in same units as frequency
:param frequency: frequency BW is relevant for
@@ -286,7 +174,8 @@ def deltaf2deltawl(delta_f, frequency):
def rrc(ffs, baud_rate, alpha):
"""compute the root-raised cosine filter function
""" rrc(ffs, baud_rate, alpha): computes the root-raised cosine filter
function.
:param ffs: A numpy array of frequencies
:param baud_rate: The Baud Rate of the System
@@ -301,18 +190,18 @@ def rrc(ffs, baud_rate, alpha):
Ts = 1 / baud_rate
l_lim = (1 - alpha) / (2 * Ts)
r_lim = (1 + alpha) / (2 * Ts)
hf = zeros(shape(ffs))
slope_inds = where(
logical_and(abs(ffs) > l_lim, abs(ffs) < r_lim))
hf[slope_inds] = 0.5 * (1 + cos((pi * Ts / alpha) *
(abs(ffs[slope_inds]) - l_lim)))
p_inds = where(logical_and(abs(ffs) > 0, abs(ffs) < l_lim))
hf[p_inds] = 1
return sqrt(hf)
def merge_amplifier_restrictions(dict1, dict2):
"""Update contents of dicts recursively
"""Updates contents of dicts recursively
>>> d1 = {'params': {'restrictions': {'preamp_variety_list': [], 'booster_variety_list': []}}}
>>> d2 = {'params': {'target_pch_out_db': -20}}
@@ -334,35 +223,6 @@ def merge_amplifier_restrictions(dict1, dict2):
return copy_dict1
def use_pmd_coef(dict1: dict, dict2: dict):
"""If Fiber dict1 is missing the pmd_coef value then use the one of dict2.
In addition records in "pmd_coef_defined" key the pmd_coef if is was defined in dict1.
:param dict1: A dictionnary that contains "pmd_coef" key.
:type dict1: dict
:param dict2: Another dictionnary that contains "pmd_coef" key.
:type dict2: dict
>>> dict1 = {'a': 1, 'pmd_coef': 1.5e-15}
>>> dict2 = {'a': 2, 'pmd_coef': 2e-15}
>>> use_pmd_coef(dict1, dict2)
>>> dict1
{'a': 1, 'pmd_coef': 1.5e-15, 'pmd_coef_defined': True}
>>> dict1 = {'a': 1}
>>> use_pmd_coef(dict1, dict2)
>>> dict1
{'a': 1, 'pmd_coef_defined': False, 'pmd_coef': 2e-15}
"""
if 'pmd_coef' in dict1 and not dict1['pmd_coef'] \
or ('pmd_coef' not in dict1 and 'pmd_coef' in dict2):
dict1['pmd_coef_defined'] = False
dict1['pmd_coef'] = dict2['pmd_coef']
elif 'pmd_coef' in dict1 and dict1['pmd_coef']:
dict1['pmd_coef_defined'] = True
# all other case do not need any change
def silent_remove(this_list, elem):
"""Remove matching elements from a list without raising ValueError
@@ -436,390 +296,3 @@ def convert_length(value, units):
return value * 1e3
else:
raise ConfigurationError(f'Cannot convert length in "{units}" into meters')
def replace_none(dictionary):
""" Replaces None with inf values in a frequency slots dict
>>> replace_none({'N': 3, 'M': None})
{'N': 3, 'M': inf}
"""
for key, val in dictionary.items():
if val is None:
dictionary[key] = float('inf')
if val == float('inf'):
dictionary[key] = None
return dictionary
def order_slots(slots):
""" Order frequency slots from larger slots to smaller ones up to None
>>> l = [{'N': 3, 'M': None}, {'N': 2, 'M': 1}, {'N': None, 'M': None},{'N': 7, 'M': 2},{'N': None, 'M': 1} , {'N': None, 'M': 0}]
>>> order_slots(l)
([7, 2, None, None, 3, None], [2, 1, 1, 0, None, None], [3, 1, 4, 5, 0, 2])
"""
slots_list = deepcopy(slots)
slots_list = [replace_none(e) for e in slots_list]
for i, e in enumerate(slots_list):
e['i'] = i
slots_list = sorted(slots_list, key=lambda x: (-x['M'], x['N']) if x['M'] != float('inf') else (x['M'], x['N']))
slots_list = [replace_none(e) for e in slots_list]
return [e['N'] for e in slots_list], [e['M'] for e in slots_list], [e['i'] for e in slots_list]
def restore_order(elements, order):
""" Use order to re-order the element of the list, and ignore None values
>>> restore_order([7, 2, None, None, 3, None], [3, 1, 4, 5, 0, 2])
[3, 2, 7]
"""
return [elements[i[0]] for i in sorted(enumerate(order), key=lambda x:x[1]) if elements[i[0]] is not None]
def unique_ordered(elements):
"""
"""
unique_elements = []
for element in elements:
if element not in unique_elements:
unique_elements.append(element)
return unique_elements
def convert_empty_to_none(json_data: Union[list, dict]) -> dict:
"""Convert all instances of "a": [None] into "a": None
:param json_data: the input data.
:type json_data: dict
:return: the converted data.
:rtype: dict
>>> json_data = {
... "uid": "[east edfa in Lannion",
... "type_variety": "multiband_booster",
... "metadata": {
... "location": {
... "latitude": 0.000000,
... "longitude": 0.000000,
... "city": "Zion",
... "region": ""
... }
... },
... "type": "Multiband_amplifier",
... "amplifiers": [{
... "type_variety": "multiband_booster_LOW_C",
... "operational": {
... "gain_target": 12.22,
... "delta_p": 4.19,
... "out_voa": [None],
... "tilt_target": 0.00,
... "f_min": 191.3,
... "f_max": 196.1
... }
... }, {
... "type_variety": "multiband_booster_LOW_L",
... "operational": {
... "gain_target": 12.05,
... "delta_p": 4.19,
... "out_voa": [None],
... "tilt_target": 0.00,
... "f_min": 186.1,
... "f_max": 190.9
... }
... }
... ]
... }
>>> convert_empty_to_none(json_data)
{'uid': '[east edfa in Lannion', 'type_variety': 'multiband_booster', \
'metadata': {'location': {'latitude': 0.0, 'longitude': 0.0, 'city': 'Zion', 'region': ''}}, \
'type': 'Multiband_amplifier', 'amplifiers': [{'type_variety': 'multiband_booster_LOW_C', \
'operational': {'gain_target': 12.22, 'delta_p': 4.19, 'out_voa': None, 'tilt_target': 0.0, \
'f_min': 191.3, 'f_max': 196.1}}, {'type_variety': 'multiband_booster_LOW_L', \
'operational': {'gain_target': 12.05, 'delta_p': 4.19, 'out_voa': None, 'tilt_target': 0.0, \
'f_min': 186.1, 'f_max': 190.9}}]}
"""
if isinstance(json_data, dict):
for key, value in json_data.items():
json_data[key] = convert_empty_to_none(value)
elif isinstance(json_data, list):
if len(json_data) == 1 and json_data[0] is None:
return None
for i, elem in enumerate(json_data):
json_data[i] = convert_empty_to_none(elem)
return json_data
def convert_none_to_empty(json_data: Union[list, dict]) -> dict:
"""Convert all instances of "a": None into "a": [None], to be compliant with RFC7951.
:param json_data: the input data.
:type json_data: dict
:return: the converted data.
:rtype: dict
>>> a = {'uid': '[east edfa in Lannion', 'type_variety': 'multiband_booster',
... 'metadata': {'location': {'latitude': 0.0, 'longitude': 0.0, 'city': 'Zion', 'region': ''}},
... 'type': 'Multiband_amplifier', 'amplifiers': [{'type_variety': 'multiband_booster_LOW_C',
... 'operational': {'gain_target': 12.22, 'delta_p': 4.19, 'out_voa': None, 'tilt_target': 0.0,
... 'f_min': 191.3, 'f_max': 196.1}}, {'type_variety': 'multiband_booster_LOW_L',
... 'operational': {'gain_target': 12.05, 'delta_p': 4.19, 'out_voa': None, 'tilt_target': 0.0,
... 'f_min': 186.1, 'f_max': 190.9}}]}
>>> convert_none_to_empty(a)
{'uid': '[east edfa in Lannion', 'type_variety': 'multiband_booster', \
'metadata': {'location': {'latitude': 0.0, 'longitude': 0.0, 'city': 'Zion', 'region': ''}}, \
'type': 'Multiband_amplifier', 'amplifiers': [{'type_variety': 'multiband_booster_LOW_C', \
'operational': {'gain_target': 12.22, 'delta_p': 4.19, 'out_voa': [None], 'tilt_target': 0.0, \
'f_min': 191.3, 'f_max': 196.1}}, {'type_variety': 'multiband_booster_LOW_L', \
'operational': {'gain_target': 12.05, 'delta_p': 4.19, 'out_voa': [None], 'tilt_target': 0.0, \
'f_min': 186.1, 'f_max': 190.9}}]}
"""
if json_data == [None]:
# already conformed
return json_data
if isinstance(json_data, dict):
for key, value in json_data.items():
json_data[key] = convert_none_to_empty(value)
elif isinstance(json_data, list):
for i, elem in enumerate(json_data):
json_data[i] = convert_none_to_empty(elem)
elif json_data is None:
return [None]
return json_data
def calculate_absolute_min_or_zero(x: array) -> array:
"""Calculates the element-wise absolute minimum between the x and zero.
Parameters:
x (array): The first input array.
Returns:
array: The element-wise absolute minimum between x and zero.
Example:
>>> x = array([-1, 2, -3])
>>> calculate_absolute_min_or_zero(x)
array([1., 0., 3.])
"""
return (abs(x) - x) / 2
def nice_column_str(data: List[List[str]], max_length: int = 30, padding: int = 1) -> str:
"""data is a list of rows, creates strings with nice alignment per colum and padding with spaces
letf justified
>>> table_data = [['aaa', 'b', 'c'], ['aaaaaaaa', 'bbb', 'c'], ['a', 'bbbbbbbbbb', 'c']]
>>> print(nice_column_str(table_data))
aaa b c
aaaaaaaa bbb c
a bbbbbbbbbb c
"""
# transpose data to determine size of columns
transposed_data = list(map(list, zip(*data)))
column_width = [max(len(word) for word in column) + padding for column in transposed_data]
nice_str = []
for row in data:
column = ''.join(word[0:max_length].ljust(min(width, max_length)) for width, word in zip(column_width, row))
nice_str.append(f'{column}')
return '\n'.join(nice_str)
def filter_valid_amp_bands(amp_bands: List[List[dict]]) -> List[List[dict]]:
"""Filter out invalid amplifier bands that lack f_min or f_max.
:param amp_bands: A list of lists containing amplifier band dictionaries.
:type amp_bands: List[List[dict]]
:return: A filtered list of amplifier bands that contain valid f_min and f_max.
:rtype: List[List[dict]]
"""
return [amp for amp in amp_bands if all(band.get('f_min') is not None and band.get('f_max') is not None
for band in amp)]
def remove_duplicates(amp_bands: List[List[dict]]) -> List[List[dict]]:
"""Remove duplicate amplifier bands.
:param amp_bands: A list of lists containing amplifier band dictionaries.
:type amp_bands: List[List[dict]]
:return: A list of unique amplifier bands.
:rtype: List[List[dict]]
"""
unique_amp_bands = []
for amp in amp_bands:
if amp not in unique_amp_bands:
unique_amp_bands.append(amp)
return unique_amp_bands
def calculate_spacing(first: dict, second: dict, default_spacing: float, default_design_bands: Union[List[Dict], None],
f_min: float, f_max: float) -> float:
"""Calculate the spacing for the given frequency range.
:param first: The first amplifier band dictionary.
:type first: dict
:param second: The second amplifier band dictionary.
:type second: dict
:param default_spacing: The default spacing to use if no specific spacing can be determined.
:type default_spacing: float
:param default_design_bands: Optional list of design bands to determine spacing from.
:type default_design_bands: Union[List[Dict], None]
:param f_min: The minimum frequency of the range.
:type f_min: float
:param f_max: The maximum frequency of the range.
:type f_max: float
:return: The calculated spacing for the given frequency range.
:rtype: float
"""
if first.get('spacing') is not None and second.get('spacing') is not None:
return max(first['spacing'], second['spacing'])
elif first.get('spacing') is not None:
return first['spacing']
elif second.get('spacing') is not None:
return second['spacing']
elif default_design_bands:
temp = get_spacing_from_band(default_design_bands, f_min, f_max)
return temp if temp is not None else default_spacing
return default_spacing
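# Illustrative call (hypothetical band dictionaries): only the first band defines a spacing,
# so that value wins over the 50 GHz default:
#   calculate_spacing({'f_min': 191e12, 'f_max': 195e12, 'spacing': 75e9},
#                     {'f_min': 186e12, 'f_max': 193e12}, 50e9, None, 191e12, 193e12)  # -> 75e9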
def find_common_range(amp_bands: List[List[dict]], default_band_f_min: Union[float, None],
default_band_f_max: Union[float, None], default_spacing: float,
default_design_bands: Union[List[Dict], None] = None) -> List[dict]:
"""
Find the common frequency range of amplifier bands.
If there are no amplifiers in the path, then use the default band parameters.
:param amp_bands: A list of lists containing amplifier band dictionaries, each with 'f_min', 'f_max',
and optionally 'spacing'.
:type amp_bands: List[List[dict]]
:param default_band_f_min: The minimum frequency of the default band.
:type default_band_f_min: Union[float, None]
:param default_band_f_max: The maximum frequency of the default band.
:type default_band_f_max: Union[float, None]
:param default_spacing: The default spacing to use if no specific spacing can be determined.
:type default_spacing: float
:param default_design_bands: Optional list of design bands to determine spacing from.
:type default_design_bands: Union[List[Dict], None]
:return: A list of dictionaries representing the common frequency ranges with their respective spacings.
:rtype: List[dict]
>>> amp_bands = [[{'f_min': 191e12, 'f_max' : 195e12, 'spacing': 70e9}, {'f_min': 186e12, 'f_max' : 190e12}], \
[{'f_min': 185e12, 'f_max' : 189e12}, {'f_min': 192e12, 'f_max' : 196e12}], \
[{'f_min': 186e12, 'f_max': 193e12}]]
>>> find_common_range(amp_bands, 190e12, 195e12, 50e9)
[{'f_min': 186000000000000.0, 'f_max': 189000000000000.0, 'spacing': 50000000000.0}, \
{'f_min': 192000000000000.0, 'f_max': 193000000000000.0, 'spacing': 70000000000.0}]
>>> amp_bands = [[{'f_min': 191e12, 'f_max' : 195e12}, {'f_min': 186e12, 'f_max' : 190e12}], \
[{'f_min': 185e12, 'f_max' : 189e12}, {'f_min': 192e12, 'f_max' : 196e12}], \
[{'f_min': 186e12, 'f_max': 192e12}]]
>>> find_common_range(amp_bands, 190e12, 195e12, 50e9)
[{'f_min': 186000000000000.0, 'f_max': 189000000000000.0, 'spacing': 50000000000.0}]
"""
# Step 1: Filter and sort amplifier bands
_amp_bands = [sorted(amp, key=lambda x: x['f_min']) for amp in filter_valid_amp_bands(amp_bands)]
unique_amp_bands = remove_duplicates(_amp_bands)
# Step 2: Handle cases with no valid bands
if unique_amp_bands:
common_range = unique_amp_bands[0]
else:
if default_band_f_min is None or default_band_f_max is None:
return []
return [{'f_min': default_band_f_min, 'f_max': default_band_f_max, 'spacing': None}]
# Step 3: Calculate common frequency range
for bands in unique_amp_bands:
new_common_range = []
for first in common_range:
for second in bands:
f_min = max(first['f_min'], second['f_min'])
f_max = min(first['f_max'], second['f_max'])
if f_min < f_max:
spacing = calculate_spacing(first, second, default_spacing, default_design_bands, f_min, f_max)
new_common_range.append({'f_min': f_min, 'f_max': f_max, 'spacing': spacing})
common_range = new_common_range
return sorted(common_range, key=lambda x: x['f_min'])
def transform_data(data: str) -> Union[List[int], None]:
"""Transforms a float into an list of one integer or a string separated by "|" into a list of integers.
Args:
data (float or str): The data to transform.
Returns:
list of int: The transformed data as a list of integers.
Examples:
>>> transform_data(5.0)
[5]
>>> transform_data('1 | 2 | 3')
[1, 2, 3]
"""
if isinstance(data, float):
return [int(data)]
if isinstance(data, str):
return [int(x) for x in data.split(' | ')]
return None
def convert_pmd_lineic(pmd: Union[float, None], length: float, length_unit: str) -> Union[float, None]:
"""Convert PMD value of the span in ps into pmd_lineic in s/sqrt(km)
:param pmd: value in ps
:type pmd: Union[float, None]
:param length: value in length_unit
:type length: float
:param length_unit: 'km' or 'm'
:type length_unit: str
:return: lineic PMD s/sqrt(m)
:rtype: Union[float, None]
>>> convert_pmd_lineic(10, 0.001, 'km')
1e-11
"""
if pmd:
return pmd * 1e-12 / sqrt(convert_length(length, length_unit))
return None
def get_spacing_from_band(design_bands: List[Dict], f_min, f_max):
"""Retrieve the spacing for a frequency range based on design bands.
This function checks if the midpoint of the provided frequency range (f_min, f_max)
falls within any of the design bands. If it does, the corresponding spacing is returned.
:param design_bands: A list of design band dictionaries, each containing 'f_min', 'f_max', and 'spacing'.
:type design_bands: List[Dict]
:param f_min: The minimum frequency of the range.
:type f_min: float
:param f_max: The maximum frequency of the range.
:type f_max: float
:return: The spacing corresponding to the design band that contains the midpoint of the range,
or None if no such band exists.
:rtype: Union[float, None]
"""
midpoint = (f_min + f_max) / 2
for band in design_bands:
if midpoint >= band['f_min'] and midpoint <= band['f_max']:
return band['spacing']
return None
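# Sketch with a hypothetical design band list:
#   get_spacing_from_band([{'f_min': 191.3e12, 'f_max': 196.1e12, 'spacing': 50e9}], 192e12, 194e12)
# returns 50e9 because the midpoint (193e12) falls inside that band; with a midpoint outside
# every band, None is returned.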
def reorder_per_degree_design_bands(per_degree_design_bands: dict):
"""Sort the design bands for each degree by their minimum frequency (f_min).
This function modifies the input dictionary in place, sorting the design bands for each unique identifier.
:param per_degree_design_bands: A dictionary where keys are unique identifiers and values are lists of design band dictionaries.
:type per_degree_design_bands: Dict[str, List[Dict]]
"""
for uid, design_bands in per_degree_design_bands.items():
per_degree_design_bands[uid] = sorted(design_bands, key=lambda x: x['f_min'])

View File

@@ -1,160 +1,160 @@
{
"nf_fit_coeff": [
0.0008,
0.0272,
-0.2249,
6.4902
],
"f_min": 191.4e12,
"f_max": 196.1e12,
"nf_ripple": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"gain_ripple": [
-0.15656302345061,
-0.22244242043552,
-0.25188965661642,
-0.23575900335007,
-0.20897508375209,
-0.19440221943049,
-0.18324644053602,
-0.18053287269681,
-0.17113588777219,
-0.15460322445561,
-0.13550774706866,
-0.10606051088777,
-0.0765630234506,
-0.04962835008375,
-0.01319618927973,
0.01027114740367,
0.03378873534338,
0.04961788107202,
0.04494451423784,
0.0399193886097,
0.01584903685091,
-0.00420121440538,
-0.01847257118928,
-0.02475397822447,
-0.01053287269681,
0.01509526800668,
0.05921587102177,
0.1191656197655,
0.18147717755444,
0.23579878559464,
0.26941687604691,
0.27836159966498,
0.26956762981574,
0.23826109715241,
0.18936662479061,
0.1204721524288,
0.0453465242881,
-0.00877407872698,
-0.02199015912898,
0.00107516750419,
0.02795958961474,
0.02740682579566,
-0.01028161641541,
-0.05982935510889,
-0.06701528475711,
0.00223094639866,
0.14157768006701,
0.15017064489112
],
"dgt": [
1.0,
1.03941448941778,
1.07773189112355,
1.11575888725852,
1.15209185089701,
1.18632744096844,
1.21911100318577,
1.24931318255134,
1.27657903892303,
1.30069883494415,
1.32210817897091,
1.3405812000038,
1.35690844654118,
1.3710092503689,
1.38430337205545,
1.3966294751726,
1.40864903907609,
1.42089447397912,
1.43476940680732,
1.44977369463316,
1.46637521309853,
1.48420288841848,
1.50335352244996,
1.5242627235492,
1.54578500307573,
1.56750088631614,
1.58973304612691,
1.61073904908309,
1.63068023161292,
1.64799163036252,
1.66286684904577,
1.6761448370895,
1.68845480656382,
1.70379790088896,
1.72461030013125,
1.75428006928365,
1.79748596476494,
1.85543800978691,
1.92915262384742,
2.01414465424155,
2.10336369905543,
2.19013043016015,
2.26678136721453,
2.33147727493671,
2.38192717604575,
2.41879254989742,
2.44342862248888,
2.4553191172498
]
"nf_fit_coeff": [
0.0008,
0.0272,
-0.2249,
6.4902
],
"f_min": 191.35e12,
"f_max": 196.1e12,
"nf_ripple": [
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
],
"gain_ripple": [
0.15017064489112,
0.14157768006701,
0.00223094639866,
-0.06701528475711,
-0.05982935510889,
-0.01028161641541,
0.02740682579566,
0.02795958961474,
0.00107516750419,
-0.02199015912898,
-0.00877407872698,
0.0453465242881,
0.1204721524288,
0.18936662479061,
0.23826109715241,
0.26956762981574,
0.27836159966498,
0.26941687604691,
0.23579878559464,
0.18147717755444,
0.1191656197655,
0.05921587102177,
0.01509526800668,
-0.01053287269681,
-0.02475397822447,
-0.01847257118928,
-0.00420121440538,
0.01584903685091,
0.0399193886097,
0.04494451423784,
0.04961788107202,
0.03378873534338,
0.01027114740367,
-0.01319618927973,
-0.04962835008375,
-0.0765630234506,
-0.10606051088777,
-0.13550774706866,
-0.15460322445561,
-0.17113588777219,
-0.18053287269681,
-0.18324644053602,
-0.19440221943049,
-0.20897508375209,
-0.23575900335007,
-0.25188965661642,
-0.22244242043552,
-0.15656302345061
],
"dgt": [
2.4553191172498,
2.44342862248888,
2.41879254989742,
2.38192717604575,
2.33147727493671,
2.26678136721453,
2.19013043016015,
2.10336369905543,
2.01414465424155,
1.92915262384742,
1.85543800978691,
1.79748596476494,
1.75428006928365,
1.72461030013125,
1.70379790088896,
1.68845480656382,
1.6761448370895,
1.66286684904577,
1.64799163036252,
1.63068023161292,
1.61073904908309,
1.58973304612691,
1.56750088631614,
1.54578500307573,
1.5242627235492,
1.50335352244996,
1.48420288841848,
1.46637521309853,
1.44977369463316,
1.43476940680732,
1.42089447397912,
1.40864903907609,
1.3966294751726,
1.38430337205545,
1.3710092503689,
1.35690844654118,
1.3405812000038,
1.32210817897091,
1.30069883494415,
1.27657903892303,
1.24931318255134,
1.21911100318577,
1.18632744096844,
1.15209185089701,
1.11575888725852,
1.07773189112355,
1.03941448941778,
1.0
]
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,11 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# Utility functions that creates an Eqpt sheet template
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
create_eqpt_sheet.py
====================

View File

@@ -0,0 +1,106 @@
{
"nf_ripple": [
0.0
],
"gain_ripple": [
0.0
],
"dgt": [
2.714526681131686,
2.705443819238505,
2.6947834587664494,
2.6841217449620203,
2.6681935771243177,
2.6521732021128046,
2.630396440815385,
2.602860350286428,
2.5696460593920065,
2.5364027376452056,
2.499446286796604,
2.4587748041127506,
2.414398437185221,
2.3699990328716107,
2.322373696229342,
2.271520771371253,
2.2174389328192197,
2.16337565384239,
2.1183028432496016,
2.082225099873648,
2.055100772005235,
2.0279625371819305,
2.0008103857988204,
1.9736443063300082,
1.9482128147680253,
1.9245345552113182,
1.9026104247588487,
1.8806927939516411,
1.862235672444246,
1.847275503201129,
1.835814081380705,
1.824381436842932,
1.8139629377087627,
1.8045606557581335,
1.7961751115773796,
1.7877868031023945,
1.7793941781790852,
1.7709972329654864,
1.7625959636196327,
1.7541903672600494,
1.7459181197626403,
1.737780757913635,
1.7297783508684146,
1.7217732861435076,
1.7137640932265894,
1.7057507692361864,
1.6918150918099673,
1.6719047669939942,
1.6460167077689267,
1.6201194134191075,
1.5986915141218316,
1.5817353179379183,
1.569199764184379,
1.5566577309558969,
1.545374152761467,
1.5353620432989845,
1.5266220576235803,
1.5178910621476225,
1.5097346239790443,
1.502153039909686,
1.495145456062699,
1.488134243479226,
1.48111939735681,
1.474100442252211,
1.4670307626366115,
1.4599103316162523,
1.45273959485914,
1.445565137158368,
1.4340878115214444,
1.418273806730323,
1.3981208704326855,
1.3779439775587023,
1.3598972673004606,
1.3439818461440451,
1.3301807335621048,
1.316383926863083,
1.3040618749785347,
1.2932153453410835,
1.2838336236692311,
1.2744470198196236,
1.2650555289898042,
1.2556591482982988,
1.2428104897182262,
1.2264996957264114,
1.2067249615595257,
1.1869318618366975,
1.1672278304018044,
1.1476135933863398,
1.1280891949729075,
1.108555289615659,
1.0895983485572227,
1.0712204022764056,
1.0534217504465226,
1.0356155337864215,
1.017807767853702,
1.0
]
}

View File

@@ -1,81 +1,80 @@
{
"network_name": "EDFA Example Network - P2P",
"elements": [
{
"uid": "Site_A",
"type": "Transceiver",
"metadata": {
"location": {
"city": "Site A",
"region": "",
"latitude": 0,
"longitude": 0
}
}
},
{
"uid": "Span1",
"type": "Fiber",
"type_variety": "SSMF",
"params": {
"length": 80,
"loss_coef": 0.2,
"length_units": "km",
"att_in": 0,
"con_in": 0.5,
"con_out": 0.5,
"pmd_coef": 3.0e-15
},
"metadata": {
"location": {
"region": "",
"latitude": 1,
"longitude": 0
}
}
},
{
"uid": "Edfa1",
"type": "Edfa",
"type_variety": "std_low_gain",
"operational": {
"gain_target": 17,
"tilt_target": 0,
"out_voa": 0
},
"metadata": {
"location": {
"region": "",
"latitude": 2,
"longitude": 0
}
}
},
{
"uid": "Site_B",
"type": "Transceiver",
"metadata": {
"location": {
"city": "Site B",
"region": "",
"latitude": 2,
"longitude": 0
}
}
}
],
"connections": [
{
"from_node": "Site_A",
"to_node": "Span1"
},
{
"from_node": "Span1",
"to_node": "Edfa1"
},
{
"from_node": "Edfa1",
"to_node": "Site_B"
}
]
}

View File

@@ -1,20 +1,18 @@
.. _amp_models:
**********************************
Amplifier models and Configuration
**********************************
1. Equipment configuration description
======================================
Equipment description defines equipment types and parameters.
It takes place in the equipment library, such as the **eqpt_config.json** file defined in the example-data folder.
By default **gnpy-transmission-example** uses the **eqpt_config.json** file; this can be changed with the **-e** or **--equipment** command line parameter.
2. Amplifier parameters and subtypes
====================================
Several amplifiers can be used by GNPy, so they are defined as an array of equipment parameters in the **eqpt_config.json** file.
@@ -30,16 +28,9 @@ Several amplifiers can be used by GNpy, so they are defined as an array of equip
- *"variable_gain"*
- *"fixed_gain"*
- *"dual_stage"*
- *"multi_band"*
- *"openroadm"*
*see next section for a full description of these models*
- *"default_config_from_json"*:
Use custom per-frequency dynamic gain tilt, gain and noise ripple arrays defined in the file specified with this option, instead of the default values from GNPy.
- *"advanced_config_from_json"*:
**This parameter is only applicable to the _"advanced_model"_ model**
@@ -144,7 +135,7 @@ Several amplifiers can be used by GNpy, so they are defined as an array of equip
3. Amplifier models
===================
In an open-source and multi-vendor environment, different use cases and contexts need to be supported. Therefore several models are supported for amplifiers.
@@ -188,7 +179,7 @@ In an opensource and multi-vendor environnement, it is needed to support differe
- *"variable_gain"*
This model is referred to as an operator model because a lower level of knowledge is required. A full polynomial description of the NF across the gain range is not required. Instead, NF_min and NF_max values are required and used by the code to model a dual stage amplifier with an internal mid stage VOA (see the illustrative sketch after this list). NF_min and NF_max values are typically available from equipment suppliers' data sheets.
There is a default configuration to enforce 0 tilt and ripple values because the GNPy core algorithm is a multi-carrier propagation.
- gain_ripple =[0,...,0]
- nf_ripple = [0,...,0]
- dgt = [...] generic dgt comb
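As an illustration only (a simplified sketch, not the exact fitting routine used by GNPy), the dual stage behaviour implied by NF_min and NF_max can be reproduced with the classical Friis formula for a first stage, a mid stage VOA and a second stage; increasing the VOA attenuation to reduce the total gain degrades the overall NF from its minimum value:

.. code-block:: python

    from math import log10

    def db2lin(x):
        return 10 ** (x / 10)

    def lin2db(x):
        return 10 * log10(x)

    def dual_stage_nf(nf1_db, gain1_db, nf2_db, voa_db):
        """Friis formula: stage 1, then a mid-stage VOA, then stage 2 (all values in dB)."""
        f1, f2 = db2lin(nf1_db), db2lin(nf2_db)
        g1 = db2lin(gain1_db)
        loss = db2lin(voa_db)  # linear loss of the mid-stage VOA (>= 1)
        f_total = f1 + (loss - 1) / g1 + loss * (f2 - 1) / g1
        return lin2db(f_total)

    # VOA wide open: NF stays close to the first stage NF (NF_min region)
    print(round(dual_stage_nf(5.5, 20, 6.0, 0), 2))   # ~ 5.54
    # 10 dB of mid-stage attenuation: NF degrades toward NF_max
    print(round(dual_stage_nf(5.5, 20, 6.0, 10), 2))  # ~ 5.95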
@@ -236,7 +227,7 @@ In an opensource and multi-vendor environnement, it is needed to support differe
.. code-block:: json-object
"Edfa":[{
"type_variety": "openroadm_ila_low_noise",
"type_variety": "low_noise",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 12,
@@ -259,7 +250,7 @@ In an opensource and multi-vendor environnement, it is needed to support differe
- gain_min indicates to auto_design when this dual_stage should be used
But unlike other models the 1st stage input will not be padded: it is always operated to its maximum gain and min NF. Therefore if gain adaptation and padding is needed it will be performed by the 2nd stage.
.. code-block:: json
@@ -272,18 +263,8 @@ In an opensource and multi-vendor environnement, it is needed to support differe
"allowed_for_design": true
}
- *"multiband"*
This model enables the definition of multiband amplifiers that consist of multiple single-band
amplifier elements, with each amplifier responsible for amplifying a different portion of the spectrum.
The types of single-band amplifiers that can be included in these multiband amplifiers are specified,
allowing for multiple options to be available for the same spectrum band (for instance, providing
several permitted type varieties for both the C-band and the L-band). The actual element utilizing the
type_variety must implement only one option for each band.
4. advanced_config_from_json
============================
The build_oa_json.py library in ``gnpy/example-data/edfa_model/`` can be used to build the json file required for the amplifier advanced_model type_def:
@@ -316,3 +297,4 @@ the json input file should have the following fields:
"gain_ripple": "DFG_filename.txt",
"dgt": "DGT_filename.txt"
}

View File

@@ -1,11 +1,5 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# update an existing json file with all the 96ch txt files for a given amplifier type
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
Created on Tue Jan 30 12:32:00 2018

View File

@@ -1,444 +1,312 @@
{
"Edfa": [
{
"type_variety": "high_detail_model_example",
"type_def": "advanced_model",
"gain_flatmax": 25,
"gain_min": 15,
"p_max": 21,
"advanced_config_from_json": "std_medium_gain_advanced_config.json",
"out_voa_auto": false,
"allowed_for_design": false
},
{
"type_variety": "Juniper_BoosterHG",
"type_def": "advanced_model",
"gain_flatmax": 25,
"gain_min": 10,
"p_max": 21,
"advanced_config_from_json": "Juniper-BoosterHG.json",
"out_voa_auto": false,
"allowed_for_design": false
},
{
"type_variety": "operator_model_example",
"type_def": "variable_gain",
"gain_flatmax": 26,
"gain_min": 15,
"p_max": 23,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": false
},
{
"type_variety": "openroadm_ila_low_noise",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-8.104e-4,
-6.221e-2,
-5.889e-1,
37.62
],
"allowed_for_design": false
},
{
"type_variety": "openroadm_ila_standard",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-5.952e-4,
-6.250e-2,
-1.071,
28.99
],
"allowed_for_design": false
},
{
"type_variety": "openroadm_mw_mw_preamp",
"type_def": "openroadm_preamp",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"allowed_for_design": false
},
{
"type_variety": "openroadm_mw_mw_preamp_typical_ver5",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-5.952e-4,
-6.250e-2,
-1.071,
28.99
],
"allowed_for_design": false
},
{
"type_variety": "openroadm_mw_mw_preamp_worstcase_ver5",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-5.952e-4,
-6.250e-2,
-1.071,
27.99
],
"allowed_for_design": false
},
{
"type_variety": "openroadm_mw_mw_booster",
"type_def": "openroadm_booster",
"gain_flatmax": 32,
"gain_min": 0,
"p_max": 22,
"allowed_for_design": false
},
{
"type_variety": "std_high_gain",
"type_def": "variable_gain",
"gain_flatmax": 35,
"gain_min": 25,
"p_max": 21,
"nf_min": 5.5,
"nf_max": 7,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_medium_gain",
"type_def": "variable_gain",
"gain_flatmax": 26,
"gain_min": 15,
"p_max": 23,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_low_gain",
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 23,
"nf_min": 6.5,
"nf_max": 11,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "high_power",
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 25,
"nf_min": 9,
"nf_max": 15,
"out_voa_auto": false,
"allowed_for_design": false
},
{
"type_variety": "std_fixed_gain",
"type_def": "fixed_gain",
"gain_flatmax": 21,
"gain_min": 20,
"p_max": 21,
"nf0": 5.5,
"allowed_for_design": false
},
{
"type_variety": "4pumps_raman",
"type_def": "fixed_gain",
"gain_flatmax": 12,
"gain_min": 12,
"p_max": 21,
"nf0": -1,
"allowed_for_design": false
},
{
"type_variety": "hybrid_4pumps_lowgain",
"type_def": "dual_stage",
"raman": true,
"gain_min": 25,
"preamp_variety": "4pumps_raman",
"booster_variety": "std_low_gain",
"allowed_for_design": true
},
{
"type_variety": "hybrid_4pumps_mediumgain",
"type_def": "dual_stage",
"raman": true,
"gain_min": 25,
"preamp_variety": "4pumps_raman",
"booster_variety": "std_medium_gain",
"allowed_for_design": true
},
{
"type_variety": "medium+low_gain",
"type_def": "dual_stage",
"gain_min": 25,
"preamp_variety": "std_medium_gain",
"booster_variety": "std_low_gain",
"allowed_for_design": true
},
{
"type_variety": "medium+high_power",
"type_def": "dual_stage",
"gain_min": 25,
"preamp_variety": "std_medium_gain",
"booster_variety": "high_power",
"allowed_for_design": false
}
],
"Fiber": [
{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"effective_area": 83e-12,
"pmd_coef": 1.265e-15
},
{
"type_variety": "NZDF",
"dispersion": 0.5e-05,
"effective_area": 72e-12,
"pmd_coef": 1.265e-15
},
{
"type_variety": "LOF",
"dispersion": 2.2e-05,
"effective_area": 125e-12,
"pmd_coef": 1.265e-15
}
],
"RamanFiber": [
{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"effective_area": 83e-12,
"pmd_coef": 1.265e-15
}
],
"Span": [
{
"power_mode": true,
"delta_power_range_db": [
-2,
3,
0.5
],
"max_fiber_lineic_loss_for_raman": 0.25,
"target_extended_gain": 2.5,
"max_length": 150,
"length_units": "km",
"max_loss": 28,
"padding": 10,
"EOL": 0,
"con_in": 0,
"con_out": 0
}
],
"Roadm": [
{
"target_pch_out_db": -20,
"add_drop_osnr": 38,
"pmd": 0,
"pdl": 0,
"restrictions": {
"preamp_variety_list": [],
"booster_variety_list": []
}
},
{
"type_variety": "roadm_type_1",
"target_pch_out_db": -18,
"add_drop_osnr": 35,
"pmd": 0,
"pdl": 0,
"restrictions": {
"preamp_variety_list": [],
"booster_variety_list": []
},
"roadm-path-impairments": []
},
{
"type_variety": "detailed_impairments",
"target_pch_out_db": -20,
"add_drop_osnr": 38,
"pmd": 0,
"pdl": 0,
"restrictions": {
"preamp_variety_list": [],
"booster_variety_list": []
},
"roadm-path-impairments": [
{
"roadm-path-impairments-id": 0,
"roadm-express-path": [
{ "Edfa":[{
"type_variety": "high_detail_model_example",
"type_def": "advanced_model",
"gain_flatmax": 25,
"gain_min": 15,
"p_max": 21,
"advanced_config_from_json": "std_medium_gain_advanced_config.json",
"out_voa_auto": false,
"allowed_for_design": false
}, {
"type_variety": "Juniper_BoosterHG",
"type_def": "advanced_model",
"gain_flatmax": 25,
"gain_min": 10,
"p_max": 21,
"advanced_config_from_json": "Juniper-BoosterHG.json",
"out_voa_auto": false,
"allowed_for_design": false
},
{
"frequency-range": {
"lower-frequency": 191.3e12,
"upper-frequency": 196.1e12
},
"roadm-pmd": 0,
"roadm-cd": 0,
"roadm-pdl": 0,
"roadm-inband-crosstalk": 0,
"roadm-maxloss": 16.5
}
]
},
{
"roadm-path-impairments-id": 1,
"roadm-add-path": [
"type_variety": "operator_model_example",
"type_def": "variable_gain",
"gain_flatmax": 26,
"gain_min": 15,
"p_max": 23,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": false
},
{
"frequency-range": {
"lower-frequency": 191.3e12,
"upper-frequency": 196.1e12
},
"roadm-pmd": 0,
"roadm-cd": 0,
"roadm-pdl": 0,
"roadm-inband-crosstalk": 0,
"roadm-maxloss": 11.5,
"roadm-pmax": 2.5,
"roadm-osnr": 41,
"roadm-noise-figure": 23
}
]
},
{
"roadm-path-impairments-id": 2,
"roadm-drop-path": [
"type_variety": "low_noise",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 12,
"p_max": 22,
"nf_coef": [-8.104e-4,-6.221e-2,-5.889e-1,37.62],
"allowed_for_design": false
},
{
"frequency-range": {
"lower-frequency": 191.3e12,
"upper-frequency": 196.1e12
},
"roadm-pmd": 0,
"roadm-cd": 0,
"roadm-pdl": 0,
"roadm-inband-crosstalk": 0,
"roadm-maxloss": 11.5,
"roadm-minloss": 7.5,
"roadm-typloss": 10,
"roadm-pmin": -13.5,
"roadm-pmax": -9.5,
"roadm-ptyp": -12,
"roadm-osnr": 41,
"roadm-noise-figure": 15
}
]
}
]
}
],
"SI": [
{
"f_min": 191.3e12,
"baud_rate": 32e9,
"f_max": 195.1e12,
"spacing": 50e9,
"power_dbm": 0,
"power_range_db": [
0,
0,
1
"type_variety": "standard",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 12,
"p_max": 22,
"nf_coef": [-5.952e-4,-6.250e-2,-1.071,28.99],
"allowed_for_design": false
},
{
"type_variety": "std_high_gain",
"type_def": "variable_gain",
"gain_flatmax": 35,
"gain_min": 25,
"p_max": 21,
"nf_min": 5.5,
"nf_max": 7,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_medium_gain",
"type_def": "variable_gain",
"gain_flatmax": 26,
"gain_min": 15,
"p_max": 23,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_low_gain",
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 23,
"nf_min": 6.5,
"nf_max": 11,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "high_power",
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 25,
"nf_min": 9,
"nf_max": 15,
"out_voa_auto": false,
"allowed_for_design": false
},
{
"type_variety": "std_fixed_gain",
"type_def": "fixed_gain",
"gain_flatmax": 21,
"gain_min": 20,
"p_max": 21,
"nf0": 5.5,
"allowed_for_design": false
},
{
"type_variety": "4pumps_raman",
"type_def": "fixed_gain",
"gain_flatmax": 12,
"gain_min": 12,
"p_max": 21,
"nf0": -1,
"allowed_for_design": false
},
{
"type_variety": "hybrid_4pumps_lowgain",
"type_def": "dual_stage",
"raman": true,
"gain_min": 25,
"preamp_variety": "4pumps_raman",
"booster_variety": "std_low_gain",
"allowed_for_design": true
},
{
"type_variety": "hybrid_4pumps_mediumgain",
"type_def": "dual_stage",
"raman": true,
"gain_min": 25,
"preamp_variety": "4pumps_raman",
"booster_variety": "std_medium_gain",
"allowed_for_design": true
},
{
"type_variety": "medium+low_gain",
"type_def": "dual_stage",
"gain_min": 25,
"preamp_variety": "std_medium_gain",
"booster_variety": "std_low_gain",
"allowed_for_design": true
},
{
"type_variety": "medium+high_power",
"type_def": "dual_stage",
"gain_min": 25,
"preamp_variety": "std_medium_gain",
"booster_variety": "high_power",
"allowed_for_design": false
}
],
"tx_power_dbm": 0,
"roll_off": 0.15,
"tx_osnr": 40,
"sys_margins": 2,
"use_si_channel_count_for_design": true
}
],
"Transceiver": [
{
"type_variety": "vendorA_trx-type1",
"frequency": {
"min": 191.35e12,
"max": 196.1e12
},
"mode": [
{
"format": "mode 1",
"baud_rate": 32e9,
"OSNR": 11,
"bit_rate": 100e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 37.5e9,
"cost": 1
},
{
"format": "mode 2",
"baud_rate": 66e9,
"OSNR": 15,
"bit_rate": 200e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 75e9,
"cost": 1
}
"Fiber":[{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"gamma": 0.00127,
"pmd_coef": 1.265e-15
},
{
"type_variety": "NZDF",
"dispersion": 0.5e-05,
"gamma": 0.00146,
"pmd_coef": 1.265e-15
},
{
"type_variety": "LOF",
"dispersion": 2.2e-05,
"gamma": 0.000843,
"pmd_coef": 1.265e-15
}
],
"RamanFiber":[{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"gamma": 0.00127,
"pmd_coef": 1.265e-15,
"raman_efficiency": {
"cr":[
0, 9.4E-06, 2.92E-05, 4.88E-05, 6.82E-05, 8.31E-05, 9.4E-05, 0.0001014, 0.0001069, 0.0001119,
0.0001217, 0.0001268, 0.0001365, 0.000149, 0.000165, 0.000181, 0.0001977, 0.0002192, 0.0002469,
0.0002749, 0.0002999, 0.0003206, 0.0003405, 0.0003592, 0.000374, 0.0003826, 0.0003841, 0.0003826,
0.0003802, 0.0003756, 0.0003549, 0.0003795, 0.000344, 0.0002933, 0.0002024, 0.0001158, 8.46E-05,
7.14E-05, 6.86E-05, 8.5E-05, 8.93E-05, 9.01E-05, 8.15E-05, 6.67E-05, 4.37E-05, 3.28E-05, 2.96E-05,
2.65E-05, 2.57E-05, 2.81E-05, 3.08E-05, 3.67E-05, 5.85E-05, 6.63E-05, 6.36E-05, 5.5E-05, 4.06E-05,
2.77E-05, 2.42E-05, 1.87E-05, 1.6E-05, 1.4E-05, 1.13E-05, 1.05E-05, 9.8E-06, 9.8E-06, 1.13E-05,
1.64E-05, 1.95E-05, 2.38E-05, 2.26E-05, 2.03E-05, 1.48E-05, 1.09E-05, 9.8E-06, 1.05E-05, 1.17E-05,
1.25E-05, 1.21E-05, 1.09E-05, 9.8E-06, 8.2E-06, 6.6E-06, 4.7E-06, 2.7E-06, 1.9E-06, 1.2E-06, 4E-07,
2E-07, 1E-07
],
"frequency_offset":[
0, 0.5e12, 1e12, 1.5e12, 2e12, 2.5e12, 3e12, 3.5e12, 4e12, 4.5e12, 5e12, 5.5e12, 6e12, 6.5e12, 7e12,
7.5e12, 8e12, 8.5e12, 9e12, 9.5e12, 10e12, 10.5e12, 11e12, 11.5e12, 12e12, 12.5e12, 12.75e12,
13e12, 13.25e12, 13.5e12, 14e12, 14.5e12, 14.75e12, 15e12, 15.5e12, 16e12, 16.5e12, 17e12,
17.5e12, 18e12, 18.25e12, 18.5e12, 18.75e12, 19e12, 19.5e12, 20e12, 20.5e12, 21e12, 21.5e12,
22e12, 22.5e12, 23e12, 23.5e12, 24e12, 24.5e12, 25e12, 25.5e12, 26e12, 26.5e12, 27e12, 27.5e12, 28e12,
28.5e12, 29e12, 29.5e12, 30e12, 30.5e12, 31e12, 31.5e12, 32e12, 32.5e12, 33e12, 33.5e12, 34e12, 34.5e12,
35e12, 35.5e12, 36e12, 36.5e12, 37e12, 37.5e12, 38e12, 38.5e12, 39e12, 39.5e12, 40e12, 40.5e12, 41e12,
41.5e12, 42e12
]
}
}
],
"Span":[{
"power_mode":true,
"delta_power_range_db": [-2,3,0.5],
"max_fiber_lineic_loss_for_raman": 0.25,
"target_extended_gain": 2.5,
"max_length": 150,
"length_units": "km",
"max_loss": 28,
"padding": 10,
"EOL": 0,
"con_in": 0,
"con_out": 0
}
],
"Roadm":[{
"target_pch_out_db": -20,
"add_drop_osnr": 38,
"pmd": 0,
"restrictions": {
"preamp_variety_list":[],
"booster_variety_list":[]
}
}],
"SI":[{
"f_min": 191.3e12,
"baud_rate": 32e9,
"f_max":195.1e12,
"spacing": 50e9,
"power_dbm": 0,
"power_range_db": [0,0,1],
"roll_off": 0.15,
"tx_osnr": 40,
"sys_margins": 2
}],
"Transceiver":[
{
"type_variety": "vendorA_trx-type1",
"frequency":{
"min": 191.35e12,
"max": 196.1e12
},
"mode":[
{
"format": "mode 1",
"baud_rate": 32e9,
"OSNR": 11,
"bit_rate": 100e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 37.5e9,
"cost":1
},
{
"format": "mode 2",
"baud_rate": 66e9,
"OSNR": 15,
"bit_rate": 200e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 75e9,
"cost":1
}
]
},
{
"type_variety": "Voyager",
"frequency":{
"min": 191.35e12,
"max": 196.1e12
},
"mode":[
{
"format": "mode 1",
"baud_rate": 32e9,
"OSNR": 12,
"bit_rate": 100e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 37.5e9,
"cost":1
},
{
"format": "mode 3",
"baud_rate": 44e9,
"OSNR": 18,
"bit_rate": 300e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 62.5e9,
"cost":1
},
{
"format": "mode 2",
"baud_rate": 66e9,
"OSNR": 21,
"bit_rate": 400e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 75e9,
"cost":1
},
{
"format": "mode 4",
"baud_rate": 66e9,
"OSNR": 16,
"bit_rate": 200e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 75e9,
"cost":1
}
]
}
]
},
{
"type_variety": "Voyager",
"frequency": {
"min": 191.35e12,
"max": 196.1e12
},
"mode": [
{
"format": "mode 1",
"baud_rate": 32e9,
"OSNR": 12,
"bit_rate": 100e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 37.5e9,
"cost": 1
},
{
"format": "mode 3",
"baud_rate": 44e9,
"OSNR": 18,
"bit_rate": 300e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 62.5e9,
"cost": 1
},
{
"format": "mode 2",
"baud_rate": 66e9,
"OSNR": 21,
"bit_rate": 400e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 75e9,
"cost": 1
},
{
"format": "mode 4",
"baud_rate": 66e9,
"OSNR": 16,
"bit_rate": 200e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 75e9,
"cost": 1
}
]
}
]
}
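The RamanFiber entry above carries a raman_efficiency table: cr samples indexed by pump-to-signal frequency_offset. A minimal numpy sketch of interpolating such a table at an arbitrary offset; it is illustrative only, not GNPy's internal Raman solver, and only the first few samples of the table are reproduced.

import numpy as np

# Interpolate the Raman efficiency table at an arbitrary pump-signal offset.
frequency_offset = np.array([0, 0.5e12, 1e12, 1.5e12])   # Hz, truncated for brevity
cr = np.array([0, 9.4e-06, 2.92e-05, 4.88e-05])          # same units as the JSON table

offset = 0.75e12                                          # 0.75 THz
print(np.interp(offset, frequency_offset, cr))            # ~1.9e-05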


@@ -1,479 +0,0 @@
{
"Edfa": [
{
"type_variety": "std_high_gain",
"type_def": "variable_gain",
"gain_flatmax": 35,
"gain_min": 25,
"p_max": 21,
"nf_min": 5.5,
"nf_max": 7,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_medium_gain",
"type_def": "variable_gain",
"gain_flatmax": 26,
"gain_min": 15,
"p_max": 23,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_low_gain_reduced",
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 23,
"nf_min": 6.5,
"nf_max": 11,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "high_power",
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 25,
"nf_min": 9,
"nf_max": 15,
"out_voa_auto": false,
"allowed_for_design": false
},
{
"type_variety": "std_fixed_gain",
"type_def": "fixed_gain",
"gain_flatmax": 21,
"gain_min": 20,
"p_max": 21,
"nf0": 5.5,
"allowed_for_design": false
},
{
"type_variety": "4pumps_raman",
"type_def": "fixed_gain",
"gain_flatmax": 12,
"gain_min": 12,
"p_max": 21,
"nf0": -1,
"allowed_for_design": false
},
{
"type_variety": "hybrid_4pumps_lowgain",
"type_def": "dual_stage",
"raman": true,
"gain_min": 25,
"preamp_variety": "4pumps_raman",
"booster_variety": "std_low_gain",
"allowed_for_design": true
},
{
"type_variety": "hybrid_4pumps_mediumgain",
"type_def": "dual_stage",
"raman": true,
"gain_min": 25,
"preamp_variety": "4pumps_raman",
"booster_variety": "std_medium_gain",
"allowed_for_design": true
},
{
"type_variety": "medium+low_gain",
"type_def": "dual_stage",
"gain_min": 25,
"preamp_variety": "std_medium_gain",
"booster_variety": "std_low_gain",
"allowed_for_design": true
},
{
"type_variety": "medium+high_power",
"type_def": "dual_stage",
"gain_min": 25,
"preamp_variety": "std_medium_gain",
"booster_variety": "high_power",
"allowed_for_design": false
},
{
"type_variety": "std_medium_gain_C",
"f_min": 191.225e12,
"f_max": 196.125e12,
"type_def": "variable_gain",
"gain_flatmax": 26,
"gain_min": 15,
"p_max": 21,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": false
},
{
"type_variety": "std_medium_gain_L",
"f_min": 186.5e12,
"f_max": 190.1e12,
"type_def": "variable_gain",
"gain_flatmax": 26,
"gain_min": 15,
"p_max": 21,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_low_gain",
"f_min": 191.25e12,
"f_max": 196.15e12,
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 21,
"nf_min": 7,
"nf_max": 11,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_low_gain_reduced_band",
"f_min": 192.25e12,
"f_max": 196.15e12,
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 21,
"nf_min": 7,
"nf_max": 11,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_low_gain_bis",
"f_min": 191.25e12,
"f_max": 196.15e12,
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 21,
"nf_min": 6,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_low_gain_L_ter",
"f_min": 186.55e12,
"f_max": 190.05e12,
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 16,
"nf_min": 7,
"nf_max": 11,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_low_gain_L",
"f_min": 186.55e12,
"f_max": 190.05e12,
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 21,
"nf_min": 7,
"nf_max": 11,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "std_low_gain_L_reduced_band",
"f_min": 187.3e12,
"f_max": 190.05e12,
"type_def": "variable_gain",
"gain_flatmax": 16,
"gain_min": 8,
"p_max": 21,
"nf_min": 7,
"nf_max": 11,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "test",
"type_def": "variable_gain",
"gain_flatmax": 25,
"gain_min": 15,
"p_max": 21,
"nf_min": 5.8,
"nf_max": 10,
"out_voa_auto": false,
"allowed_for_design": true
},
{
"type_variety": "test_fixed_gain",
"type_def": "fixed_gain",
"gain_flatmax": 21,
"gain_min": 20,
"p_max": 21,
"nf0": 5,
"allowed_for_design": true
},
{
"type_variety": "std_booster",
"type_def": "fixed_gain",
"gain_flatmax": 21,
"gain_min": 20,
"p_max": 21,
"nf0": 5,
"allowed_for_design": false
},
{
"type_variety": "std_booster_L",
"f_min": 186.55e12,
"f_max": 190.05e12,
"type_def": "fixed_gain",
"gain_flatmax": 21,
"gain_min": 20,
"p_max": 21,
"nf0": 5,
"allowed_for_design": false
},
{
"type_variety": "std_booster_multiband",
"type_def": "multi_band",
"amplifiers": [
"std_booster",
"std_booster_L"
],
"allowed_for_design": false
},
{
"type_variety": "std_medium_gain_multiband",
"type_def": "multi_band",
"amplifiers": [
"std_medium_gain_C",
"std_medium_gain_L"
],
"allowed_for_design": false
},
{
"type_variety": "std_low_gain_multiband",
"type_def": "multi_band",
"amplifiers": [
"std_low_gain",
"std_low_gain_L"
],
"allowed_for_design": false
},
{
"type_variety": "std_low_gain_multiband_ter",
"type_def": "multi_band",
"amplifiers": [
"std_low_gain",
"std_low_gain_L_ter"
],
"allowed_for_design": false
},
{
"type_variety": "std_low_gain_multiband_bis",
"type_def": "multi_band",
"amplifiers": [
"std_low_gain_bis",
"std_low_gain_L"
],
"allowed_for_design": true
},
{
"type_variety": "std_low_gain_multiband_reduced",
"type_def": "multi_band",
"amplifiers": [
"std_low_gain_reduced",
"std_low_gain_L"
],
"allowed_for_design": true
},
{
"type_variety": "std_low_gain_multiband_reduced_bis",
"type_def": "multi_band",
"amplifiers": [
"std_low_gain_bis",
"std_low_gain_L_reduced_band"
],
"allowed_for_design": true
}
],
"Fiber": [
{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"effective_area": 83e-12,
"pmd_coef": 1.265e-15
},
{
"type_variety": "NZDF",
"dispersion": 0.5e-05,
"effective_area": 72e-12,
"pmd_coef": 1.265e-15
},
{
"type_variety": "LOF",
"dispersion": 2.2e-05,
"effective_area": 125e-12,
"pmd_coef": 1.265e-15
}
],
"RamanFiber": [
{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"effective_area": 83e-12,
"pmd_coef": 1.265e-15
}
],
"Span": [
{
"power_mode": true,
"delta_power_range_db": [
-2,
3,
0.5
],
"max_fiber_lineic_loss_for_raman": 0.25,
"target_extended_gain": 2.5,
"max_length": 150,
"length_units": "km",
"max_loss": 28,
"padding": 10,
"EOL": 0,
"con_in": 0,
"con_out": 0
}
],
"Roadm": [
{
"target_pch_out_db": -20,
"add_drop_osnr": 38,
"pmd": 0,
"pdl": 0,
"restrictions": {
"preamp_variety_list": [],
"booster_variety_list": []
}
}
],
"SI": [
{
"f_min": 191.3e12,
"baud_rate": 32e9,
"f_max": 195.1e12,
"spacing": 50e9,
"power_dbm": 0,
"power_range_db": [
0,
0,
1
],
"roll_off": 0.15,
"tx_osnr": 40,
"sys_margins": 2
},
{
"type_variety": "lband",
"f_min": 186.3e12,
"baud_rate": 32e9,
"f_max": 190.1e12,
"spacing": 50e9,
"power_dbm": 0,
"power_range_db": [
0,
0,
1
],
"roll_off": 0.15,
"tx_osnr": 40,
"sys_margins": 2
}
],
"Transceiver": [
{
"type_variety": "vendorA_trx-type1",
"frequency": {
"min": 191.35e12,
"max": 196.1e12
},
"mode": [
{
"format": "mode 1",
"baud_rate": 32e9,
"OSNR": 11,
"bit_rate": 100e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 37.5e9,
"cost": 1
},
{
"format": "mode 2",
"baud_rate": 66e9,
"OSNR": 15,
"bit_rate": 200e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 75e9,
"cost": 1
}
]
},
{
"type_variety": "Voyager",
"frequency": {
"min": 191.35e12,
"max": 196.1e12
},
"mode": [
{
"format": "mode 1",
"baud_rate": 32e9,
"OSNR": 12,
"bit_rate": 100e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 37.5e9,
"cost": 1
},
{
"format": "mode 3",
"baud_rate": 44e9,
"OSNR": 18,
"bit_rate": 300e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 62.5e9,
"cost": 1
},
{
"format": "mode 2",
"baud_rate": 66e9,
"OSNR": 21,
"bit_rate": 400e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 75e9,
"cost": 1
},
{
"format": "mode 4",
"baud_rate": 66e9,
"OSNR": 16,
"bit_rate": 200e9,
"roll_off": 0.15,
"tx_osnr": 40,
"min_spacing": 75e9,
"cost": 1
}
]
}
]
}
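The dual_stage entries above only reference a preamp_variety and a booster_variety. As a reminder of why a low-noise (here Raman-assisted, negative-NF) first stage dominates the cascade, a short Friis-formula sketch; this is textbook math, not necessarily the exact computation GNPy performs for dual_stage amplifiers.

from math import log10

def db2lin(x):
    return 10 ** (x / 10)

def lin2db(x):
    return 10 * log10(x)

# Two cascaded stages, e.g. "4pumps_raman" (nf0 = -1 dB, 12 dB gain) followed by
# "std_low_gain" (nf_min = 6.5 dB): the first stage dominates the overall NF.
nf1_db, gain1_db = -1.0, 12.0
nf2_db = 6.5

nf_total_db = lin2db(db2lin(nf1_db) + (db2lin(nf2_db) - 1) / db2lin(gain1_db))
print(round(nf_total_db, 2))   # ~0.06 dB effective noise figure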


@@ -1,371 +0,0 @@
{
"Edfa": [
{
"type_variety": "openroadm_ila_low_noise",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-8.104e-4,
-6.221e-2,
-5.889e-1,
37.62
],
"pmd": 3e-12,
"pdl": 0.7,
"allowed_for_design": true
},
{
"type_variety": "openroadm_ila_standard",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-5.952e-4,
-6.250e-2,
-1.071,
28.99
],
"pmd": 3e-12,
"pdl": 0.7,
"allowed_for_design": true
},
{
"type_variety": "openroadm_mw_mw_preamp",
"type_def": "openroadm_preamp",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"pmd": 0,
"pdl": 0,
"allowed_for_design": false
},
{
"type_variety": "openroadm_mw_mw_booster",
"type_def": "openroadm_booster",
"gain_flatmax": 32,
"gain_min": 0,
"p_max": 22,
"pmd": 0,
"pdl": 0,
"allowed_for_design": false
}
],
"Fiber": [
{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"effective_area": 83e-12,
"pmd_coef": 1.265e-15
},
{
"type_variety": "NZDF",
"dispersion": 0.5e-05,
"effective_area": 72e-12,
"pmd_coef": 1.265e-15
},
{
"type_variety": "LOF",
"dispersion": 2.2e-05,
"effective_area": 125e-12,
"pmd_coef": 1.265e-15
}
],
"RamanFiber": [
{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"effective_area": 83e-12,
"pmd_coef": 1.265e-15
}
],
"Span": [
{
"power_mode": true,
"delta_power_range_db": [
0,
0,
0
],
"max_fiber_lineic_loss_for_raman": 0.25,
"target_extended_gain": 0,
"max_length": 135,
"length_units": "km",
"max_loss": 28,
"padding": 11,
"EOL": 0,
"con_in": 0,
"con_out": 0
}
],
"Roadm": [
{
"target_pch_out_db": -20,
"add_drop_osnr": 30,
"pmd": 3e-12,
"pdl": 1.5,
"restrictions": {
"preamp_variety_list": [
"openroadm_mw_mw_preamp"
],
"booster_variety_list": [
"openroadm_mw_mw_booster"
]
}
}
],
"SI": [
{
"f_min": 191.3e12,
"baud_rate": 31.57e9,
"f_max": 196.1e12,
"spacing": 50e9,
"power_dbm": 2,
"power_range_db": [
0,
0,
1
],
"roll_off": 0.15,
"tx_osnr": 35,
"sys_margins": 2
}
],
"Transceiver": [
{
"type_variety": "OpenROADM MSA ver. 4.0",
"frequency": {
"min": 191.35e12,
"max": 196.1e12
},
"mode": [
{
"format": "100 Gbit/s, 27.95 Gbaud, DP-QPSK",
"baud_rate": 27.95e9,
"OSNR": 17,
"bit_rate": 100e9,
"roll_off": null,
"tx_osnr": 33,
"penalties": [
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 18e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 30,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
},
{
"pdl": 6,
"penalty_value": 4
}
],
"min_spacing": 50e9,
"cost": 1
},
{
"format": "100 Gbit/s, 31.57 Gbaud, DP-QPSK",
"baud_rate": 31.57e9,
"OSNR": 12,
"bit_rate": 100e9,
"roll_off": 0.15,
"tx_osnr": 35,
"penalties": [
{
"chromatic_dispersion": -1e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 40e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 30,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
},
{
"pdl": 6,
"penalty_value": 4
}
],
"min_spacing": 50e9,
"cost": 1
},
{
"format": "200 Gbit/s, DP-QPSK",
"baud_rate": 63.1e9,
"OSNR": 17,
"bit_rate": 200e9,
"roll_off": 0.15,
"tx_osnr": 36,
"penalties": [
{
"chromatic_dispersion": -1e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 24e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 25,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
}
],
"min_spacing": 87.5e9,
"cost": 1
},
{
"format": "300 Gbit/s, DP-8QAM",
"baud_rate": 63.1e9,
"OSNR": 21,
"bit_rate": 300e9,
"roll_off": 0.15,
"tx_osnr": 36,
"penalties": [
{
"chromatic_dispersion": -1e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 18e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 25,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
}
],
"min_spacing": 87.5e9,
"cost": 1
},
{
"format": "400 Gbit/s, DP-16QAM",
"baud_rate": 63.1e9,
"OSNR": 24,
"bit_rate": 400e9,
"roll_off": 0.15,
"tx_osnr": 36,
"penalties": [
{
"chromatic_dispersion": -1e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 12e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 20,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
}
],
"min_spacing": 87.5e9,
"cost": 1
}
]
}
]
}
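Each OpenROADM transceiver mode above pairs a required OSNR with a min_spacing. A minimal sketch of picking the highest-bit-rate mode that still fits a given channel spacing; the filtering idea mirrors what a planner needs to do, but it is not GNPy's trx_mode_params implementation.

# Modes copied from the "OpenROADM MSA ver. 4.0" listing above (subset of fields).
modes = [
    {"format": "100 Gbit/s, 31.57 Gbaud, DP-QPSK", "bit_rate": 100e9, "min_spacing": 50e9, "OSNR": 12},
    {"format": "200 Gbit/s, DP-QPSK", "bit_rate": 200e9, "min_spacing": 87.5e9, "OSNR": 17},
    {"format": "400 Gbit/s, DP-16QAM", "bit_rate": 400e9, "min_spacing": 87.5e9, "OSNR": 24},
]

spacing = 75e9
feasible = [m for m in modes if m["min_spacing"] <= spacing]
best = max(feasible, key=lambda m: m["bit_rate"])
print(best["format"], "requires", best["OSNR"], "dB OSNR")   # only the 100G mode fits a 75 GHz grid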


@@ -1,441 +0,0 @@
{
"Edfa": [
{
"type_variety": "openroadm_ila_low_noise",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-8.104e-4,
-6.221e-2,
-5.889e-1,
37.62
],
"pmd": 3e-12,
"pdl": 0.7,
"allowed_for_design": true
},
{
"type_variety": "openroadm_ila_standard",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-5.952e-4,
-6.250e-2,
-1.071,
28.99
],
"pmd": 3e-12,
"pdl": 0.7,
"allowed_for_design": true
},
{
"type_variety": "openroadm_mw_mw_preamp_typical_ver5",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-5.952e-4,
-6.250e-2,
-1.071,
28.99
],
"pmd": 0,
"pdl": 0,
"allowed_for_design": false
},
{
"type_variety": "openroadm_mw_mw_preamp_worstcase_ver5",
"type_def": "openroadm",
"gain_flatmax": 27,
"gain_min": 0,
"p_max": 22,
"nf_coef": [
-5.952e-4,
-6.250e-2,
-1.071,
27.99
],
"pmd": 0,
"pdl": 0,
"allowed_for_design": false
},
{
"type_variety": "openroadm_mw_mw_booster",
"type_def": "openroadm_booster",
"gain_flatmax": 32,
"gain_min": 0,
"p_max": 22,
"pmd": 0,
"pdl": 0,
"allowed_for_design": false
}
],
"Fiber": [
{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"effective_area": 83e-12,
"pmd_coef": 1.265e-15
},
{
"type_variety": "NZDF",
"dispersion": 0.5e-05,
"effective_area": 72e-12,
"pmd_coef": 1.265e-15
},
{
"type_variety": "LOF",
"dispersion": 2.2e-05,
"effective_area": 125e-12,
"pmd_coef": 1.265e-15
}
],
"RamanFiber": [
{
"type_variety": "SSMF",
"dispersion": 1.67e-05,
"effective_area": 83e-12,
"pmd_coef": 1.265e-15
}
],
"Span": [
{
"power_mode": true,
"delta_power_range_db": [
0,
0,
0
],
"max_fiber_lineic_loss_for_raman": 0.25,
"target_extended_gain": 0,
"max_length": 135,
"length_units": "km",
"max_loss": 28,
"padding": 11,
"EOL": 0,
"con_in": 0,
"con_out": 0
}
],
"Roadm": [
{
"target_pch_out_db": -20,
"add_drop_osnr": 33,
"pmd": 3e-12,
"pdl": 1.5,
"restrictions": {
"preamp_variety_list": [
"openroadm_mw_mw_preamp_worstcase_ver5"
],
"booster_variety_list": [
"openroadm_mw_mw_booster"
]
}
}
],
"SI": [
{
"f_min": 191.3e12,
"baud_rate": 31.57e9,
"f_max": 196.1e12,
"spacing": 50e9,
"power_dbm": 2,
"power_range_db": [
0,
0,
1
],
"roll_off": 0.15,
"tx_osnr": 35,
"sys_margins": 2
}
],
"Transceiver": [
{
"type_variety": "OpenROADM MSA ver. 5.0",
"frequency": {
"min": 191.35e12,
"max": 196.1e12
},
"mode": [
{
"format": "100 Gbit/s, 27.95 Gbaud, DP-QPSK",
"baud_rate": 27.95e9,
"OSNR": 17,
"bit_rate": 100e9,
"roll_off": null,
"tx_osnr": 33,
"penalties": [
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 18e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 30,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
},
{
"pdl": 6,
"penalty_value": 4
}
],
"min_spacing": 50e9,
"cost": 1
},
{
"format": "100 Gbit/s, 31.57 Gbaud, DP-QPSK",
"baud_rate": 31.57e9,
"OSNR": 12,
"bit_rate": 100e9,
"roll_off": 0.15,
"tx_osnr": 36,
"penalties": [
{
"chromatic_dispersion": -1e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 48e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 30,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
},
{
"pdl": 6,
"penalty_value": 4
}
],
"min_spacing": 50e9,
"cost": 1
},
{
"format": "200 Gbit/s, 31.57 Gbaud, DP-16QAM",
"baud_rate": 31.57e9,
"OSNR": 20.5,
"bit_rate": 100e9,
"roll_off": 0.15,
"tx_osnr": 36,
"penalties": [
{
"chromatic_dispersion": -1e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 24e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 30,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
},
{
"pdl": 6,
"penalty_value": 4
}
],
"min_spacing": 50e9,
"cost": 1
},
{
"format": "200 Gbit/s, DP-QPSK",
"baud_rate": 63.1e9,
"OSNR": 17,
"bit_rate": 200e9,
"roll_off": 0.15,
"tx_osnr": 36,
"penalties": [
{
"chromatic_dispersion": -1e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 24e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 25,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
}
],
"min_spacing": 87.5e9,
"cost": 1
},
{
"format": "300 Gbit/s, DP-8QAM",
"baud_rate": 63.1e9,
"OSNR": 21,
"bit_rate": 300e9,
"roll_off": 0.15,
"tx_osnr": 36,
"penalties": [
{
"chromatic_dispersion": -1e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 18e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 25,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
}
],
"min_spacing": 87.5e9,
"cost": 1
},
{
"format": "400 Gbit/s, DP-16QAM",
"baud_rate": 63.1e9,
"OSNR": 24,
"bit_rate": 400e9,
"roll_off": 0.15,
"tx_osnr": 36,
"penalties": [
{
"chromatic_dispersion": -1e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 4e3,
"penalty_value": 0
},
{
"chromatic_dispersion": 12e3,
"penalty_value": 0.5
},
{
"pmd": 10,
"penalty_value": 0
},
{
"pmd": 20,
"penalty_value": 0.5
},
{
"pdl": 1,
"penalty_value": 0.5
},
{
"pdl": 2,
"penalty_value": 1
},
{
"pdl": 4,
"penalty_value": 2.5
}
],
"min_spacing": 87.5e9,
"cost": 1
}
]
}
]
}
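The penalties lists above give sampled penalty_value points versus chromatic_dispersion, pmd or pdl. A minimal sketch interpolating a CD penalty for a computed path dispersion; GNPy charges such penalties against the SNR margin, but the lookup below is only an illustration.

import numpy as np

# CD penalty points for the "400 Gbit/s, DP-16QAM" mode above: 0 dB up to 4000 ps/nm,
# 0.5 dB at 12000 ps/nm; interpolate linearly in between.
cd_points = np.array([-1e3, 4e3, 12e3])        # ps/nm
penalty_db = np.array([0.0, 0.0, 0.5])

path_cd = 8e3                                  # ps/nm accumulated on the path
print(np.interp(path_cd, cd_points, penalty_db))   # 0.25 dB penalty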


@@ -1,74 +0,0 @@
{
"Edfa": [
{
"type_variety": "user_defined",
"type_def": "variable_gain",
"f_min": 192.0e12,
"f_max": 195.9e12,
"gain_flatmax": 25,
"gain_min": 15,
"p_max": 21,
"nf_min": 6,
"nf_max": 10,
"default_config_from_json": "user_edfa_config.json",
"out_voa_auto": false,
"allowed_for_design": true
}, {
"type_variety": "user_high_detail_model_example",
"type_def": "advanced_model",
"gain_flatmax": 25,
"gain_min": 15,
"p_max": 21,
"advanced_config_from_json": "std_medium_gain_advanced_config.json",
"out_voa_auto": false,
"allowed_for_design": false
}
],
"Transceiver": [
{
"type_variety": "ZR400G",
"frequency": {
"min": 191.3e12,
"max": 196.1e12
},
"mode": [
{
"format": "SFF-ID:70",
"baud_rate": 60138546798,
"OSNR": 24,
"bit_rate": 400e9,
"roll_off": 0.2,
"tx_osnr": 34,
"min_spacing": 75e9,
"penalties": [
{
"chromatic_dispersion": 20e3,
"penalty_value": 0.5
},
{
"chromatic_dispersion": 0,
"penalty_value": 0
},
{
"pmd": 20,
"penalty_value": 0.5
},
{
"pdl": 1.5,
"penalty_value": 0
},
{
"pdl": 3.5,
"penalty_value": 1.8
},
{
"pdl": 3,
"penalty_value": 1.3
}
],
"cost": 1
}
]
}
]
}
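The ZR400G mode above specifies both a required back-to-back OSNR (24 dB) and a tx_osnr (34 dB). A small sketch combining the transmitter OSNR with a line OSNR through inverse-linear addition to check the remaining margin; the combination rule is standard, and the margin bookkeeping is not claimed to be GNPy's exact code.

from math import log10

def db2lin(x):
    return 10 ** (x / 10)

def lin2db(x):
    return 10 * log10(x)

tx_osnr_db, line_osnr_db, required_osnr_db = 34.0, 27.0, 24.0

# 1/OSNR_total = 1/OSNR_tx + 1/OSNR_line (linear domain)
osnr_total_db = -lin2db(1 / db2lin(tx_osnr_db) + 1 / db2lin(line_osnr_db))
print(round(osnr_total_db, 2), "dB total,", round(osnr_total_db - required_osnr_db, 2), "dB margin")   # ~26.2 dB, ~2.2 dB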


@@ -1,12 +0,0 @@
{
"spectrum": [
{
"f_min": 191.35e12,
"f_max": 195.1e12,
"baud_rate": 32e9,
"slot_width": 50e9,
"roll_off": 0.15,
"tx_osnr": 40
}
]
}


@@ -1,23 +0,0 @@
{
"spectrum": [
{
"f_min": 191.4e12,
"f_max": 193.1e12,
"baud_rate": 32e9,
"slot_width": 50e9,
"delta_pdb": 0,
"roll_off": 0.15,
"tx_osnr": 40,
"label": "mode_1"
},
{
"f_min": 193.1625e12,
"f_max": 195e12,
"baud_rate": 64e9,
"slot_width": 75e9,
"roll_off": 0.15,
"tx_osnr": 40,
"label": "mode_2"
}
]
}
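The two spectrum partitions above use different baud rates and slot widths. A minimal sketch counting how many slots fit in each partition, assuming channels sit on a grid from f_min to f_max at slot_width steps; the inclusive counting is an assumption of this sketch, not a statement about GNPy's internals.

from math import floor

partitions = [
    {"label": "mode_1", "f_min": 191.4e12, "f_max": 193.1e12, "slot_width": 50e9},
    {"label": "mode_2", "f_min": 193.1625e12, "f_max": 195e12, "slot_width": 75e9},
]

for p in partitions:
    nb = floor((p["f_max"] - p["f_min"]) / p["slot_width"]) + 1
    print(p["label"], nb)   # mode_1: 35 slots, mode_2: 25 slots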


@@ -624,70 +624,6 @@
"con_out": null
}
},
{
"uid": "west edfa in Quimper",
"metadata": {
"location": {
"city": "Quimper",
"region": "RLD",
"latitude": 1.0,
"longitude": 1.0
}
},
"type": "Edfa",
"operational": {
"gain_target": null,
"tilt_target": 0
}
},
{
"uid": "west edfa in Ploermel",
"metadata": {
"location": {
"city": "Ploermel",
"region": "RLD",
"latitude": 1.0,
"longitude": 2.0
}
},
"type": "Edfa",
"operational": {
"gain_target": null,
"tilt_target": 0
}
},
{
"uid": "east edfa in Quimper",
"metadata": {
"location": {
"city": "Quimper",
"region": "RLD",
"latitude": 1.0,
"longitude": 1.0
}
},
"type": "Edfa",
"operational": {
"gain_target": null,
"tilt_target": 0
}
},
{
"uid": "east edfa in Ploermel",
"metadata": {
"location": {
"city": "Ploermel",
"region": "RLD",
"latitude": 1.0,
"longitude": 2.0
}
},
"type": "Edfa",
"operational": {
"gain_target": null,
"tilt_target": 0
}
},
{
"uid": "east edfa in Lannion_CAS to Corlay",
"metadata": {
@@ -699,7 +635,7 @@
}
},
"type": "Edfa",
"type_variety": "std_medium_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -707,21 +643,6 @@
"out_voa": null
}
},
{
"uid": "east edfa in Lorient_KMA to Vannes_KBE",
"metadata": {
"location": {
"city": "Lorient_KMA",
"region": "RLD",
"latitude": 2.0,
"longitude": 3.0
}
},
"type": "Fused",
"params": {
"loss": 0
}
},
{
"uid": "east edfa in Lannion_CAS to Stbrieuc",
"metadata": {
@@ -733,7 +654,7 @@
}
},
"type": "Edfa",
"type_variety": "std_medium_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -771,7 +692,7 @@
}
},
"type": "Edfa",
"type_variety": "std_medium_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -790,7 +711,7 @@
}
},
"type": "Edfa",
"type_variety": "std_medium_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -809,7 +730,7 @@
}
},
"type": "Edfa",
"type_variety": "std_medium_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -828,7 +749,7 @@
}
},
"type": "Edfa",
"type_variety": "std_medium_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -847,7 +768,7 @@
}
},
"type": "Edfa",
"type_variety": "std_medium_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -866,7 +787,7 @@
}
},
"type": "Edfa",
"type_variety": "std_high_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -961,7 +882,7 @@
}
},
"type": "Edfa",
"type_variety": "std_high_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -980,7 +901,7 @@
}
},
"type": "Edfa",
"type_variety": "std_medium_gain",
"type_variety": "std_low_gain",
"operational": {
"gain_target": null,
"delta_p": 1.0,
@@ -1025,6 +946,21 @@
"tilt_target": 0,
"out_voa": null
}
},
{
"uid": "east edfa in Lorient_KMA to Vannes_KBE",
"metadata": {
"location": {
"city": "Lorient_KMA",
"region": "RLD",
"latitude": 2.0,
"longitude": 3.0
}
},
"type": "Fused",
"params": {
"loss": 0
}
}
],
"connections": [
@@ -1254,34 +1190,18 @@
},
{
"from_node": "fiber (Brest_KLA → Quimper)-",
"to_node": "west edfa in Quimper"
},
{
"from_node": "west edfa in Quimper",
"to_node": "fiber (Quimper → Lorient_KMA)-"
},
{
"from_node": "fiber (Lorient_KMA → Quimper)-",
"to_node": "east edfa in Quimper"
},
{
"from_node": "east edfa in Quimper",
"to_node": "fiber (Quimper → Brest_KLA)-"
},
{
"from_node": "fiber (Vannes_KBE → Ploermel)-",
"to_node": "west edfa in Ploermel"
},
{
"from_node": "west edfa in Ploermel",
"to_node": "fiber (Ploermel → Rennes_STA)-"
},
{
"from_node": "fiber (Rennes_STA → Ploermel)-",
"to_node": "east edfa in Ploermel"
},
{
"from_node": "east edfa in Ploermel",
"to_node": "fiber (Ploermel → Vannes_KBE)-"
},
{
@@ -1325,4 +1245,4 @@
"to_node": "trx Brest_KLA"
}
]
}
}
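The topology diff above edits both element definitions and the connections list of from_node/to_node pairs. A minimal sketch turning such a list into a directed adjacency map, just to make the structure explicit; the two entries are taken from the removed connections shown above, and GNPy itself builds a networkx graph rather than this plain dict.

from collections import defaultdict

connections = [
    {"from_node": "fiber (Brest_KLA → Quimper)-", "to_node": "west edfa in Quimper"},
    {"from_node": "west edfa in Quimper", "to_node": "fiber (Quimper → Lorient_KMA)-"},
]

adjacency = defaultdict(list)
for c in connections:
    adjacency[c["from_node"]].append(c["to_node"])

print(dict(adjacency))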


@@ -52,8 +52,8 @@
"explicit-route-objects": {
"route-object-include-exclude": [
{
"index": 0,
"explicit-route-usage": "route-include-ero",
"index": 0,
"num-unnum-hop": {
"node-id": "roadm Brest_KLA",
"link-tp-id": "link-tp-id is not used",
@@ -61,8 +61,8 @@
}
},
{
"index": 1,
"explicit-route-usage": "route-include-ero",
"index": 1,
"num-unnum-hop": {
"node-id": "roadm Lannion_CAS",
"link-tp-id": "link-tp-id is not used",
@@ -70,8 +70,8 @@
}
},
{
"index": 2,
"explicit-route-usage": "route-include-ero",
"index": 2,
"num-unnum-hop": {
"node-id": "roadm Lorient_KMA",
"link-tp-id": "link-tp-id is not used",
@@ -79,8 +79,8 @@
}
},
{
"index": 3,
"explicit-route-usage": "route-include-ero",
"index": 3,
"num-unnum-hop": {
"node-id": "roadm Vannes_KBE",
"link-tp-id": "link-tp-id is not used",

File diff suppressed because it is too large.


@@ -1,24 +0,0 @@
{
"spectrum": [
{
"f_min": 191.25e12,
"baud_rate": 32e9,
"f_max": 195.1e12,
"slot_width": 50e9,
"delta_pdb": 0,
"roll_off": 0.15,
"tx_osnr": 40,
"label": "cband"
},
{
"f_min": 186.3e12,
"baud_rate": 32e9,
"f_max": 190.1e12,
"slot_width": 50e9,
"delta_pdb": 0,
"roll_off": 0.15,
"tx_osnr": 40,
"label": "lband"
}
]
}


@@ -20,18 +20,19 @@
"temperature": 283,
"raman_pumps": [
{
"power": 224.403e-3,
"power": 200e-3,
"frequency": 205e12,
"propagation_direction": "counterprop"
},
{
"power": 231.135e-3,
"power": 206e-3,
"frequency": 201e12,
"propagation_direction": "counterprop"
}
]
},
"params": {
"type_variety": "SSMF",
"length": 80.0,
"loss_coef": 0.2,
"length_units": "km",
@@ -48,21 +49,6 @@
}
}
},
{
"uid": "Fused1",
"type": "Fused",
"params": {
"loss": 0
},
"metadata": {
"location": {
"latitude": 1.5,
"longitude": 0,
"city": null,
"region": ""
}
}
},
{
"uid": "Edfa1",
"type": "Edfa",
@@ -102,10 +88,6 @@
},
{
"from_node": "Span1",
"to_node": "Fused1"
},
{
"from_node": "Fused1",
"to_node": "Edfa1"
},
{


@@ -1,22 +0,0 @@
{
"path-request": [
{
"request-id": "0",
"source": "trx Brest_KLA",
"destination": "trx Lannion_CAS",
"src-tp-id": "trx Brest_KLA",
"dst-tp-id": "trx Lannion_CAS",
"bidirectional": false,
"path-constraints": {
"te-bandwidth": {
"technology": "flexi-grid",
"trx_type": "ZR400G",
"trx_mode": "SFF-ID:70",
"spacing": 100000000000.0,
"tx_power": 0.0015,
"path_bandwidth": 400000000000.0
}
}
}
]
}
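The path-request above carries the transceiver choice and a tx_power in watts. A minimal plain-JSON sketch pulling those fields out and converting the power to dBm; loading through gnpy.tools.json_io (load_requests / requests_from_json, both visible in the imports further below) is the normal route, so this is only a field-level illustration and the file name is an assumption.

import json
from math import log10

with open("request.json") as f:            # hypothetical file name for the JSON above
    req = json.load(f)["path-request"][0]

te = req["path-constraints"]["te-bandwidth"]
tx_power_dbm = 10 * log10(te["tx_power"] * 1e3)   # 0.0015 W -> ~1.76 dBm
print(req["source"], "->", req["destination"], te["trx_type"], te["trx_mode"], round(tx_power_dbm, 2))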


@@ -1,19 +1,14 @@
{
"raman_params": {
"flag": true,
"result_spatial_resolution": 10e3,
"solver_spatial_resolution": 50
"raman_parameters": {
"flag_raman": true,
"space_resolution": 10e3,
"tolerance": 1e-8
},
"nli_params": {
"method": "ggn_spectrally_separated",
"dispersion_tolerance": 1,
"phase_shift_tolerance": 0.1,
"computed_channels": [
1,
18,
37,
56,
75
]
"nli_parameters": {
"nli_method_name": "ggn_spectrally_separated",
"wdm_grid_size": 50e9,
"dispersion_tolerance": 1,
"phase_shift_tolerance": 0.1,
"computed_channels": [1, 18, 37, 56, 75]
}
}
}
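The sim_params diff above renames the Raman/NLI parameter keys. However they are spelled, these settings are installed globally before propagation; a minimal sketch matching the calls that appear in the cli_examples.py diff further below (SimParams.set_params and load_json), with the file name as an assumption.

from gnpy.core.parameters import SimParams
from gnpy.tools.json_io import load_json

# Install the Raman/NLI solver settings once, before any RamanFiber propagation.
sim_params = load_json("sim_params.json")   # the JSON shown above
SimParams.set_params(sim_params)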


@@ -1,304 +1,303 @@
{
"nf_fit_coeff": [
0.000168241,
0.0469961,
0.0359549,
5.82851
],
"f_min": 191.275e12,
"f_max": 196.125e12,
"nf_ripple": [
0.4372876328262819,
0.4372876328262819,
0.41270842850729195,
0.38814205928193013,
0.36358851509924695,
0.3390191214858807,
0.30474360397422756,
0.27048596623174515,
0.23624619427167134,
0.202035284929368,
0.1694483010211072,
0.13687829834471027,
0.1043252636301016,
0.07184040799914815,
0.061288823415841555,
0.050742731588695494,
0.04020212822983975,
0.029667009055877668,
0.01913736978785662,
0.00861320615127981,
-0.010157321677553965,
-0.028982516728038848,
-0.04779792991567815,
-0.06660356886269536,
-0.06256260169582961,
-0.05832916277634124,
-0.05409792133358102,
-0.04990610405914272,
-0.05078533294804249,
-0.05166410580536087,
-0.05254242298580185,
-0.05342028484370278,
-0.051742390657545205,
-0.050039429413028365,
-0.048337350303318156,
-0.04663615264317309,
-0.04493583574805963,
-0.043236398934156144,
-0.035622012697103154,
-0.027999803010447587,
-0.02038153550619876,
-0.012779471908040341,
-0.006436207679519103,
-9.622162373026585e-05,
0.006240488799898697,
0.012573926129294415,
0.021418708618354456,
0.030289222542492025,
0.03915515813685565,
0.047899419704645264,
0.04256372893215024,
0.03723078993416436,
0.03190060058247842,
0.02657315875107553,
0.021248462316134083,
0.01605877647020772,
0.02326948274513522,
0.03047647598902483,
0.037679759069084225,
0.044883315610536455,
0.052470799141237305,
0.06005437964543287,
0.0676340601339394,
0.07521193198077789,
0.08415906712621996,
0.09310160456603413,
0.1020395478432815,
0.11079585523492333,
0.1018180306253394,
0.09284481475528361,
0.0838762040768461,
0.07482015390297145,
0.05670549786742816,
0.03860013139908377,
0.020504047353947653,
0.0024172385953583004,
-0.015660302006048,
-0.03372858157230583,
-0.07037375788020579,
-0.10709599992470213,
-0.14379944379052215,
-0.18048410390821285,
-0.20911178784023846,
-0.23772399031437283,
-0.26632156113294336,
-0.2949045115165272,
-0.30206775396360075,
-0.30915729645781326,
-0.31624321721895354,
-0.3233255190215882,
-0.32037911876162584,
-0.3172854168606314,
-0.31419329378173544,
-0.31110274831665313,
-0.3110761646066259,
-0.3110761646066259
],
"dgt": [
1.0,
1.017807767853702,
1.0356155337864215,
1.0534217504465226,
1.0712204022764056,
1.0895983485572227,
1.108555289615659,
1.1280891949729075,
1.1476135933863398,
1.1672278304018044,
1.1869318618366975,
1.2067249615595257,
1.2264996957264114,
1.2428104897182262,
1.2556591482982988,
1.2650555289898042,
1.2744470198196236,
1.2838336236692311,
1.2932153453410835,
1.3040618749785347,
1.316383926863083,
1.3301807335621048,
1.3439818461440451,
1.3598972673004606,
1.3779439775587023,
1.3981208704326855,
1.418273806730323,
1.4340878115214444,
1.445565137158368,
1.45273959485914,
1.4599103316162523,
1.4670307626366115,
1.474100442252211,
1.48111939735681,
1.488134243479226,
1.495145456062699,
1.502153039909686,
1.5097346239790443,
1.5178910621476225,
1.5266220576235803,
1.5353620432989845,
1.545374152761467,
1.5566577309558969,
1.569199764184379,
1.5817353179379183,
1.5986915141218316,
1.6201194134191075,
1.6460167077689267,
1.6719047669939942,
1.6918150918099673,
1.7057507692361864,
1.7137640932265894,
1.7217732861435076,
1.7297783508684146,
1.737780757913635,
1.7459181197626403,
1.7541903672600494,
1.7625959636196327,
1.7709972329654864,
1.7793941781790852,
1.7877868031023945,
1.7961751115773796,
1.8045606557581335,
1.8139629377087627,
1.824381436842932,
1.835814081380705,
1.847275503201129,
1.862235672444246,
1.8806927939516411,
1.9026104247588487,
1.9245345552113182,
1.9482128147680253,
1.9736443063300082,
2.0008103857988204,
2.0279625371819305,
2.055100772005235,
2.082225099873648,
2.1183028432496016,
2.16337565384239,
2.2174389328192197,
2.271520771371253,
2.322373696229342,
2.3699990328716107,
2.414398437185221,
2.4587748041127506,
2.499446286796604,
2.5364027376452056,
2.5696460593920065,
2.602860350286428,
2.630396440815385,
2.6521732021128046,
2.6681935771243177,
2.6841217449620203,
2.6947834587664494,
2.705443819238505,
2.714526681131686
],
"gain_ripple": [
0.07704745697916238,
0.06479749697916048,
0.05257029697916238,
0.040326236979161934,
0.028098946979159933,
0.01393231697916164,
-0.0021726530208390216,
-0.01819858302084043,
-0.03218106302083967,
-0.042428283020839785,
-0.05095282302083959,
-0.05947139302083926,
-0.06968090302083851,
-0.07844600302084004,
-0.08407607302083875,
-0.0865687230208394,
-0.08906007302083907,
-0.0913487130208388,
-0.09343261302083761,
-0.09717347302083823,
-0.1027863830208382,
-0.11089282302084058,
-0.11963431302083904,
-0.1279646530208396,
-0.13525493302083902,
-0.1409032730208395,
-0.14591937302083835,
-0.14823350302084037,
-0.1484450830208388,
-0.1455411330208385,
-0.14160178302083892,
-0.1353792530208402,
-0.12789859302083784,
-0.11916081302083725,
-0.11041488302083735,
-0.10103437302083762,
-0.09101254302083817,
-0.07868024302083754,
-0.06468462302083822,
-0.051112303020840244,
-0.039618433020837784,
-0.028748483020837767,
-0.016475303020840215,
-0.006936193020838033,
-0.0015763130208377163,
0.0007104669791608842,
0.0040435869791615175,
0.006965146979162284,
0.00842583697916055,
0.00874012697916271,
0.00936596697916059,
0.01030063697916006,
0.011234826979162449,
0.013321846979160057,
0.01659282697915998,
0.023488786979161347,
0.03285456697916089,
0.04072968697916224,
0.04467697697916151,
0.04551704697916037,
0.04717897697916129,
0.04946107697915991,
0.05154489697916276,
0.05447361697916264,
0.05848224697916038,
0.06916723697916183,
0.08548825697916129,
0.10802383697916085,
0.13114358697916018,
0.15216302697916007,
0.17037189697916233,
0.1767381569791624,
0.1739275269791598,
0.15945681697916214,
0.14239527697916188,
0.12276252697916235,
0.10313984697916112,
0.08731066697916035,
0.07533675697916209,
0.07114372697916238,
0.07094413697916124,
0.07091459697916136,
0.0670723869791594,
0.054956336979159914,
0.038328296979159404,
0.017572956979162058,
-0.0028138630208403015,
-0.016792253020838643,
-0.0246928330208398,
-0.018326963020840026,
-0.0036199830208403228,
0.02602813697916062,
0.06245819697916133,
0.09542181697916163,
0.11822862697916037,
0.1359703369791596
]
}
{ "nf_fit_coeff": [
0.000168241,
0.0469961,
0.0359549,
5.82851
],
"f_min": 191.35e12,
"f_max": 196.1e12,
"nf_ripple": [
-0.3110761646066259,
-0.3110761646066259,
-0.31110274831665313,
-0.31419329378173544,
-0.3172854168606314,
-0.32037911876162584,
-0.3233255190215882,
-0.31624321721895354,
-0.30915729645781326,
-0.30206775396360075,
-0.2949045115165272,
-0.26632156113294336,
-0.23772399031437283,
-0.20911178784023846,
-0.18048410390821285,
-0.14379944379052215,
-0.10709599992470213,
-0.07037375788020579,
-0.03372858157230583,
-0.015660302006048,
0.0024172385953583004,
0.020504047353947653,
0.03860013139908377,
0.05670549786742816,
0.07482015390297145,
0.0838762040768461,
0.09284481475528361,
0.1018180306253394,
0.11079585523492333,
0.1020395478432815,
0.09310160456603413,
0.08415906712621996,
0.07521193198077789,
0.0676340601339394,
0.06005437964543287,
0.052470799141237305,
0.044883315610536455,
0.037679759069084225,
0.03047647598902483,
0.02326948274513522,
0.01605877647020772,
0.021248462316134083,
0.02657315875107553,
0.03190060058247842,
0.03723078993416436,
0.04256372893215024,
0.047899419704645264,
0.03915515813685565,
0.030289222542492025,
0.021418708618354456,
0.012573926129294415,
0.006240488799898697,
-9.622162373026585e-05,
-0.006436207679519103,
-0.012779471908040341,
-0.02038153550619876,
-0.027999803010447587,
-0.035622012697103154,
-0.043236398934156144,
-0.04493583574805963,
-0.04663615264317309,
-0.048337350303318156,
-0.050039429413028365,
-0.051742390657545205,
-0.05342028484370278,
-0.05254242298580185,
-0.05166410580536087,
-0.05078533294804249,
-0.04990610405914272,
-0.05409792133358102,
-0.05832916277634124,
-0.06256260169582961,
-0.06660356886269536,
-0.04779792991567815,
-0.028982516728038848,
-0.010157321677553965,
0.00861320615127981,
0.01913736978785662,
0.029667009055877668,
0.04020212822983975,
0.050742731588695494,
0.061288823415841555,
0.07184040799914815,
0.1043252636301016,
0.13687829834471027,
0.1694483010211072,
0.202035284929368,
0.23624619427167134,
0.27048596623174515,
0.30474360397422756,
0.3390191214858807,
0.36358851509924695,
0.38814205928193013,
0.41270842850729195,
0.4372876328262819,
0.4372876328262819
],
"dgt": [
2.714526681131686,
2.705443819238505,
2.6947834587664494,
2.6841217449620203,
2.6681935771243177,
2.6521732021128046,
2.630396440815385,
2.602860350286428,
2.5696460593920065,
2.5364027376452056,
2.499446286796604,
2.4587748041127506,
2.414398437185221,
2.3699990328716107,
2.322373696229342,
2.271520771371253,
2.2174389328192197,
2.16337565384239,
2.1183028432496016,
2.082225099873648,
2.055100772005235,
2.0279625371819305,
2.0008103857988204,
1.9736443063300082,
1.9482128147680253,
1.9245345552113182,
1.9026104247588487,
1.8806927939516411,
1.862235672444246,
1.847275503201129,
1.835814081380705,
1.824381436842932,
1.8139629377087627,
1.8045606557581335,
1.7961751115773796,
1.7877868031023945,
1.7793941781790852,
1.7709972329654864,
1.7625959636196327,
1.7541903672600494,
1.7459181197626403,
1.737780757913635,
1.7297783508684146,
1.7217732861435076,
1.7137640932265894,
1.7057507692361864,
1.6918150918099673,
1.6719047669939942,
1.6460167077689267,
1.6201194134191075,
1.5986915141218316,
1.5817353179379183,
1.569199764184379,
1.5566577309558969,
1.545374152761467,
1.5353620432989845,
1.5266220576235803,
1.5178910621476225,
1.5097346239790443,
1.502153039909686,
1.495145456062699,
1.488134243479226,
1.48111939735681,
1.474100442252211,
1.4670307626366115,
1.4599103316162523,
1.45273959485914,
1.445565137158368,
1.4340878115214444,
1.418273806730323,
1.3981208704326855,
1.3779439775587023,
1.3598972673004606,
1.3439818461440451,
1.3301807335621048,
1.316383926863083,
1.3040618749785347,
1.2932153453410835,
1.2838336236692311,
1.2744470198196236,
1.2650555289898042,
1.2556591482982988,
1.2428104897182262,
1.2264996957264114,
1.2067249615595257,
1.1869318618366975,
1.1672278304018044,
1.1476135933863398,
1.1280891949729075,
1.108555289615659,
1.0895983485572227,
1.0712204022764056,
1.0534217504465226,
1.0356155337864215,
1.017807767853702,
1.0
],
"gain_ripple": [
0.1359703369791596,
0.11822862697916037,
0.09542181697916163,
0.06245819697916133,
0.02602813697916062,
-0.0036199830208403228,
-0.018326963020840026,
-0.0246928330208398,
-0.016792253020838643,
-0.0028138630208403015,
0.017572956979162058,
0.038328296979159404,
0.054956336979159914,
0.0670723869791594,
0.07091459697916136,
0.07094413697916124,
0.07114372697916238,
0.07533675697916209,
0.08731066697916035,
0.10313984697916112,
0.12276252697916235,
0.14239527697916188,
0.15945681697916214,
0.1739275269791598,
0.1767381569791624,
0.17037189697916233,
0.15216302697916007,
0.13114358697916018,
0.10802383697916085,
0.08548825697916129,
0.06916723697916183,
0.05848224697916038,
0.05447361697916264,
0.05154489697916276,
0.04946107697915991,
0.04717897697916129,
0.04551704697916037,
0.04467697697916151,
0.04072968697916224,
0.03285456697916089,
0.023488786979161347,
0.01659282697915998,
0.013321846979160057,
0.011234826979162449,
0.01030063697916006,
0.00936596697916059,
0.00874012697916271,
0.00842583697916055,
0.006965146979162284,
0.0040435869791615175,
0.0007104669791608842,
-0.0015763130208377163,
-0.006936193020838033,
-0.016475303020840215,
-0.028748483020837767,
-0.039618433020837784,
-0.051112303020840244,
-0.06468462302083822,
-0.07868024302083754,
-0.09101254302083817,
-0.10103437302083762,
-0.11041488302083735,
-0.11916081302083725,
-0.12789859302083784,
-0.1353792530208402,
-0.14160178302083892,
-0.1455411330208385,
-0.1484450830208388,
-0.14823350302084037,
-0.14591937302083835,
-0.1409032730208395,
-0.13525493302083902,
-0.1279646530208396,
-0.11963431302083904,
-0.11089282302084058,
-0.1027863830208382,
-0.09717347302083823,
-0.09343261302083761,
-0.0913487130208388,
-0.08906007302083907,
-0.0865687230208394,
-0.08407607302083875,
-0.07844600302084004,
-0.06968090302083851,
-0.05947139302083926,
-0.05095282302083959,
-0.042428283020839785,
-0.03218106302083967,
-0.01819858302084043,
-0.0021726530208390216,
0.01393231697916164,
0.028098946979159933,
0.040326236979161934,
0.05257029697916238,
0.06479749697916048,
0.07704745697916238
]
}
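The gain_ripple, nf_ripple and dgt arrays above are per-pin samples spanning f_min to f_max. A minimal numpy sketch resampling the ripple onto a 50 GHz channel grid, assuming the samples are evenly spread across the band; that spacing assumption belongs to this sketch, not to GNPy's frequency axis.

import numpy as np

f_min, f_max = 191.35e12, 196.1e12
gain_ripple = np.array([0.136, 0.118, 0.095, 0.062])   # first few samples from above

ripple_axis = np.linspace(f_min, f_max, gain_ripple.size)
channels = np.arange(f_min, f_max, 50e9)               # 50 GHz grid
print(np.interp(channels, ripple_axis, gain_ripple)[:4])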


@@ -1,11 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# Reads JSON path result file and writes results to a CSV file
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
write_path_jsontocsv.py
========================
@@ -23,9 +18,9 @@ from gnpy.tools.json_io import load_equipment
from gnpy.topology.request import jsontocsv
parser = ArgumentParser(description='Converting JSON path results into a CSV')
parser.add_argument('filename', type=Path)
parser.add_argument('output_filename', type=Path)
parser = ArgumentParser(description='A function that writes json path results in an excel sheet.')
parser.add_argument('filename', nargs='?', type=Path)
parser.add_argument('output_filename', nargs='?', type=Path)
parser.add_argument('eqpt_filename', nargs='?', type=Path, default=Path(__file__).parent / 'eqpt_config.json')
if __name__ == '__main__':


@@ -1,5 +1,5 @@
"""
'''
Processing of data via :py:mod:`.json_io`.
Utilities for Excel conversion in :py:mod:`.convert` and :py:mod:`.service_sheet`.
Example code in :py:mod:`.cli_examples` and :py:mod:`.plots`.
"""
'''


@@ -1,43 +1,40 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.tools.cli_examples: Common code for CLI examples
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
'''
gnpy.tools.cli_examples
=======================
Common code for CLI examples
"""
'''
import argparse
from json import dumps
import logging
import os.path
import sys
from pathlib import Path
from typing import Union, List
from math import ceil
from numpy import mean
from gnpy.core import ansi_escapes
from numpy import linspace, mean
from pathlib import Path
import gnpy.core.ansi_escapes as ansi_escapes
from gnpy.core.elements import Transceiver, Fiber, RamanFiber
from gnpy.core import exceptions
from gnpy.core.equipment import trx_mode_params
import gnpy.core.exceptions as exceptions
from gnpy.core.network import build_network
from gnpy.core.parameters import SimParams
from gnpy.core.utils import lin2db, pretty_summary_print, per_label_average, watt2dbm
from gnpy.topology.request import (ResultElement, jsontocsv, BLOCKING_NOPATH)
from gnpy.tools.json_io import (load_equipments_and_configs, load_network, load_json, load_requests, save_network,
requests_from_json, save_json, load_initial_spectrum, DEFAULT_EQPT_CONFIG)
from gnpy.core.science_utils import Simulation
from gnpy.core.utils import db2lin, lin2db, automatic_nch
from gnpy.topology.request import (ResultElement, jsontocsv, compute_path_dsjctn, requests_aggregation,
BLOCKING_NOPATH, correct_json_route_list,
deduplicate_disjunctions, compute_path_with_disjunction,
PathRequest, compute_constrained_path, propagate2)
from gnpy.topology.spectrum_assignment import build_oms_list, pth_assign_spectrum
from gnpy.tools.json_io import load_equipment, load_network, load_json, load_requests, save_network, \
requests_from_json, disjunctions_from_json, save_json
from gnpy.tools.plots import plot_baseline, plot_results
from gnpy.tools.worker_utils import designed_network, transmission_simulation, planning
_logger = logging.getLogger(__name__)
_examples_dir = Path(__file__).parent.parent / 'example-data'
_default_config_files = ['example-data/std_medium_gain_advanced_config.json',
'example-data/Juniper-BoosterHG.json',
'parameters.DEFAULT_EDFA_CONFIG']
_help_footer = '''
This program is part of GNPy, https://github.com/TelecomInfraProject/oopt-gnpy
@@ -49,60 +46,38 @@ _help_fname_json_csv = 'FILE.(json|csv)'
def show_example_data_dir():
"""Print the example data directory path."""
print(f'{_examples_dir}/')
def load_common_data(equipment_filename: Path,
extra_equipment_filenames: List[Path], extra_config_filenames: List[Path],
topology_filename: Path, simulation_filename: Path, save_raw_network_filename: Path):
"""Load common configuration from JSON files, merging additional equipment if provided.
def load_common_data(equipment_filename, topology_filename, simulation_filename, save_raw_network_filename):
'''Load common configuration from JSON files'''
:param equipment_filename: Path to the main equipment configuration file.
:type equipment_filename: Path
:param extra_equipment_filenames: List of additional equipment configuration files.
:type extra_equipment_filenames: List[Path]
:param extra_config_filenames: List of additional configuration files.
:type extra_config_filenames: List[Path]
:param topology_filename: Path to the network topology file.
:type topology_filename: Path
:param simulation_filename: Path to the simulation parameters file.
:type simulation_filename: Path
:param save_raw_network_filename: Path to save the raw network configuration.
:type save_raw_network_filename: Path
:raises exceptions.EquipmentConfigError: If there is a configuration error in the equipment library.
:raises exceptions.NetworkTopologyError: If the network definition is invalid.
:raises exceptions.ParametersError: If there is an error with simulation parameters.
:raises exceptions.ConfigurationError: If there is a general configuration error.
:raises exceptions.ServiceError: If there is a service-related error.
"""
try:
equipment = load_equipments_and_configs(equipment_filename, extra_equipment_filenames, extra_config_filenames)
equipment = load_equipment(equipment_filename)
network = load_network(topology_filename, equipment)
if save_raw_network_filename is not None:
save_network(network, save_raw_network_filename)
print(f'{ansi_escapes.blue}Raw network (no optimizations) saved to {save_raw_network_filename}{ansi_escapes.reset}')
if not simulation_filename:
sim_params = {}
sim_params = SimParams(**load_json(simulation_filename)) if simulation_filename is not None else None
if not sim_params:
if next((node for node in network if isinstance(node, RamanFiber)), None) is not None:
print(f'{ansi_escapes.red}Invocation error:{ansi_escapes.reset} '
f'RamanFiber requires passing simulation params via --sim-params')
sys.exit(1)
else:
sim_params = load_json(simulation_filename)
SimParams.set_params(sim_params)
Simulation.set_params(sim_params)
except exceptions.EquipmentConfigError as e:
print(f'{ansi_escapes.red}Configuration error in the equipment library:{ansi_escapes.reset} {e}')
sys.exit(1)
except exceptions.NetworkTopologyError as e:
print(f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {e}')
sys.exit(1)
except exceptions.ParametersError as e:
print(f'{ansi_escapes.red}Simulation parameters error:{ansi_escapes.reset} {e}')
sys.exit(1)
except exceptions.ConfigurationError as e:
print(f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {e}')
sys.exit(1)
except exceptions.ParametersError as e:
print(f'{ansi_escapes.red}Simulation parameters error:{ansi_escapes.reset} {e}')
sys.exit(1)
except exceptions.ServiceError as e:
print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {e}')
sys.exit(1)
@@ -110,30 +85,18 @@ def load_common_data(equipment_filename: Path,
return (equipment, network)
def _setup_logging(args: argparse.Namespace):
"""Set up logging based on verbosity level.
:param args: The parsed command-line arguments.
:type args: argparse.Namespace
"""
logging.basicConfig(level={2: logging.DEBUG, 1: logging.INFO, 0: logging.WARNING}.get(args.verbose, logging.DEBUG))
def _setup_logging(args):
logging.basicConfig(level={2: logging.DEBUG, 1: logging.INFO, 0: logging.CRITICAL}.get(args.verbose, logging.DEBUG))
def _add_common_options(parser: argparse.ArgumentParser, network_default: Path):
"""Add common command-line options to the argument parser.
:param parser: The argument parser to which options will be added.
:type parser: argparse.ArgumentParser
:param network_default: The default path for the network topology file.
:type network_default: Path
"""
parser.add_argument('topology', nargs='?', type=Path, metavar='NETWORK-TOPOLOGY.(json|xls|xlsx)',
default=network_default,
help='Input network topology')
parser.add_argument('-v', '--verbose', action='count', default=0,
help='Increase verbosity (can be specified several times)')
parser.add_argument('-e', '--equipment', type=Path, metavar=_help_fname_json,
default=DEFAULT_EQPT_CONFIG, help='Equipment library')
default=_examples_dir / 'eqpt_config.json', help='Equipment library')
parser.add_argument('--sim-params', type=Path, metavar=_help_fname_json,
default=None, help='Path to the JSON containing simulation parameters (required for Raman). '
f'Example: {_examples_dir / "sim_params.json"}')
@@ -141,47 +104,26 @@ def _add_common_options(parser: argparse.ArgumentParser, network_default: Path):
help='Save the final network as a JSON file')
parser.add_argument('--save-network-before-autodesign', type=Path, metavar=_help_fname_json,
help='Dump the network into a JSON file prior to autodesign')
parser.add_argument('--no-insert-edfas', action='store_true',
help='Disable insertion of EDFAs after ROADMs and fibers '
'as well as splitting of fibers by auto-design.')
# Option for additional equipment files
parser.add_argument('--extra-equipment', nargs='+', type=Path,
metavar=_help_fname_json, default=None,
help='List of additional equipment files to complement the main equipment file.')
# Option for additional config files
parser.add_argument('--extra-config', nargs='+', type=Path,
metavar=_help_fname_json,
help='List of additional config files as referenced in equipment files with '
'"advanced_config_from_json" or "default_config_from_json".'
f'Existing configs:\n{_default_config_files}')
def transmission_main_example(args: Union[List[str], None] = None):
"""Main script running a single simulation. It returns the detailed power across crossed elements and
average performance across all channels.
:param args: Command-line arguments (default is None).
:type args: Union[List[str], None]
"""
def transmission_main_example(args=None):
parser = argparse.ArgumentParser(
description='Send a full spectrum load through the network from point A to point B',
epilog=_help_footer,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
)
_add_common_options(parser, network_default=_examples_dir / 'edfa_example_network.json')
parser.add_argument('--show-channels', action='store_true', help='Show final per-channel OSNR and GSNR summary')
parser.add_argument('--show-channels', action='store_true', help='Show final per-channel OSNR summary')
parser.add_argument('-pl', '--plot', action='store_true')
parser.add_argument('-l', '--list-nodes', action='store_true', help='list all transceiver nodes')
parser.add_argument('-po', '--power', default=0, help='channel ref power in dBm')
parser.add_argument('--spectrum', type=Path, help='user defined mixed rate spectrum JSON file')
parser.add_argument('source', nargs='?', help='source node')
parser.add_argument('destination', nargs='?', help='destination node')
args = parser.parse_args(args if args is not None else sys.argv[1:])
_setup_logging(args)
(equipment, network) = load_common_data(args.equipment, args.extra_equipment, args.extra_config, args.topology,
args.sim_params, args.save_network_before_autodesign)
(equipment, network) = load_common_data(args.equipment, args.topology, args.sim_params, args.save_network_before_autodesign)
if args.plot:
plot_baseline(network)
@@ -199,17 +141,19 @@ def transmission_main_example(args: Union[List[str], None] = None):
sys.exit()
# First try to find exact match if source/destination provided
source = None
if args.source:
source = transceivers.pop(args.source, None)
valid_source = bool(source)
valid_source = True if source else False
else:
source = None
_logger.info('No source node specified: picking random transceiver')
destination = None
nodes_list = []
loose_list = []
if args.destination:
destination = transceivers.pop(args.destination, None)
valid_destination = bool(destination)
valid_destination = True if destination else False
else:
destination = None
_logger.info('No destination node specified: picking random transceiver')
# If no exact match try to find partial match
if args.source and not source:
@@ -226,84 +170,85 @@ def transmission_main_example(args: Union[List[str], None] = None):
if not source:
source = list(transceivers.values())[0]
del transceivers[source.uid]
_logger.info('No source node specified: picking random transceiver')
if not destination:
destination = list(transceivers.values())[0]
nodes_list = [destination.uid]
loose_list = ['STRICT']
_logger.info('No destination node specified: picking random transceiver')
_logger.info(f'source = {source.uid!r}')
_logger.info(f'destination = {destination.uid!r}')
_logger.info(f'source = {args.source!r}')
_logger.info(f'destination = {args.destination!r}')
params = {}
params['request_id'] = 0
params['trx_type'] = ''
params['trx_mode'] = ''
params['source'] = source.uid
params['destination'] = destination.uid
params['bidir'] = False
params['nodes_list'] = [destination.uid]
params['loose_list'] = ['strict']
params['format'] = ''
params['path_bandwidth'] = 0
params['effective_freq_slot'] = None
trx_params = trx_mode_params(equipment)
if args.power:
trx_params['power'] = db2lin(float(args.power)) * 1e-3
params.update(trx_params)
req = PathRequest(**params)
initial_spectrum = None
if args.spectrum:
# use the spectrum defined by user for the propagation.
# the nb of channel for design remains the one of the reference channel
initial_spectrum = load_initial_spectrum(args.spectrum)
print('User input for spectrum used for propagation instead of SI')
power_mode = equipment['Span']['default'].power_mode
print('\n'.join([f'Power mode is set to {power_mode}',
'=> it can be modified in eqpt_config.json - Span']))
f'=> it can be modified in eqpt_config.json - Span']))
# Simulate !
try:
network, req, ref_req = designed_network(equipment, network, source.uid, destination.uid,
nodes_list=nodes_list, loose_list=loose_list,
args_power=args.power,
initial_spectrum=initial_spectrum,
no_insert_edfas=args.no_insert_edfas)
path, propagations_for_path, powers_dbm, infos = transmission_simulation(equipment, network, req, ref_req)
except exceptions.NetworkTopologyError as e:
print(f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {e}')
sys.exit(1)
except exceptions.ConfigurationError as e:
print(f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {e}')
sys.exit(1)
except exceptions.ServiceError as e:
print(f'Service error: {e}')
sys.exit(1)
except ValueError:
sys.exit(1)
# print or export results
spans = [s.params.length for s in path if isinstance(s, (Fiber, RamanFiber))]
print(f'\nThere are {len(spans)} fiber spans over {sum(spans) / 1000:.0f} km between {source.uid} '
pref_ch_db = lin2db(req.power * 1e3) # reference channel power / span (SL=20dB)
pref_total_db = pref_ch_db + lin2db(req.nb_channel) # reference total power / span (SL=20dB)
build_network(network, equipment, pref_ch_db, pref_total_db)
path = compute_constrained_path(network, req)
spans = [s.params.length for s in path if isinstance(s, RamanFiber) or isinstance(s, Fiber)]
print(f'\nThere are {len(spans)} fiber spans over {sum(spans)/1000:.0f} km between {source.uid} '
f'and {destination.uid}')
print(f'\nNow propagating between {source.uid} and {destination.uid}:')
print(f'Reference used for design: (Input optical power reference in span = {watt2dbm(ref_req.power):.2f}dBm,\n'
+ f' spacing = {ref_req.spacing * 1e-9:.2f}GHz\n'
+ f' nb_channels = {ref_req.nb_channel})')
print('\nChannels propagating: (Input optical power deviation in span = '
+ f'{pretty_summary_print(per_label_average(infos.delta_pdb_per_channel, infos.label))}dB,\n'
+ ' spacing = '
+ f'{pretty_summary_print(per_label_average(infos.slot_width * 1e-9, infos.label))}GHz,\n'
+ ' transceiver output power = '
+ f'{pretty_summary_print(per_label_average(watt2dbm(infos.tx_power), infos.label))}dBm,\n'
+ f' nb_channels = {infos.number_of_channels})')
for mypath, power_dbm in zip(propagations_for_path, powers_dbm):
try:
p_start, p_stop, p_step = equipment['SI']['default'].power_range_db
p_num = abs(int(round((p_stop - p_start) / p_step))) + 1 if p_step != 0 else 1
power_range = list(linspace(p_start, p_stop, p_num))
except TypeError:
print('invalid power range definition in eqpt_config, should be power_range_db: [lower, upper, step]')
power_range = [0]
if not power_mode:
# power cannot be changed in gain mode
power_range = [0]
for dp_db in power_range:
req.power = db2lin(pref_ch_db + dp_db) * 1e-3
if power_mode:
print(f'Input optical power reference in span = {ansi_escapes.cyan}{power_dbm:.2f} '
+ f'dBm{ansi_escapes.reset}:')
print(f'\nPropagating with input power = {ansi_escapes.cyan}{lin2db(req.power*1e3):.2f} dBm{ansi_escapes.reset}:')
else:
print(f'\nPropagating in {ansi_escapes.cyan}gain mode{ansi_escapes.reset}: power cannot be set manually')
if len(powers_dbm) == 1:
for elem in mypath:
print(f'\nPropagating in {ansi_escapes.cyan}gain mode{ansi_escapes.reset}: power cannot be set manually')
infos = propagate2(path, req, equipment)
if len(power_range) == 1:
for elem in path:
print(elem)
if power_mode:
print(f'\nTransmission result for input optical power reference in span = {power_dbm:.2f} dBm:')
print(f'\nTransmission result for input power = {lin2db(req.power*1e3):.2f} dBm:')
else:
print('\nTransmission results:')
print(f' Final GSNR (0.1 nm): {ansi_escapes.cyan}{mean(destination.snr_01nm):.02f} dB{ansi_escapes.reset}')
print(f'\nTransmission results:')
print(f' Final SNR total (0.1 nm): {ansi_escapes.cyan}{mean(destination.snr_01nm):.02f} dB{ansi_escapes.reset}')
else:
print(mypath[-1])
print(path[-1])
# print(f'\n !!!!!!!!!!!!!!!!! TEST POINT !!!!!!!!!!!!!!!!!!!!!')
# print(f'carriers ase output of {path[1]} =\n {list(path[1].carriers("out", "nli"))}')
# => use "in" or "out" parameter
# => use "nli" or "ase" or "signal" or "total" parameter
if args.save_network is not None:
save_network(network, args.save_network)
print(f'{ansi_escapes.blue}Network (after autodesign) saved to {args.save_network}{ansi_escapes.reset}')
if args.show_channels:
print('\nThe GSNR per channel at the end of the line is:')
print('\nThe total SNR per channel at the end of the line is:')
print(
'{:>5}{:>26}{:>26}{:>28}{:>28}{:>28}' .format(
'Ch. #',
@@ -311,15 +256,15 @@ def transmission_main_example(args: Union[List[str], None] = None):
'Channel power (dBm)',
'OSNR ASE (signal bw, dB)',
'SNR NLI (signal bw, dB)',
'GSNR (signal bw, dB)'))
'SNR total (signal bw, dB)'))
for final_carrier, ch_osnr, ch_snr_nl, ch_snr in zip(
infos.carriers, path[-1].osnr_ase, path[-1].osnr_nli, path[-1].snr):
infos[path[-1]][1].carriers, path[-1].osnr_ase, path[-1].osnr_nli, path[-1].snr):
ch_freq = final_carrier.frequency * 1e-12
ch_power = lin2db(final_carrier.power.signal * 1e3)
print(
'{:5}{:26.5f}{:26.2f}{:28.2f}{:28.2f}{:28.2f}' .format(
'{:5}{:26.2f}{:26.2f}{:28.2f}{:28.2f}{:28.2f}' .format(
final_carrier.channel_number, round(
ch_freq, 5), round(
ch_freq, 2), round(
ch_power, 2), round(
ch_osnr, 2), round(
ch_snr_nl, 2), round(
@@ -336,7 +281,7 @@ def transmission_main_example(args: Union[List[str], None] = None):
print(f'\n(Invalid destination node {args.destination!r} replaced with {destination.uid})')
if args.plot:
plot_results(network, path, source, destination)
plot_results(network, path, source, destination, infos)
def _path_result_json(pathresult):
@@ -344,17 +289,11 @@ def _path_result_json(pathresult):
def path_requests_run(args=None):
"""Main script running several services simulations. It returns a summary of the average performance
for each service.
:param args: Command-line arguments (default is None).
:type args: Union[List[str], None]
"""
parser = argparse.ArgumentParser(
description='Compute performance for a list of services provided in a json file or an excel sheet',
epilog=_help_footer,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
)
_add_common_options(parser, network_default=_examples_dir / 'meshTopologyExampleV2.xls')
parser.add_argument('service_filename', nargs='?', type=Path, metavar='SERVICES-REQUESTS.(json|xls|xlsx)',
default=_examples_dir / 'meshTopologyExampleV2.xls',
@@ -363,52 +302,81 @@ def path_requests_run(args=None):
help='considers that all demands are bidir')
parser.add_argument('-o', '--output', type=Path, metavar=_help_fname_json_csv,
help='Store satisfied requests into a JSON or CSV file')
parser.add_argument('--redesign-per-request', action='store_true', help='Redesign the network at each request'
+ ' computation using the request as the reference channel')
args = parser.parse_args(args if args is not None else sys.argv[1:])
_setup_logging(args)
_logger.info(f'Computing path requests {args.service_filename.name} into JSON format')
_logger.info(f'Computing path requests {args.service_filename} into JSON format')
print(f'{ansi_escapes.blue}Computing path requests {os.path.relpath(args.service_filename)} into JSON format{ansi_escapes.reset}')
(equipment, network) = \
load_common_data(args.equipment, args.extra_equipment, args.extra_config, args.topology, args.sim_params,
args.save_network_before_autodesign)
(equipment, network) = load_common_data(args.equipment, args.topology, args.sim_params, args.save_network_before_autodesign)
# Build the network once using the default power defined in SI in eqpt config
# TODO power density: db2lin(power_dbm) / power_dbm * nb channels as defined by
# spacing, f_min and f_max
p_db = equipment['SI']['default'].power_dbm
p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,
equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
build_network(network, equipment, p_db, p_total_db)
if args.save_network is not None:
save_network(network, args.save_network)
print(f'{ansi_escapes.blue}Network (after autodesign) saved to {args.save_network}{ansi_escapes.reset}')
oms_list = build_oms_list(network, equipment)
try:
network, _, _ = designed_network(equipment, network, no_insert_edfas=args.no_insert_edfas)
data = load_requests(args.service_filename, equipment, bidir=args.bidir,
network=network, network_filename=args.topology)
_data = requests_from_json(data, equipment)
_, propagatedpths, reversed_propagatedpths, rqs, dsjn, result = \
planning(network, equipment, data, redesign=args.redesign_per_request)
except exceptions.NetworkTopologyError as e:
print(f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {e}')
sys.exit(1)
except exceptions.ConfigurationError as e:
print(f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {e}')
rqs = requests_from_json(data, equipment)
except exceptions.ServiceError as e:
print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {e}')
sys.exit(1)
# check that request ids are unique. Non-unique ids may
# mess up the computation: better to stop the computation
all_ids = [r.request_id for r in rqs]
if len(all_ids) != len(set(all_ids)):
for item in list(set(all_ids)):
all_ids.remove(item)
msg = f'Requests id {all_ids} are not unique'
_logger.critical(msg)
sys.exit()
rqs = correct_json_route_list(network, rqs)
# pths = compute_path(network, equipment, rqs)
dsjn = disjunctions_from_json(data)
print(f'{ansi_escapes.blue}List of disjunctions{ansi_escapes.reset}')
print(dsjn)
# need to warn or correct in case of wrong disjunction form
# disjunction must not be repeated with same or different ids
dsjn = deduplicate_disjunctions(dsjn)
# Aggregate demands with same exact constraints
print(f'{ansi_escapes.blue}Aggregating similar requests{ansi_escapes.reset}')
rqs, dsjn = requests_aggregation(rqs, dsjn)
# TODO export novel set of aggregated demands in a json file
print(f'{ansi_escapes.blue}The following services have been requested:{ansi_escapes.reset}')
print(rqs)
print(f'{ansi_escapes.blue}Computing all paths with constraints{ansi_escapes.reset}')
try:
pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
except exceptions.DisjunctionError as this_e:
print(f'{ansi_escapes.red}Disjunction error:{ansi_escapes.reset} {this_e}')
sys.exit(1)
except exceptions.ServiceError as e:
print(f'Service error: {e}')
sys.exit(1)
except ValueError:
sys.exit(1)
print(f'{ansi_escapes.blue}List of disjunctions{ansi_escapes.reset}')
print(dsjn)
print(f'{ansi_escapes.blue}The following services have been requested:{ansi_escapes.reset}')
print(_data)
if args.save_network is not None:
save_network(network, args.save_network)
print(f'Network (after autodesign) saved to {args.save_network}')
print(f'{ansi_escapes.blue}Propagating on selected path{ansi_escapes.reset}')
propagatedpths, reversed_pths, reversed_propagatedpths = compute_path_with_disjunction(network, equipment, rqs, pths)
# Note that the deepcopy used in compute_path_with_disjunction returns
# a list of nodes which do not belong to the network (they are copies of the node objects),
# so propagation cannot be run on these nodes.
pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)
print(f'{ansi_escapes.blue}Result summary{ansi_escapes.reset}')
header = ['req id', ' demand', ' GSNR@bandwidth A-Z (Z-A)', ' GSNR@0.1nm A-Z (Z-A)',
header = ['req id', ' demand', ' snr@bandwidth A-Z (Z-A)', ' snr@0.1nm A-Z (Z-A)',
' Receiver minOSNR', ' mode', ' Gbit/s', ' nb of tsp pairs',
'N,M or blocking reason']
data = []
@@ -416,27 +384,26 @@ def path_requests_run(args=None):
for i, this_p in enumerate(propagatedpths):
rev_pth = reversed_propagatedpths[i]
if rev_pth and this_p:
psnrb = f'{round(mean(this_p[-1].snr), 2)} ({round(mean(rev_pth[-1].snr), 2)})'
psnrb = f'{round(mean(this_p[-1].snr),2)} ({round(mean(rev_pth[-1].snr),2)})'
psnr = f'{round(mean(this_p[-1].snr_01nm), 2)}' +\
f' ({round(mean(rev_pth[-1].snr_01nm), 2)})'
f' ({round(mean(rev_pth[-1].snr_01nm),2)})'
elif this_p:
psnrb = f'{round(mean(this_p[-1].snr), 2)}'
psnr = f'{round(mean(this_p[-1].snr_01nm), 2)}'
psnrb = f'{round(mean(this_p[-1].snr),2)}'
psnr = f'{round(mean(this_p[-1].snr_01nm),2)}'
try:
if rqs[i].blocking_reason in BLOCKING_NOPATH:
line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} :',
'-', '-', '-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9, 2)}',
'-', f'{rqs[i].blocking_reason}']
f'-', f'-', f'-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',
f'-', f'{rqs[i].blocking_reason}']
else:
line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', psnrb,
psnr, '-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9, 2)}',
'-', f'{rqs[i].blocking_reason}']
psnr, f'-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9, 2)}',
f'-', f'{rqs[i].blocking_reason}']
except AttributeError:
line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', psnrb,
psnr, f'{rqs[i].OSNR + equipment["SI"]["default"].sys_margins}',
f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9, 2)}',
f'{ceil(rqs[i].path_bandwidth / rqs[i].bit_rate)}', f'({rqs[i].N},{rqs[i].M})']
psnr, f'{rqs[i].OSNR}', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',
f'{ceil(rqs[i].path_bandwidth / rqs[i].bit_rate) }', f'({rqs[i].N},{rqs[i].M})']
data.append(line)
col_width = max(len(word) for row in data for word in row[2:]) # padding
@@ -447,7 +414,7 @@ def path_requests_run(args=None):
secondcol = ''.join(row[1].ljust(secondcol_width))
remainingcols = ''.join(word.center(col_width, ' ') for word in row[2:])
print(f'{firstcol} {secondcol} {remainingcols}')
print(f'{ansi_escapes.yellow}Result summary shows mean GSNR and OSNR (average over all channels){ansi_escapes.reset}')
print(f'{ansi_escapes.yellow}Result summary shows mean SNR and OSNR (average over all channels){ansi_escapes.reset}')
if args.output:
result = []

File diff suppressed because it is too large


@@ -1,243 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# JSON files format conversion legacy <-> YANG
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
YANG formatted to legacy format conversion
==========================================
"""
from argparse import ArgumentParser
from pathlib import Path
from copy import deepcopy
import json
from typing import Dict
from gnpy.tools.yang_convert_utils import convert_degree, convert_back_degree, \
convert_delta_power_range, convert_back_delta_power_range, \
convert_dict, convert_back, \
remove_null_region_city, remove_union_that_fail, \
convert_design_band, convert_back_design_band, \
convert_none_to_empty, convert_empty_to_none, \
convert_loss_coeff_list, convert_back_loss_coeff_list, \
ELEMENTS_KEY, PATH_REQUEST_KEY, RESPONSE_KEY, SPECTRUM_KEY, EQPT_TYPES, EDFA_CONFIG_KEYS, SIM_PARAMS_KEYS, \
TOPO_NMSP, SERV_NMSP, EQPT_NMSP, SPECTRUM_NMSP, SIM_PARAMS_NMSP, EDFA_CONFIG_NMSP, RESP_NMSP, \
dump_data, add_missing_default_type_variety, \
remove_namespace_context, load_data, reorder_route_objects, reorder_lumped_losses_objects, \
reorder_raman_pumps, convert_raman_coef, convert_back_raman_coef, convert_raman_efficiency, \
convert_back_raman_efficiency, convert_nf_coef, convert_back_nf_coef, \
convert_nf_fit_coef, convert_back_nf_fit_coef
def legacy_to_yang(json_data: Dict) -> Dict:
"""Convert legacy format to GNPy YANG format.
This function adds the required namespace if not present and processes the input JSON data
based on its structure to convert it to the appropriate YANG format. There is no validation
of yang formatted data.
:param json_data: The input JSON data to convert.
:type json_data: Dict
:return: The converted JSON data in GNPy YANG format.
:rtype: Dict
"""
json_data = convert_none_to_empty(deepcopy(json_data))
# case of topology json
if ELEMENTS_KEY in json_data:
json_data = reorder_raman_pumps(json_data)
json_data = reorder_lumped_losses_objects(json_data)
json_data = remove_null_region_city(json_data)
json_data = convert_degree(json_data)
json_data = convert_design_band(json_data)
json_data = convert_loss_coeff_list(json_data)
json_data = convert_raman_coef(json_data)
json_data = {TOPO_NMSP: json_data}
elif TOPO_NMSP in json_data:
# then this is a new format topology json, ensure that there are no issues
json_data[TOPO_NMSP] = convert_degree(json_data[TOPO_NMSP])
json_data[TOPO_NMSP] = convert_design_band(json_data[TOPO_NMSP])
json_data[TOPO_NMSP] = convert_loss_coeff_list(json_data[TOPO_NMSP])
json_data[TOPO_NMSP] = remove_null_region_city(json_data[TOPO_NMSP])
# case of equipment json
elif any(k in json_data for k in EQPT_TYPES):
json_data = convert_raman_efficiency(json_data)
json_data = convert_delta_power_range(json_data)
json_data = convert_nf_coef(json_data)
json_data = add_missing_default_type_variety(json_data)
json_data = {EQPT_NMSP: json_data}
elif EQPT_NMSP in json_data:
# then this is already a new format equipment json, ensure that there are no issues
json_data[EQPT_NMSP] = convert_raman_efficiency(json_data[EQPT_NMSP])
json_data[EQPT_NMSP] = convert_delta_power_range(json_data[EQPT_NMSP])
json_data[EQPT_NMSP] = convert_nf_coef(json_data[EQPT_NMSP])
json_data[EQPT_NMSP] = add_missing_default_type_variety(json_data[EQPT_NMSP])
# case of service json
elif PATH_REQUEST_KEY in json_data:
json_data = reorder_route_objects(json_data)
json_data = remove_union_that_fail(json_data)
json_data = {SERV_NMSP: json_data}
elif SERV_NMSP in json_data:
json_data[SERV_NMSP] = reorder_route_objects(json_data[SERV_NMSP])
json_data[SERV_NMSP] = remove_union_that_fail(json_data[SERV_NMSP])
# case of edfa_config json
elif any(k in json_data for k in EDFA_CONFIG_KEYS):
json_data = convert_nf_fit_coef(json_data)
json_data = {EDFA_CONFIG_NMSP: json_data}
elif EDFA_CONFIG_NMSP in json_data:
json_data[EDFA_CONFIG_NMSP] = convert_nf_fit_coef(json_data[EDFA_CONFIG_NMSP])
# case of spectrum json
elif SPECTRUM_KEY in json_data:
json_data = {SPECTRUM_NMSP: json_data[SPECTRUM_KEY]}
# case of sim_params json
elif any(k in json_data for k in SIM_PARAMS_KEYS):
json_data = {SIM_PARAMS_NMSP: json_data}
# case of response json
elif RESPONSE_KEY in json_data:
json_data = {RESP_NMSP: json_data}
elif any(k in json_data for k in [SPECTRUM_NMSP, SIM_PARAMS_NMSP, RESP_NMSP]):
# then this is a new format json, nothing to convert
pass
else:
raise ValueError('Unrecognized type of content (not topology, service or equipment)')
json_data = convert_dict(json_data)
return json_data
def yang_to_legacy(json_data: Dict) -> Dict:
"""Convert GNPy YANG format to legacy format.
This function processes the input JSON data to convert it from the new GNPy YANG format
back to the legacy format. It handles various types of content, including topology,
equipment, and service jsons, ensuring that the necessary conversions are applied.
The input data is validated with oopt-gnpy-libyang.
:param json_data: The input JSON data in GNPy YANG format to convert.
:type json_data: Dict
:return: The converted JSON data in legacy format.
:rtype: Dict
:raises ValueError: If the input JSON data does not match any recognized content type
(not topology, service, or equipment).
"""
# validate data compliance: make sure that this is yang formated data before validation.
load_data(json.dumps(legacy_to_yang(json_data)))
json_data = convert_empty_to_none(json_data)
json_data = convert_back(json_data)
# case of topology json
if ELEMENTS_KEY in json_data:
json_data = convert_back_degree(json_data)
json_data = convert_back_design_band(json_data)
json_data = convert_back_loss_coeff_list(json_data)
json_data = convert_back_raman_coef(json_data)
elif TOPO_NMSP in json_data:
json_data = convert_back_degree(json_data[TOPO_NMSP])
json_data = convert_back_design_band(json_data)
json_data = convert_back_loss_coeff_list(json_data)
json_data = convert_back_raman_coef(json_data)
# case of equipment json
elif any(k in json_data for k in EQPT_TYPES):
json_data = convert_back_delta_power_range(json_data)
json_data = convert_back_raman_efficiency(json_data)
json_data = convert_back_nf_coef(json_data)
json_data = remove_namespace_context(json_data, "gnpy-eqpt-config:")
elif EQPT_NMSP in json_data:
json_data[EQPT_NMSP] = convert_back_delta_power_range(json_data[EQPT_NMSP])
json_data[EQPT_NMSP] = convert_back_raman_efficiency(json_data[EQPT_NMSP])
json_data[EQPT_NMSP] = convert_back_nf_coef(json_data[EQPT_NMSP])
json_data = remove_namespace_context(json_data[EQPT_NMSP], "gnpy-eqpt-config:")
# case of EDFA config json
elif any(k in json_data for k in EDFA_CONFIG_KEYS):
json_data = convert_back_nf_fit_coef(json_data)
elif EDFA_CONFIG_NMSP in json_data:
json_data[EDFA_CONFIG_NMSP] = convert_back_nf_fit_coef(json_data[EDFA_CONFIG_NMSP])
# case of service json
elif SERV_NMSP in json_data:
json_data = json_data[SERV_NMSP]
# case of sim_params json
elif SIM_PARAMS_NMSP in json_data:
json_data = json_data[SIM_PARAMS_NMSP]
# case of spectrum json
elif SPECTRUM_NMSP in json_data:
json_data = {SPECTRUM_KEY: json_data[SPECTRUM_NMSP]}
# case of planning response json
elif RESP_NMSP in json_data:
json_data = json_data[RESP_NMSP]
elif any(k in json_data for k in SIM_PARAMS_KEYS + [SPECTRUM_KEY, RESPONSE_KEY, PATH_REQUEST_KEY]):
# then this is a legacy format json, nothing to convert
pass
else:
raise ValueError('Unrecognized type of content (not topology, service or equipment)')
return json_data
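# Minimal library-level round-trip sketch; 'my_topology.json' is only an illustrative
# placeholder for any legacy GNPy topology/equipment/service file:
#
#     with open('my_topology.json', encoding='utf-8') as f:
#         legacy = json.load(f)
#     yang_formatted = legacy_to_yang(legacy)     # content wrapped under its YANG namespace
#     restored = yang_to_legacy(yang_formatted)   # back to the legacy layout (values may be normalized)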
def main():
"""Conversion function
"""
parser = ArgumentParser()
parser.add_argument('--legacy-to-yang', nargs='?', type=Path,
help='convert file with this name into yangconformedname.json')
parser.add_argument('--yang-to-legacy', nargs='?', type=Path,
help='convert file with this name into gnpy'
+ ' using decimal instead of strings and null instead of [null]')
parser.add_argument('--validate', nargs='?', type=Path,
help='validate yang conformity')
parser.add_argument('-o', '--output', type=Path,
help='Stores into file with this name; default = GNPy_legacy_formatted-<file_name>.json or'
+ 'GNPy_yang_formatted-<file_name>.json')
args = parser.parse_args()
if not (args.legacy_to_yang or args.yang_to_legacy or args.validate):
parser.error("You must specify at least one of --legacy-to-yang, --yang-to-legacy, or --validate ")
output = None
converted = None
if args.validate:
with open(args.validate, 'r', encoding='utf-8') as f:
json_data = json.load(f)
load_data(json.dumps(json_data))
return 0
elif args.legacy_to_yang:
prefix = 'GNPy_yang_formatted-'
with open(args.legacy_to_yang, 'r', encoding='utf-8') as f:
json_data = json.load(f)
# note that dump_data automatically validate date against yang models
converted = dump_data(legacy_to_yang(json_data))
output = prefix + str(args.legacy_to_yang.name)
elif args.yang_to_legacy:
prefix = 'GNPy_legacy_formatted-'
with open(args.yang_to_legacy, 'r', encoding='utf-8') as f:
json_data = json.load(f)
converted = json.dumps(yang_to_legacy(json_data), indent=2, ensure_ascii=False)
output = prefix + str(args.yang_to_legacy.name)
if args.output:
output = args.output
with open(output, 'w', encoding='utf-8') as f:
f.write(converted)
if __name__ == '__main__':
main()
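# Illustrative command-line invocations (file names are placeholders; without -o the
# output defaults to GNPy_yang_formatted-<name>.json or GNPy_legacy_formatted-<name>.json):
#
#     python convert_legacy_yang.py --legacy-to-yang eqpt_config.json
#     python convert_legacy_yang.py --yang-to-legacy GNPy_yang_formatted-eqpt_config.json -o eqpt_config.json
#     python convert_legacy_yang.py --validate GNPy_yang_formatted-eqpt_config.json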


@@ -1,149 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# Utility functions that creates an Eqpt sheet template
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
create_eqpt_sheet.py
====================
XLS parser that can be called to create a "City" column in the "Eqpt" sheet.
If not present in the "Nodes" sheet, the "Type" column will be implicitly
determined based on the topology.
"""
from argparse import ArgumentParser
from pathlib import Path
import csv
from typing import List, Dict, Optional
from logging import getLogger
import dataclasses
from gnpy.core.exceptions import NetworkTopologyError
from gnpy.tools.xls_utils import generic_open_workbook, get_sheet, XLS_EXCEPTIONS, all_rows, fast_get_sheet_rows, \
WorkbookType, SheetType
logger = getLogger(__name__)
EXAMPLE_DATA_DIR = Path(__file__).parent.parent / 'example-data'
PARSER = ArgumentParser()
PARSER.add_argument('workbook', type=Path, nargs='?', default=f'{EXAMPLE_DATA_DIR}/meshTopologyExampleV2.xls',
help='create the mandatory columns in Eqpt sheet')
PARSER.add_argument('-o', '--output', type=Path, help='Store CSV file')
@dataclasses.dataclass
class Node:
"""Represents a network node with a unique identifier, connected nodes, and equipment type.
:param uid: Unique identifier of the node.
:type uid: str
:param to_node: List of connected node identifiers.
:type to_node: List[str]
:param eqpt: Equipment type associated with the node (ROADM, ILA, FUSED).
:type eqpt: str
"""
def __init__(self, uid: str, to_node: List[str], eqpt: str = None):
self.uid = uid
self.to_node = to_node
self.eqpt = eqpt
def open_sheet_with_error_handling(wb: WorkbookType, sheet_name: str, is_xlsx: bool) -> SheetType:
"""Opens a sheet from the workbook with error handling.
:param wb: The opened workbook.
:type wb: WorkbookType
:param sheet_name: Name of the sheet to open.
:type sheet_name: str
:param is_xlsx: Boolean indicating if the file is XLSX format.
:type is_xlsx: bool
:return: The worksheet object.
:rtype: SheetType
:raises NetworkTopologyError: If the sheet is not found.
"""
try:
sheet = get_sheet(wb, sheet_name, is_xlsx)
return sheet
except XLS_EXCEPTIONS as exc:
msg = f'Error: no {sheet_name} sheet in the file.'
raise NetworkTopologyError(msg) from exc
def read_excel(input_filename: Path) -> Dict[str, Node]:
"""Reads the 'Nodes' and 'Links' sheets from an Excel file to build a network graph.
:param input_filename: Path to the Excel file.
:type input_filename: Path
:return: Dictionary of nodes with their connectivity and equipment type.
:rtype: Dict[str, Node]
"""
wobo, is_xlsx = generic_open_workbook(input_filename)
links_sheet = open_sheet_with_error_handling(wobo, 'Links', is_xlsx)
get_rows_links = fast_get_sheet_rows(links_sheet) if is_xlsx else None
nodes = {}
for row in all_rows(links_sheet, is_xlsx, start=5, get_rows=get_rows_links):
node_a, node_z = row[0].value, row[1].value
# Add connection in both directions
for node1, node2 in [(node_a, node_z), (node_z, node_a)]:
if node1 in nodes:
nodes[node1].to_node.append(node2)
else:
nodes[node1] = Node(node1, [node2])
nodes_sheet = open_sheet_with_error_handling(wobo, 'Nodes', is_xlsx)
get_rows_nodes = fast_get_sheet_rows(nodes_sheet) if is_xlsx else None
for row in all_rows(nodes_sheet, is_xlsx, start=5, get_rows=get_rows_nodes):
node = row[0].value
eqpt = row[6].value
if node not in nodes:
raise NetworkTopologyError(f'Error: node {node} is not listed on the links sheet.')
if eqpt == 'ILA' and len(nodes[node].to_node) != 2:
degree = len(nodes[node].to_node)
raise NetworkTopologyError(f'Error: node {node} has an incompatible node degree ({degree}) '
+ 'for its equipment type (ILA).')
if eqpt == '' and len(nodes[node].to_node) == 2:
nodes[node].eqpt = 'ILA'
elif eqpt == '' and len(nodes[node].to_node) != 2:
nodes[node].eqpt = 'ROADM'
else:
nodes[node].eqpt = eqpt
return nodes
def create_eqpt_template(nodes: Dict[str, Node], input_filename: Path, output_filename: Optional[Path] = None):
"""Creates a CSV template to help users populate equipment types for nodes.
:param nodes: Dictionary of nodes.
:type nodes: Dict[str, Node]
:param input_filename: Path to the original Excel file.
:type input_filename: Path
:param output_filename: Path to save the CSV file; generated if None.
:type output_filename: Optional(Path)
"""
if output_filename is None:
output_filename = input_filename.parent / (input_filename.with_suffix('').stem + '_eqpt_sheet.csv')
with open(output_filename, mode='w', encoding='utf-8', newline='') as output_file:
output_writer = csv.writer(output_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
amp_header = ['amp_type', 'att_in', 'amp_gain', 'tilt', 'att_out', 'delta_p']
output_writer.writerow(['node_a', 'node_z'] + amp_header + amp_header)
for node in nodes.values():
if node.eqpt == 'ILA':
output_writer.writerow([node.uid, node.to_node[0]])
if node.eqpt == 'ROADM':
for to_node in node.to_node:
output_writer.writerow([node.uid, to_node])
msg = f'File {output_filename} successfully created.'
logger.info(msg)
if __name__ == '__main__':
ARGS = PARSER.parse_args()
create_eqpt_template(read_excel(ARGS.workbook), ARGS.workbook, ARGS.output)
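# Illustrative invocation (the workbook name is a placeholder); it writes one CSV row per
# ILA direction and per ROADM degree, ready to be pasted into the "Eqpt" sheet:
#
#     python create_eqpt_sheet.py meshTopologyExampleV2.xls -o mesh_eqpt_sheet.csv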


@@ -1,72 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.tools.default_edfa_configs: loads JSON configuration files at module initialization time
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
gnpy.tools.default_edfa_config
==============================
Default configs for pre-defined amplifiers:
- Juniper-BoosterHG.json,
- std_medium_gain_advanced_config.json
"""
from logging import getLogger
from typing import Dict, Optional
from json import JSONDecodeError, load
from pathlib import Path
from gnpy.core.exceptions import ConfigurationError
from gnpy.tools.convert_legacy_yang import yang_to_legacy
_logger = getLogger(__name__)
_examples_dir = Path(__file__).parent.parent / 'example-data'
def _load_json_file(file_path: Path) -> Optional[Dict]:
"""Load and parse a JSON file.
:param file_path: Path to the JSON file to load
:type file_path: Path
:return: Dict containing the parsed JSON data or None if loading fails
:rtype: Optional[Dict]
"""
try:
with open(file_path, 'r', encoding='utf-8') as file:
return yang_to_legacy(load(file))
except FileNotFoundError:
msg = f"Configuration file not found: {file_path}"
_logger.error(msg)
return None
except JSONDecodeError as e:
msg = f"Invalid JSON in configuration file {file_path}: {e}"
_logger.error(msg)
return None
# Default files to load
_files_to_load = {
"std_medium_gain_advanced_config.json": _examples_dir / "std_medium_gain_advanced_config.json",
"Juniper-BoosterHG.json": _examples_dir / "Juniper-BoosterHG.json"
}
# Load configurations
_configs: Dict = {}
for key, filepath in _files_to_load.items():
config_data = _load_json_file(filepath)
if config_data is not None:
_configs[key] = config_data
else:
_msg = f"Failed to load configuration: {key}. Using empty dict as fallback."
_logger.error(_msg)
raise ConfigurationError
# Expose the constant
DEFAULT_EXTRA_CONFIG: Dict[str, Dict] = _configs
DEFAULT_EQPT_CONFIG: Path = _examples_dir / "eqpt_config.json"
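# Minimal consumption sketch (assuming callers look configs up by file name):
#
#     from gnpy.tools.default_edfa_config import DEFAULT_EXTRA_CONFIG, DEFAULT_EQPT_CONFIG
#     booster_cfg = DEFAULT_EXTRA_CONFIG['Juniper-BoosterHG.json']   # parsed (legacy-format) dict
#     eqpt_path = DEFAULT_EQPT_CONFIG                                # Path to the default eqpt_config.json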

File diff suppressed because it is too large


@@ -1,55 +1,62 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.tools.plots: Graphs and plots usable from a CLI application
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
'''
gnpy.tools.plots
================
Graphs and plots usable from a CLI application
"""
Graphs and plots usable from a CLI application
'''
from matplotlib.pyplot import show, axis, figure, title, text
from networkx import draw_networkx
from networkx import draw_networkx_nodes, draw_networkx_edges, draw_networkx_labels
from gnpy.core.elements import Transceiver
def _try_city(node):
return node.location.city if node.location.city else node.uid
def plot_baseline(network):
edges = set(network.edges())
pos = {n: (n.lng, n.lat) for n in network.nodes()}
labels = {n: _try_city(n) for n in network.nodes() if isinstance(n, Transceiver)}
draw_networkx(network, pos=pos, node_size=50, node_color='#ababab', edge_color='#ababab',
labels=labels, font_size=14)
labels = {n: n.location.city for n in network.nodes() if isinstance(n, Transceiver)}
city_labels = set(labels.values())
for n in network.nodes():
if n.location.city and n.location.city not in city_labels:
labels[n] = n.location.city
city_labels.add(n.location.city)
label_pos = pos
fig = figure()
kwargs = {'figure': fig, 'pos': pos}
plot = draw_networkx_nodes(network, nodelist=network.nodes(), node_color='#ababab', **kwargs)
draw_networkx_edges(network, edgelist=edges, edge_color='#ababab', **kwargs)
draw_networkx_labels(network, labels=labels, font_size=14, **{**kwargs, 'pos': label_pos})
axis('off')
show()
def plot_results(network, path, source, destination):
def plot_results(network, path, source, destination, infos):
path_edges = set(zip(path[:-1], path[1:]))
edges = set(network.edges()) - path_edges
nodes = [n for n in network.nodes() if n not in path]
pos = {n: (n.lng, n.lat) for n in network.nodes()}
nodes_by_pos = {}
nodes = {}
for k, (x, y) in pos.items():
nodes_by_pos.setdefault((round(x, 1), round(y, 1)), []).append(k)
labels = {n: _try_city(n) for n in network.nodes() if isinstance(n, Transceiver)}
nodes.setdefault((round(x, 1), round(y, 1)), []).append(k)
labels = {n: n.location.city for n in network.nodes() if isinstance(n, Transceiver)}
city_labels = set(labels.values())
for n in network.nodes():
if n.location.city and n.location.city not in city_labels:
labels[n] = n.location.city
city_labels.add(n.location.city)
label_pos = pos
fig = figure()
draw_networkx(network, pos=pos, labels=labels, font_size=14,
nodelist=nodes, node_color='#ababab', node_size=50,
edgelist=edges, edge_color='#ababab')
draw_networkx(network, pos=pos, with_labels=False,
nodelist=path, node_color='#ff0000', node_size=55,
edgelist=path_edges, edge_color='#ff0000')
title(f'Propagating from {_try_city(source)} to {_try_city(destination)}')
kwargs = {'figure': fig, 'pos': pos}
all_nodes = [n for n in network.nodes() if n not in path]
plot = draw_networkx_nodes(network, nodelist=all_nodes, node_color='#ababab', node_size=50, **kwargs)
draw_networkx_nodes(network, nodelist=path, node_color='#ff0000', node_size=55, **kwargs)
draw_networkx_edges(network, edgelist=edges, edge_color='#ababab', **kwargs)
draw_networkx_edges(network, edgelist=path_edges, edge_color='#ff0000', **kwargs)
draw_networkx_labels(network, labels=labels, font_size=14, **{**kwargs, 'pos': label_pos})
title(f'Propagating from {source.loc.city} to {destination.loc.city}')
axis('off')
heading = 'Spectral Information\n\n'
@@ -58,7 +65,7 @@ def plot_results(network, path, source, destination):
bbox={'boxstyle': 'round', 'facecolor': 'wheat', 'alpha': 0.5})
msgs = {(x, y): heading + '\n\n'.join(str(n) for n in ns if n in path)
for (x, y), ns in nodes_by_pos.items()}
for (x, y), ns in nodes.items()}
def hover(event):
if event.xdata is None or event.ydata is None:

gnpy/tools/rest_example.py

@@ -0,0 +1,172 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
gnpy.tools.rest_example
=======================
GNPy as a REST API example
'''
import json
import logging
import os
import re
from logging.handlers import RotatingFileHandler
from pathlib import Path
import werkzeug
from flask import Flask, request
from numpy import mean
from werkzeug.exceptions import InternalServerError
import gnpy.core.ansi_escapes as ansi_escapes
import gnpy.core.exceptions as exceptions
from gnpy.core.network import build_network
from gnpy.core.utils import lin2db, automatic_nch
from gnpy.tools.json_io import requests_from_json, disjunctions_from_json, _equipment_from_json, network_from_json
from gnpy.topology.request import (ResultElement, compute_path_dsjctn, requests_aggregation,
correct_json_route_list,
deduplicate_disjunctions, compute_path_with_disjunction)
from gnpy.topology.spectrum_assignment import build_oms_list, pth_assign_spectrum
_logger = logging.getLogger(__name__)
_examples_dir = Path(__file__).parent.parent / 'example-data'
_reaesc = re.compile(r'\x1b[^m]*m')
app = Flask(__name__)
@app.route('/api/v1/path-computation', methods=['POST'])
def compute_path():
data = request.json
service = data['gnpy-api:service']
topology = data['gnpy-api:topology']
equipment = _equipment_from_json(data['gnpy-api:equipment'],
os.path.join(_examples_dir, 'std_medium_gain_advanced_config.json'))
network = network_from_json(topology, equipment)
propagatedpths, reversed_propagatedpths, rqs = path_requests_run(service, network, equipment)
# Generate the output
result = []
# assumes that the list of rqs and the list of propagatedpths have the same order
for i, pth in enumerate(propagatedpths):
result.append(ResultElement(rqs[i], pth, reversed_propagatedpths[i]))
return {"result": {"response": [n.json for n in result]}}, 201
@app.route('/api/v1/status', methods=['GET'])
def api_status():
return {"version": "v1", "status": "ok"}, 200
def _init_logger():
handler = RotatingFileHandler('api.log', maxBytes=1024 * 1024, backupCount=5, encoding='utf-8')
ch = logging.StreamHandler()
logging.basicConfig(level=logging.INFO, handlers=[handler, ch],
format="%(asctime)s %(levelname)s %(name)s(%(lineno)s) [%(threadName)s - %(thread)d] - %("
"message)s")
def path_requests_run(service, network, equipment):
# Build the network once using the default power defined in SI in eqpt config
# TODO power density: db2lin(power_dbm) / power_dbm * nb channels as defined by
# spacing, f_min and f_max
p_db = equipment['SI']['default'].power_dbm
p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,
equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
build_network(network, equipment, p_db, p_total_db)
oms_list = build_oms_list(network, equipment)
rqs = requests_from_json(service, equipment)
# check that request ids are unique. Non-unique ids may
# mess up the computation: better to stop the computation
all_ids = [r.request_id for r in rqs]
if len(all_ids) != len(set(all_ids)):
for item in list(set(all_ids)):
all_ids.remove(item)
msg = f'Requests id {all_ids} are not unique'
_logger.critical(msg)
raise ValueError(msg)
rqs = correct_json_route_list(network, rqs)
# pths = compute_path(network, equipment, rqs)
dsjn = disjunctions_from_json(service)
# need to warn or correct in case of wrong disjunction form
# disjunction must not be repeated with same or different ids
dsjn = deduplicate_disjunctions(dsjn)
rqs, dsjn = requests_aggregation(rqs, dsjn)
# TODO export novel set of aggregated demands in a json file
_logger.info(f'{ansi_escapes.blue}The following services have been requested:{ansi_escapes.reset}' + str(rqs))
_logger.info(f'{ansi_escapes.blue}Computing all paths with constraints{ansi_escapes.reset}')
pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
_logger.info(f'{ansi_escapes.blue}Propagating on selected path{ansi_escapes.reset}')
propagatedpths, reversed_pths, reversed_propagatedpths = compute_path_with_disjunction(network, equipment, rqs,
pths)
# Note that the deepcopy used in compute_path_with_disjunction returns
# a list of nodes which do not belong to the network (they are copies of the node objects),
# so propagation cannot be run on these nodes.
pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)
return propagatedpths, reversed_propagatedpths, rqs
def common_error_handler(exception):
"""
:type exception: Exception
"""
status_code = 500
if not isinstance(exception, werkzeug.exceptions.HTTPException):
exception = werkzeug.exceptions.InternalServerError()
exception.description = "Something went wrong on our side."
response = {
'message': exception.name,
'description': exception.description,
'code': exception.code
}
return werkzeug.Response(response=json.dumps(response), status=status_code, mimetype='application/json')
def bad_request_handler(exception):
response = {
'message': 'bad request',
'description': _reaesc.sub('', str(exception)),
'code': 400
}
return werkzeug.Response(response=json.dumps(response), status=400, mimetype='application/json')
def _init_app():
app.register_error_handler(KeyError, bad_request_handler)
app.register_error_handler(TypeError, bad_request_handler)
app.register_error_handler(ValueError, bad_request_handler)
app.register_error_handler(exceptions.ConfigurationError, bad_request_handler)
app.register_error_handler(exceptions.DisjunctionError, bad_request_handler)
app.register_error_handler(exceptions.EquipmentConfigError, bad_request_handler)
app.register_error_handler(exceptions.NetworkTopologyError, bad_request_handler)
app.register_error_handler(exceptions.ServiceError, bad_request_handler)
app.register_error_handler(exceptions.SpectrumError, bad_request_handler)
app.register_error_handler(exceptions.ParametersError, bad_request_handler)
app.register_error_handler(AssertionError, bad_request_handler)
app.register_error_handler(InternalServerError, common_error_handler)
for error_code in werkzeug.exceptions.default_exceptions:
app.register_error_handler(error_code, common_error_handler)
def main():
_init_logger()
_init_app()
app.run(host='0.0.0.0', port=8080)
if __name__ == '__main__':
main()
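# Running this module directly starts the Flask app on 0.0.0.0:8080; the status route can
# then serve as a liveness probe:
#
#     curl http://localhost:8080/api/v1/status
#     {"version": "v1", "status": "ok"}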


@@ -1,11 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.tools.service_sheet: XLS parser that can be called to create a JSON request file
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
gnpy.tools.service_sheet
========================
@@ -16,181 +11,109 @@ Yang model for requesting path computation.
See: draft-ietf-teas-yang-path-computation-01.txt
"""
from xlrd import open_workbook, XL_CELL_EMPTY
from collections import namedtuple
from logging import getLogger
from copy import deepcopy
from pathlib import Path
from typing import Dict, List, Generator
from networkx import DiGraph
from gnpy.core.utils import db2lin
from gnpy.core.exceptions import ServiceError
from gnpy.core.elements import Transceiver, Roadm, Edfa, Fiber
from gnpy.tools.convert import corresp_names, corresp_next_node, all_rows, generic_open_workbook, get_sheet, \
parse_row, parse_headers
from gnpy.tools.xls_utils import correct_cell_int_to_str, get_sheet_name, is_type_cell_empty
import gnpy.core.ansi_escapes as ansi_escapes
from gnpy.tools.convert import corresp_names, corresp_next_node
SERVICES_COLUMN = 12
SERVICE_LINE = 4
def all_rows(sheet, start=0):
return (sheet.row(x) for x in range(start, sheet.nrows))
logger = getLogger(__name__)
class Request:
"""DATA class for a request.
:params request_id (int): The unique identifier for the request.
:params source (str): The source node for the communication.
:params destination (str): The destination node for the communication.
:params trx_type (str): The type of transmission for the communication.
:params mode (str, optional): The mode of transmission. Defaults to None.
:params spacing (float, optional): The spacing between channels. Defaults to None.
:params power (float, optional): The power level for the communication. Defaults to None.
:params nb_channel (int, optional): The number of channels required for the communication. Defaults to None.
:params disjoint_from (str, optional): The node to be disjoint from. Defaults to ''.
:params nodes_list (list, optional): The list of nodes involved in the communication. Defaults to None.
:params is_loose (str, optional): Indicates if the communication is loose. Defaults to ''.
:params path_bandwidth (float, optional): The bandwidth required for the communication. Defaults to None.
"""
def __init__(self, **kwargs):
"""Constructor method
"""
super().__init__()
self.update_attr(kwargs)
def update_attr(self, kwargs):
"""Updates the attributes of the node based on provided keyword arguments.
:param kwargs: A dictionary of attributes to update.
"""
clean_kwargs = {k: v for k, v in kwargs.items() if v != '' and v is not None}
for k, v in self.default_values.items():
v = clean_kwargs.get(k, v)
if k != 'is_loose':
if k in ['request_id', 'trx_type', 'mode', 'disjoint_from']:
v = correct_cell_int_to_str(v)
setattr(self, k, v)
else:
self.is_loose = v in ['', None, 'yes', 'Yes', 'YES']
default_values = {
'request_id': None,
'source': None,
'destination': None,
'trx_type': None,
'mode': None,
'spacing': None,
'power': None,
'nb_channel': None,
'disjoint_from': '',
'nodes_list': '',
'is_loose': None,
'path_bandwidth': None
}
class Request(namedtuple('Request', 'request_id source destination trx_type mode \
spacing power nb_channel disjoint_from nodes_list is_loose path_bandwidth')):
def __new__(cls, request_id, source, destination, trx_type, mode=None, spacing=None, power=None, nb_channel=None, disjoint_from='', nodes_list=None, is_loose='', path_bandwidth=None):
return super().__new__(cls, request_id, source, destination, trx_type, mode, spacing, power, nb_channel, disjoint_from, nodes_list, is_loose, path_bandwidth)
class Element:
"""
"""
def __init__(self, uid):
self.uid = uid
def __eq__(self, other):
return isinstance(other, type(self)) and self.uid == other.uid
return type(self) == type(other) and self.uid == other.uid
def __hash__(self):
return hash((type(self), self.uid))
class Request_element(Element):
"""Class that generate the request in the json format
:param request_param (Request): The request object containing the information for the element.
:param equipment (dict): The equipment configuration for the communication.
:param bidir (bool): Indicates if the communication is bidirectional.
Attributes:
request_id (str): The unique identifier for the request.
source (str): The source node for the communication.
destination (str): The destination node for the communication.
srctpid (str): The source TP ID for the communication.
dsttpid (str): The destination TP ID for the communication.
bidir (bool): Indicates if the communication is bidirectional.
trx_type (str): The type of transmission for the communication.
mode (str): The mode of transmission for the communication.
spacing (float): The spacing between channels for the communication.
power (float): The power level for the communication.
nb_channel (int): The number of channels required for the communication.
disjoint_from (list): The list of nodes to be disjoint from.
nodes_list (list): The list of nodes involved in the communication.
loose (str): Indicates if the communication is loose or strict.
path_bandwidth (float): The bandwidth required for the communication.
"""
def __init__(self, request_param: Request, equipment: Dict, bidir: bool):
"""
"""
super().__init__(uid=request_param.request_id)
def __init__(self, Request, equipment, bidir):
# request_id is str
# excel has automatic number formatting that adds .0 on integer values
# the next lines recover the pure int value, assuming this .0 is unwanted
self.request_id = request_param.request_id
self.source = f'trx {request_param.source}'
self.destination = f'trx {request_param.destination}'
# The automatic naming generated by excel parser requires that source and dest name
self.request_id = correct_xlrd_int_to_str_reading(Request.request_id)
self.source = f'trx {Request.source}'
self.destination = f'trx {Request.destination}'
# TODO: the automatic naming generated by excel parser requires that source and dest name
# be a string starting with 'trx' : this is manually added here.
self.srctpid = f'trx {request_param.source}'
self.dsttpid = f'trx {request_param.destination}'
self.srctpid = f'trx {Request.source}'
self.dsttpid = f'trx {Request.destination}'
self.bidir = bidir
# test that trx_type belongs to eqpt_config.json
# if not replace it with a default
self.mode = None
try:
available_modes = [mode['format'] for mode in equipment['Transceiver'][request_param.trx_type].mode]
self.trx_type = request_param.trx_type
if request_param.mode not in [None, '']:
if request_param.mode in available_modes:
self.mode = request_param.mode
if equipment['Transceiver'][Request.trx_type]:
self.trx_type = correct_xlrd_int_to_str_reading(Request.trx_type)
if Request.mode is not None:
Requestmode = correct_xlrd_int_to_str_reading(Request.mode)
if [mode for mode in equipment['Transceiver'][Request.trx_type].mode if mode['format'] == Requestmode]:
self.mode = Requestmode
else:
msg = f'Request Id: {self.request_id} - could not find tsp : \'{request_param.trx_type}\' ' \
+ f'with mode: \'{request_param.mode}\' in eqpt library \nComputation stopped.'
msg = f'Request Id: {self.request_id} - could not find tsp : \'{Request.trx_type}\' with mode: \'{Requestmode}\' in eqpt library \nComputation stopped.'
# print(msg)
logger.critical(msg)
raise ServiceError(msg)
except KeyError as e:
msg = f'Request Id: {self.request_id} - could not find tsp : \'{request_param.trx_type}\' with mode: ' \
+ f'\'{request_param.mode}\' in eqpt library \nComputation stopped.'
raise ServiceError(msg) from e
else:
Requestmode = None
self.mode = Request.mode
except KeyError:
msg = f'Request Id: {self.request_id} - could not find tsp : \'{Request.trx_type}\' with mode: \'{Request.mode}\' in eqpt library \nComputation stopped.'
# print(msg)
logger.critical(msg)
raise ServiceError(msg)
# excel input are in GHz and dBm
if request_param.spacing:
self.spacing = request_param.spacing * 1e9
if Request.spacing is not None:
self.spacing = Request.spacing * 1e9
else:
msg = f'Request {self.request_id} missing spacing: spacing is mandatory.\ncomputation stopped'
logger.critical(msg)
raise ServiceError(msg)
if Request.power is not None:
self.power = db2lin(Request.power) * 1e-3
else:
self.power = None
if Request.nb_channel is not None:
self.nb_channel = int(Request.nb_channel)
else:
self.nb_channel = None
self.power = None
if request_param.power is not None:
self.power = db2lin(request_param.power) * 1e-3
self.nb_channel = None
if request_param.nb_channel is not None:
self.nb_channel = int(request_param.nb_channel)
self.disjoint_from = [n for n in request_param.disjoint_from.split(' | ') if request_param.disjoint_from]
value = correct_xlrd_int_to_str_reading(Request.disjoint_from)
self.disjoint_from = [n for n in value.split(' | ') if value]
self.nodes_list = []
if request_param.nodes_list:
self.nodes_list = request_param.nodes_list.split(' | ')
if Request.nodes_list:
self.nodes_list = Request.nodes_list.split(' | ')
self.loose = 'LOOSE'
if not request_param.is_loose:
if Request.is_loose.lower() == 'no':
self.loose = 'STRICT'
self.path_bandwidth = None
if Request.path_bandwidth is not None:
self.path_bandwidth = Request.path_bandwidth * 1e9
else:
self.path_bandwidth = 0
self.path_bandwidth = 0
if request_param.path_bandwidth is not None:
self.path_bandwidth = request_param.path_bandwidth * 1e9
uid = property(lambda self: repr(self))
@property
def pathrequest(self):
"""Creates json dictionnary for the request
"""
# Default assumption for bidir is False
req_dictionnary = {
'request-id': self.request_id,
@@ -215,15 +138,14 @@ class Request_element(Element):
if self.nodes_list:
req_dictionnary['explicit-route-objects'] = {}
temp = {'route-object-include-exclude': [
{
'index': self.nodes_list.index(node),
'explicit-route-usage': 'route-include-ero',
'num-unnum-hop': {
'node-id': f'{node}',
'link-tp-id': 'link-tp-id is not used',
'hop-type': f'{self.loose}',
}
}
{'explicit-route-usage': 'route-include-ero',
'index': self.nodes_list.index(node),
'num-unnum-hop': {
'node-id': f'{node}',
'link-tp-id': 'link-tp-id is not used',
'hop-type': f'{self.loose}',
}
}
for node in self.nodes_list]
}
req_dictionnary['explicit-route-objects'] = temp
@@ -234,34 +156,34 @@ class Request_element(Element):
@property
def pathsync(self):
"""Creates json dictionnary for disjunction list (synchronization vector)
"""
if self.disjoint_from:
return {'synchronization-id': self.request_id,
'svec': {
'relaxable': 'false',
'disjointness': 'node link',
'request-id-number': [self.request_id] + list(self.disjoint_from)
'request-id-number': [self.request_id] + [n for n in self.disjoint_from]
}
}
return None
else:
return None
# TO-DO: avoid multiple entries with same synchronisation vectors
@property
def json(self):
"""Returns the json dictionnary for requests and for synchronisation vector
"""
return self.pathrequest, self.pathsync
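# Sketch of what the json property yields for a row with an explicit route and a disjunction
# (placeholder values; fields not shown here are elided with "..."):
#
#     ({'request-id': '1', ...,
#       'explicit-route-objects': {'route-object-include-exclude': [...]}},
#      {'synchronization-id': '1',
#       'svec': {'relaxable': 'false', 'disjointness': 'node link', 'request-id-number': ['1', '2']}})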
def read_service_sheet(
input_filename: Path,
eqpt: Dict,
network: DiGraph,
network_filename: Path = None,
bidir: bool = False) -> Dict:
input_filename,
eqpt,
network,
network_filename=None,
bidir=False,
filter_region=None):
""" converts a service sheet into a json structure
"""
if filter_region is None:
filter_region = []
if network_filename is None:
network_filename = input_filename
service = parse_excel(input_filename)
@@ -269,86 +191,70 @@ def read_service_sheet(
req = correct_xls_route_list(network_filename, network, req)
# if there is no sync vector , do not write any synchronization
synchro = [n.json[1] for n in req if n.json[1] is not None]
data = {'path-request': [n.json[0] for n in req]}
if synchro:
data['synchronization'] = synchro
data = {
'path-request': [n.json[0] for n in req],
'synchronization': synchro
}
else:
data = {
'path-request': [n.json[0] for n in req]
}
return data
def parse_excel(input_filename: Path) -> List[Request]:
"""Open xls_file and reads 'Service' sheet
Returns the list of services data in Request class
"""
wb, is_xlsx = generic_open_workbook(input_filename)
service_sheet = get_sheet(wb, 'Service', is_xlsx)
services = list(parse_service_sheet(service_sheet, is_xlsx))
def correct_xlrd_int_to_str_reading(v):
if not isinstance(v, str):
value = str(int(v))
if value.endswith('.0'):
value = value[:-2]
else:
value = v
return value
def parse_row(row, fieldnames):
return {f: r.value for f, r in zip(fieldnames, row[0:SERVICES_COLUMN])
if r.ctype != XL_CELL_EMPTY}
def parse_excel(input_filename):
with open_workbook(input_filename) as wb:
service_sheet = wb.sheet_by_name('Service')
services = list(parse_service_sheet(service_sheet))
return services
def parse_service_sheet(service_sheet, is_xlsx) -> Generator[Request, None, None]:
def parse_service_sheet(service_sheet):
""" reads each column according to authorized fieldnames. order is not important.
"""
logger.debug('Validating headers on %r', get_sheet_name(service_sheet, is_xlsx))
logger.info(f'Validating headers on {service_sheet.name!r}')
# add a test on field to enable the '' field case that arises when columns on the
# right hand side are used as comments or drawing in the excel sheet
header = [x.value.strip() for x in service_sheet.row(4)[0:SERVICES_COLUMN]
if len(x.value.strip()) > 0]
# create a service_fieldname independent from the excel column order
# to be compatible with any version of the sheet
# the following dictionary records the excel field names and the corresponding parameter's name
authorized_fieldnames = {
'route id': 'request_id', 'Source': 'source', 'Destination': 'destination',
'TRX type': 'trx_type', 'Mode': 'mode', 'System: spacing': 'spacing',
'System: input power (dBm)': 'power', 'System: nb of channels': 'nb_channel',
'routing: disjoint from': 'disjoint_from', 'routing: path': 'nodes_list',
'routing: is loose?': 'is_loose', 'path bandwidth': 'path_bandwidth'}
header = parse_headers(service_sheet, is_xlsx, authorized_fieldnames, {}, SERVICE_LINE, (0, SERVICES_COLUMN))
# create a service_fieldname independent from the excel column order
# to be compatible with any version of the sheet
# the following dictionary records the excel field names and the corresponding parameter's name
for row in all_rows(service_sheet, is_xlsx, start=5):
if not is_type_cell_empty(row[0], is_xlsx):
# Check required because openpyxl in read_only mode can return "ghost" rows at the end of the document
# (ReadOnlyCell cells with no actual value but formatting information even for empty rows).
yield Request(**parse_row(row[0:SERVICES_COLUMN], header))
def check_end_points(pathreq: Request_element, network: DiGraph):
"""Raise error if end point is not correct
"""
transponders = [n.uid for n in network.nodes() if isinstance(n, Transceiver)]
if pathreq.source not in transponders:
msg = f'Request: {pathreq.request_id}: could not find' +\
f' transponder source : {pathreq.source}.'
try:
service_fieldnames = [authorized_fieldnames[e] for e in header]
except KeyError:
msg = f'Malformed header on Service sheet: {header} field not in {authorized_fieldnames}'
logger.critical(msg)
raise ServiceError(msg)
if pathreq.destination not in transponders:
msg = f'Request: {pathreq.request_id}: could not find' +\
f' transponder destination: {pathreq.destination}.'
logger.critical(msg)
raise ServiceError(msg)
raise ValueError(msg)
for row in all_rows(service_sheet, start=5):
yield Request(**parse_row(row[0:SERVICES_COLUMN], service_fieldnames))
def find_node_sugestion(n_id, corresp_roadm, corresp_fused, corresp_ila, network):
"""
"""
roadmtype = [n.uid for n in network.nodes() if isinstance(n, Roadm)]
edfatype = [n.uid for n in network.nodes() if isinstance(n, Edfa)]
# check that n_id is in the node list, if not find a correspondence name
if n_id in roadmtype + edfatype:
return [n_id]
# check roadm, fused, and ila in this order, because ila automatic names
# contain roadm names. If it is a fused node, next ila names might be correct
# suggestions, especially if following fibers were split and ila names
# created with the name of the fused node
if n_id in corresp_roadm.keys():
return corresp_roadm[n_id]
if n_id in corresp_fused.keys():
return corresp_fused[n_id] + corresp_ila[n_id]
if n_id in corresp_ila.keys():
return corresp_ila[n_id]
return []
def correct_xls_route_list(network_filename: Path, network: DiGraph,
pathreqlist: List[Request_element]) -> List[Request_element]:
def correct_xls_route_list(network_filename, network, pathreqlist):
""" prepares the format of route list of nodes to be consistant with nodes names:
remove wrong names, find correct names for ila, roadm and fused if the entry was
xls.
@@ -362,17 +268,32 @@ def correct_xls_route_list(network_filename: Path, network: DiGraph,
corresp_ila, next_node = corresp_next_node(network, corresp_ila, corresp_roadm)
# finally correct constraints based on these dict
trxfibertype = [n.uid for n in network.nodes() if isinstance(n, (Transceiver, Fiber))]
roadmtype = [n.uid for n in network.nodes() if isinstance(n, Roadm)]
edfatype = [n.uid for n in network.nodes() if isinstance(n, Edfa)]
# TODO there is a problem of identification of fibers in case of parallel
# fibers between two adjacent roadms so fiber constraint is not supported
transponders = [n.uid for n in network.nodes() if isinstance(n, Transceiver)]
for pathreq in pathreqlist:
# first check that source and dest are transceivers
check_end_points(pathreq, network)
if pathreq.source not in transponders:
msg = f'{ansi_escapes.red}Request: {pathreq.request_id}: could not find' +\
f' transponder source : {pathreq.source}.{ansi_escapes.reset}'
logger.critical(msg)
raise ServiceError(msg)
if pathreq.destination not in transponders:
msg = f'{ansi_escapes.red}Request: {pathreq.request_id}: could not find' +\
f' transponder destination: {pathreq.destination}.{ansi_escapes.reset}'
logger.critical(msg)
raise ServiceError(msg)
# silently pop source and dest nodes from the list if they were added by the user as first
# and last elem in the constraints respectively. Other positions must lead to an error
# caught later on
if pathreq.nodes_list and pathreq.source == pathreq.nodes_list[0]:
pathreq.loose_list.pop(0)
pathreq.nodes_list.pop(0)
if pathreq.nodes_list and pathreq.destination == pathreq.nodes_list[-1]:
pathreq.loose_list.pop(-1)
pathreq.nodes_list.pop(-1)
# Then process user defined constraints with respect to automatic namings
temp = deepcopy(pathreq)
@@ -382,57 +303,79 @@ def correct_xls_route_list(network_filename: Path, network: DiGraph,
# n_id must not be a transceiver and must not be a fiber (non supported, user
# can not enter fiber names in excel)
if n_id not in trxfibertype:
nodes_suggestion = find_node_sugestion(n_id, corresp_roadm, corresp_fused, corresp_ila, network)
try:
if len(nodes_suggestion) > 1:
# if there is more than one suggestion, we need to choose the direction
# we rely on the next node provided by the user for this purpose
new_n = next(n for n in nodes_suggestion
if n in next_node
and next_node[n] in temp.nodes_list[i:] + [pathreq.destination]
and next_node[n] not in temp.nodes_list[:i])
elif len(nodes_suggestion) == 1:
new_n = nodes_suggestion[0]
# check that n_id is in the node list, if not find a correspondence name
if n_id in roadmtype + edfatype:
nodes_suggestion = [n_id]
else:
# check roadm, fused, and ila in this order, because ila automatic names
# contain roadm names. If it is a fused node, next ila names might be correct
# suggestions, especially if following fibers were split and ila names
# created with the name of the fused node
if n_id in corresp_roadm.keys():
nodes_suggestion = corresp_roadm[n_id]
elif n_id in corresp_fused.keys():
nodes_suggestion = corresp_fused[n_id] + corresp_ila[n_id]
elif n_id in corresp_ila.keys():
nodes_suggestion = corresp_ila[n_id]
else:
if temp.loose == 'LOOSE':
# if no matching can be found in the network just ignore this constraint
# if it is a loose constraint
# warns the user that this node is not part of the topology
msg = f'{pathreq.request_id}: Invalid node specified:\n\t\'{n_id}\'' \
+ ', could not use it as constraint, skipped!'
print(msg)
nodes_suggestion = []
if nodes_suggestion:
try:
if len(nodes_suggestion) > 1:
# if there is more than one suggestion, we need to choose the direction
# we rely on the next node provided by the user for this purpose
new_n = next(n for n in nodes_suggestion
if n in next_node.keys() and next_node[n]
in temp.nodes_list[i:] + [pathreq.destination] and
next_node[n] not in temp.nodes_list[:i])
else:
new_n = nodes_suggestion[0]
if new_n != n_id:
# warns the user when the correct name is used only in verbose mode,
# eg 'a' is a roadm and correct name is 'roadm a' or when there was
# too much ambiguity, 'b' is an ila, its name can be:
# Edfa0_fiber (a → b)-xx if next node is c or
# Edfa0_fiber (c → b)-xx if next node is a
msg = f'{ansi_escapes.yellow}Invalid route node specified:' +\
f'\n\t\'{n_id}\', replaced with \'{new_n}\'{ansi_escapes.reset}'
logger.info(msg)
pathreq.nodes_list.remove(n_id)
continue
msg = f'{pathreq.request_id}: Could not find node:\n\t\'{n_id}\' in network' \
+ ' topology. Strict constraint can not be applied.'
raise ServiceError(msg)
if new_n != n_id:
# warns the user when the correct name is used only in verbose mode,
# eg 'a' is a roadm and correct name is 'roadm a' or when there was
# too much ambiguity, 'b' is an ila, its name can be:
# "east edfa in b to c", or "west edfa in b to a" if next node is c or
# "west edfa in b to c", or "east edfa in b to a" if next node is a
msg = f'{pathreq.request_id}: Invalid route node specified:' \
+ f'\n\t\'{n_id}\', replaced with \'{new_n}\''
pathreq.nodes_list[pathreq.nodes_list.index(n_id)] = new_n
except StopIteration:
# shall not come in this case, unless requested direction does not exist
msg = f'{ansi_escapes.yellow}Invalid route specified {n_id}: could' +\
f' not decide on direction, skipped!.\nPlease add a valid' +\
f' direction in constraints (next neighbour node){ansi_escapes.reset}'
print(msg)
logger.info(msg)
pathreq.nodes_list[pathreq.nodes_list.index(n_id)] = new_n
except StopIteration:
# shall not come in this case, unless requested direction does not exist
msg = f'{pathreq.request_id}: Invalid route specified {n_id}: could' \
+ ' not decide on direction, skipped!.\nPlease add a valid' \
+ ' direction in constraints (next neighbour node)'
logger.info(msg)
pathreq.nodes_list.remove(n_id)
pathreq.loose_list.pop(pathreq.nodes_list.index(n_id))
pathreq.nodes_list.remove(n_id)
else:
if temp.loose_list[i] == 'LOOSE':
# if no matching can be found in the network just ignore this constraint
# if it is a loose constraint
# warns the user that this node is not part of the topology
msg = f'{ansi_escapes.yellow}Invalid node specified:\n\t\'{n_id}\'' +\
f', could not use it as constraint, skipped!{ansi_escapes.reset}'
print(msg)
logger.info(msg)
pathreq.loose_list.pop(pathreq.nodes_list.index(n_id))
pathreq.nodes_list.remove(n_id)
else:
msg = f'{ansi_escapes.red}Could not find node:\n\t\'{n_id}\' in network' +\
f' topology. Strict constraint can not be applied.{ansi_escapes.reset}'
logger.critical(msg)
raise ServiceError(msg)
else:
if temp.loose == 'LOOSE':
msg = f'{pathreq.request_id}: Invalid route node specified:\n\t\'{n_id}\'' \
+ ' type is not supported as constraint with xls network input, skipped!'
logger.warning(msg)
if temp.loose_list[i] == 'LOOSE':
print(f'{ansi_escapes.yellow}Invalid route node specified:\n\t\'{n_id}\'' +
f' type is not supported as constraint with xls network input,' +
f' skipped!{ansi_escapes.reset}')
pathreq.loose_list.pop(pathreq.nodes_list.index(n_id))
pathreq.nodes_list.remove(n_id)
else:
msg = f'{pathreq.request_id}: Invalid route node specified \n\t\'{n_id}\'' \
+ ' type is not supported as constraint with xls network input,' \
+ ', Strict constraint can not be applied.'
msg = f'{ansi_escapes.red}Invalid route node specified \n\t\'{n_id}\'' +\
f' type is not supported as constraint with xls network input,' +\
f', Strict constraint can not be applied.{ansi_escapes.reset}'
logger.critical(msg)
raise ServiceError(msg)
return pathreqlist


@@ -1,253 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.tools.worker_utils: Common code for CLI examples and API
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
gnpy.tools.worker_utils
=======================
Common code for CLI examples and API
"""
import logging
from copy import deepcopy
from typing import Union, List, Tuple
from numpy import linspace
from networkx import DiGraph
from gnpy.core.utils import automatic_nch, watt2dbm, dbm2watt, pretty_summary_print, per_label_average
from gnpy.core.equipment import trx_mode_params
from gnpy.core.network import add_missing_elements_in_network, design_network
from gnpy.core import exceptions
from gnpy.core.info import SpectralInformation
from gnpy.topology.spectrum_assignment import build_oms_list, pth_assign_spectrum, OMS
from gnpy.topology.request import correct_json_route_list, deduplicate_disjunctions, requests_aggregation, \
compute_path_dsjctn, compute_path_with_disjunction, ResultElement, PathRequest, Disjunction, \
compute_constrained_path, propagate
from gnpy.tools.json_io import requests_from_json, disjunctions_from_json
logger = logging.getLogger(__name__)
def designed_network(equipment: dict, network: DiGraph, source: str = None, destination: str = None,
nodes_list: List[str] = None, loose_list: List[str] = None,
initial_spectrum: dict = None, no_insert_edfas: bool = False,
args_power: Union[str, float, int] = None,
service_req: PathRequest = None) -> Tuple[DiGraph, PathRequest, PathRequest]:
"""Build the reference channels based on inputs and design the network for this reference channel, and build the
channel to be propagated for the single transmission script.
Reference channel (target input power in spans, nb of channels, transceiver output power) is built using
equipment['SI'] information. If indicated, with target input power in spans is updated with args_power.
Channel to be propagated is using the same channel reference, except if different settings are provided
with service_req and initial_spectrum. The service to be propagated uses specified source, destination
and list nodes_list of include nodes constraint except if the service_req is specified.
Args:
- equipment: a dictionary containing equipment information.
- network: a directed graph representing the initial network.
- no_insert_edfas: a boolean indicating whether to insert EDFAs in the network.
- args_power: the power to be used for the network design.
- service_req: the service request the user wants to propagate.
- source: the source node for the channel to be propagated if no service_req is specified.
- destination: the destination node for the channel to be propagated if no service_req is specified.
- nodes_list: a list of nodes to be included for the channel to be propagated if no service_req is specified.
- loose_list: a list of loose nodes to be included in the network design.
- initial_spectrum: a dictionary representing the initial spectrum to propagate.
Returns:
- The designed network.
- The channel to propagate.
- The reference channel used for the design.
"""
if loose_list is None:
loose_list = []
if nodes_list is None:
nodes_list = []
if not no_insert_edfas:
add_missing_elements_in_network(network, equipment)
if not nodes_list:
if destination:
nodes_list = [destination]
loose_list = ['STRICT']
else:
nodes_list = []
loose_list = []
params = {
'request_id': 'reference',
'trx_type': '',
'trx_mode': '',
'source': source,
'destination': destination,
'bidir': False,
'nodes_list': nodes_list,
'loose_list': loose_list,
'format': '',
'path_bandwidth': 0,
'effective_freq_slot': None,
'nb_channel': None if equipment['SI']['default'].use_si_channel_count_for_design is False
else automatic_nch(equipment['SI']['default'].f_min, equipment['SI']['default'].f_max,
equipment['SI']['default'].spacing),
'power': dbm2watt(equipment['SI']['default'].power_dbm),
'tx_power': dbm2watt(equipment['SI']['default'].power_dbm)
}
if equipment['SI']['default'].tx_power_dbm is not None:
# use SI tx_power if present
params['tx_power'] = dbm2watt(equipment['SI']['default'].tx_power_dbm)
trx_params = trx_mode_params(equipment)
params.update(trx_params)
# use args_power instead of si
if args_power:
params['power'] = dbm2watt(float(args_power))
if equipment['SI']['default'].tx_power_dbm is None:
params['tx_power'] = params['power']
# use si as reference channel
reference_channel = PathRequest(**params)
if service_req:
# use service_req as reference channel with si tx_power if service_req tx_power is None
if service_req.tx_power is None:
service_req.tx_power = params['tx_power']
reference_channel = deepcopy(service_req)
if equipment['SI']['default'].use_si_channel_count_for_design is False:
reference_channel.nb_channel = None
design_network(reference_channel, network, equipment, set_connector_losses=True, verbose=True)
if initial_spectrum:
params['nb_channel'] = len(initial_spectrum)
req = PathRequest(**params)
if service_req:
req = service_req
req.initial_spectrum = initial_spectrum
return network, req, reference_channel
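# Typical usage sketch (equipment and network are assumed to be already loaded, e.g. via gnpy.tools.json_io;
# node names are placeholders):
#   network, req, ref_req = designed_network(equipment, network,
#                                            source='trx Site_A', destination='trx Site_B')
#   # network is now designed for ref_req and req is ready to be propagated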
def check_request_path_ids(rqs: List[PathRequest]):
"""check that request ids are unique. Non unique ids, may
mess the computation: better to stop the computation
"""
all_ids = [r.request_id for r in rqs]
if len(all_ids) != len(set(all_ids)):
for item in list(set(all_ids)):
all_ids.remove(item)
msg = f'Requests id {all_ids} are not unique'
logger.error(msg)
raise ValueError(msg)
def planning(network: DiGraph, equipment: dict, data: dict, redesign: bool = False) \
-> Tuple[List[OMS], list, list, List[PathRequest], List[Disjunction], List[ResultElement]]:
"""Run planning
data contain the service dict from json
redesign True means that network is redesign using each request as reference channel
when False it means that the design is made once and successive propagation use the settings
computed with this design.
"""
oms_list = build_oms_list(network, equipment)
rqs = requests_from_json(data, equipment)
# check that request ids are unique.
check_request_path_ids(rqs)
rqs = correct_json_route_list(network, rqs)
dsjn = disjunctions_from_json(data)
logger.info('List of disjunctions:\n%s', dsjn)
# need to warn or correct in case of wrong disjunction form
# disjunction must not be repeated with same or different ids
dsjn = deduplicate_disjunctions(dsjn)
logger.info('Aggregating similar requests')
rqs, dsjn = requests_aggregation(rqs, dsjn)
logger.info('The following services have been requested:\n%s', rqs)
# logger.info('Computing all paths with constraints for request %s', optical_path_result_id)
pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
logger.info('Propagating on selected path')
propagatedpths, reversed_pths, reversed_propagatedpths = \
compute_path_with_disjunction(network, equipment, rqs, pths, redesign=redesign)
# Note that deepcopy used in compute_path_with_disjunction returns
# a list of nodes which do not belong to network (they are copies of the node objects).
# so there can not be propagation on these nodes.
# Allowed user_policy are first_fit and 2partition
pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)
for i, rq in enumerate(rqs):
if hasattr(rq, 'OSNR') and rq.OSNR:
rq.osnr_with_sys_margin = rq.OSNR + equipment["SI"]["default"].sys_margins
# assumes that list of rqs and list of propagatedpths have same order
result = [ResultElement(rq, pth, rpth) for rq, pth, rpth in zip(rqs, propagatedpths, reversed_propagatedpths)]
return oms_list, propagatedpths, reversed_propagatedpths, rqs, dsjn, result
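# Usage sketch (data is the service dict loaded from a JSON request file; variable names are illustrative):
#   oms_list, propagatedpths, reversed_propagatedpths, rqs, dsjn, result = planning(network, equipment, data)
#   # result holds one ResultElement per request, ready to be serialised into a response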
def transmission_simulation(equipment: dict, network: DiGraph, req: PathRequest, ref_req: PathRequest) \
-> Tuple[list, List[list], List[Union[float, int]], SpectralInformation]:
"""Run simulation and returms the propagation result for each power sweep iteration.
Args:
- equipment: a dictionary containing equipment information.
- network: network after being designed using ref_req. Any missing information (amp gain or delta_p) must have
been filled using ref_req as reference channel prior to calling this function.
- req: channel to be propagated.
- ref_req: the reference channel used for filling missing information in the network.
In case of power sweep, network is redesigned using ref_req whose target input power in span is
updated with the power step.
Returns a tuple containing:
- path: last propagated path. Power sweep is not possible with gain mode (as gain targets are used)
- propagations: list of propagated path for each power iteration
- powers_dbm: list of power used for the power sweep
- infos: last propagated spectral information
"""
power_mode = equipment['Span']['default'].power_mode
logger.info('Power mode is set to %s=> it can be modified in eqpt_config.json - Span', power_mode)
# initial network is designed using ref_req, i.e. any missing information (amp gain or delta_p) is filled
# using this ref_req.power, prior to any sweep requested later on.
pref_ch_db = watt2dbm(ref_req.power)
p_ch_db = watt2dbm(req.power)
path = compute_constrained_path(network, req)
power_range = [0]
if power_mode:
# power cannot be changed in gain mode
try:
p_start, p_stop, p_step = equipment['SI']['default'].power_range_db
p_num = abs(int(round((p_stop - p_start) / p_step))) + 1 if p_step != 0 else 1
power_range = list(linspace(p_start, p_stop, p_num))
except TypeError as e:
msg = 'invalid power range definition in eqpt_config, should be power_range_db: [lower, upper, step]'
logger.error(msg)
raise exceptions.EquipmentConfigError(msg) from e
logger.info('Now propagating between %s and %s', req.source, req.destination)
propagations = []
powers_dbm = []
for dp_db in power_range:
ref_req.power = dbm2watt(pref_ch_db + dp_db)
req.power = dbm2watt(p_ch_db + dp_db)
# Power sweep is made to evaluate different span input powers, so redesign is mandatory for each power,
# but no need to redesign if there is no power sweep
if len(power_range) > 1:
design_network(ref_req, network.subgraph(path), equipment, set_connector_losses=False, verbose=False)
infos = propagate(path, req, equipment)
propagations.append(deepcopy(path))
powers_dbm.append(pref_ch_db + dp_db)
logger.info('\nChannels propagating: (Input optical power deviation in span = '
+ f'{pretty_summary_print(per_label_average(infos.delta_pdb_per_channel, infos.label))}dB,\n'
+ ' spacing = '
+ f'{pretty_summary_print(per_label_average(infos.slot_width * 1e-9, infos.label))}GHz,\n'
+ ' transceiver output power = '
+ f'{pretty_summary_print(per_label_average(watt2dbm(infos.tx_power), infos.label))}dBm,\n'
+ f' nb_channels = {infos.number_of_channels})')
if not power_mode:
logger.info('\n\tPropagating using gain targets: Input optical power deviation in span ignored')
return path, propagations, powers_dbm, infos
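# Usage sketch following designed_network() above (all inputs assumed already loaded; names are placeholders):
#   network, req, ref_req = designed_network(equipment, network, source='trx A', destination='trx B')
#   path, propagations, powers_dbm, infos = transmission_simulation(equipment, network, req, ref_req)
#   # path is the last propagated path, powers_dbm lists the swept reference powers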


@@ -1,324 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.tools.xls_utils: Utilities for reading and writing XLS, XLSX
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
gnpy.tools.xls_utils
====================
This module contains utilities for reading and writing XLS, XLSX
"""
from pathlib import Path
from typing import Generator, Tuple, List, Union, Optional, Iterator, Callable
from openpyxl import load_workbook, Workbook
from openpyxl.worksheet.worksheet import Worksheet
from openpyxl.cell.cell import Cell as OpenpyxlCell
from openpyxl.cell.read_only import ReadOnlyCell as OpenpyxlReadOnlyCell
from openpyxl.utils.exceptions import InvalidFileException
from xlrd import Book, open_workbook, XL_CELL_EMPTY
from xlrd.sheet import Sheet as XlrdSheet, Cell as XlrdCell
from xlrd.biffh import XLRDError
SheetType = Union[Worksheet, XlrdSheet]
WorkbookType = Union[Workbook, Book]
CellType = Union[OpenpyxlCell, OpenpyxlReadOnlyCell, XlrdCell]
XLS_EXCEPTIONS = (InvalidFileException, KeyError, XLRDError)
def generic_open_workbook(file_path: Union[str, Path]) -> Tuple[WorkbookType, bool]:
"""Open an Excel file supporting both XLS or XLSX.
:param file_path: Path of excel file
:type file_path: Union[str, Path]
:return: Tuple (workbook, is_xlsx) where is_xlsx indicates whether the file is XLSX or not
:rtype: Tuple[WorkbookType, bool]
"""
file_path = Path(file_path) if isinstance(file_path, str) else file_path
if file_path.suffix.lower() in ['.xlsx', '.xlsm']:
return load_workbook(file_path, read_only=True, data_only=True), True
return open_workbook(file_path), False
def get_sheet(workbook: WorkbookType,
sheet_name: str,
is_xlsx: bool) -> SheetType:
"""Get the Excel Sheet by name
:param workbook: Opened Excel workbook
:type workbook: WorkbookType
:param sheet_name: Sheet name
:type sheet_name: str
:param is_xlsx: True if this is an XLSX workbook, False if XLS
:type is_xlsx: bool
:return: Excel sheet
:rtype: SheetType
"""
if is_xlsx:
return workbook[sheet_name]
return workbook.sheet_by_name(sheet_name)
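# Usage sketch combining the two helpers above (file name is a placeholder):
#   workbook, is_xlsx = generic_open_workbook('meshTopologyExampleV2.xls')
#   service_sheet = get_sheet(workbook, 'Service', is_xlsx)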
def get_cell_value(sheet: SheetType, row: int, col: int, is_xlsx: bool) -> Optional[Union[str, int, float]]:
"""Get the cell value
:param sheet: Excel sheet
:type sheet: SheetType
:param row: Line index (0-based)
:type row: int
:param col: Column index (0-based)
:type col: int
:param is_xlsx: True if this is an XLSX workbook, False if XLS
:type is_xlsx: bool
:return: cell value
:rtype: Optional[Union[str, int, float]]
"""
if is_xlsx:
# openpyxl uses a 1-based index
cell = sheet.cell(row=row + 1, column=col + 1)
return cell.value
# xlrd uses a 0-based index
return sheet.cell(row, col).value
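# Illustrative call: both backends are addressed with the same 0-based indices
# (row 4 is, for instance, the header row of the 'Service' sheet):
#   value = get_cell_value(sheet, row=4, col=0, is_xlsx=is_xlsx)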
def get_row(sheet: SheetType, row_index: int, is_xlsx: bool, get_rows=None) -> List[CellType]:
"""Get row in a workbook sheet.
:param sheet: Excel sheet
:type sheet: SheetType
:param row_index: Line index (0-based)
:type row_index: int
:param is_xlsx: True if this is an XLSX workbook, False if XLS
:type is_xlsx: bool
:param get_rows: Optional function that returns preloaded rows (from fast_get_sheet_rows)
:type get_rows: Optional[Callable]
:return: List row cells
:rtype: List[CellType]
"""
if is_xlsx:
if get_rows is not None:
# use fast access with a closure function
rows = get_rows()
else:
rows = list(sheet.rows)
return rows[row_index] if row_index < len(rows) else []
return sheet.row(row_index)
def fast_get_sheet_rows(sheet: Worksheet) -> Callable:
"""Preloads all rows from an Excel sheet for fast access.
This function loads the sheet data only once and returns a function
that provides access to this preloaded data without having to query
the Excel sheet on each access, which significantly improves performance,
particularly with openpyxl.
:param sheet: Excel worksheet (openpyxl.worksheet.worksheet.Worksheet object)
:type sheet: Worksheet
:return: Function that returns the preloaded rows
:rtype: Callable[[], List[Tuple[Cell, ...]]]
Usage example:
> get_rows = fast_get_sheet_rows(sheet)
> rows = get_rows() # Access to preloaded data
> first_row = rows[0] # First row
"""
# Load all sheet rows into memory only once
# This operation can be expensive, but it's performed only once
# load the rows only once.
preloaded_data = list(sheet.rows)
def get_rows():
"""Inner function (clodure function) that returns the preloaded data.
This function doesn't reload the data on each call,
it simply returns the reference to the already loaded data.
:return: List of preloaded rows
:rtype: List[Tuple[Cell, ...]]
"""
return preloaded_data
return get_rows
def get_row_slice(sheet: SheetType, row_index: int, start_col: int, end_col: int, is_xlsx: bool,
get_rows: Callable = None) -> Union[Tuple[CellType], List[CellType]]:
"""Get a row slice.
:param sheet: Excel sheet
:type sheet: SheetType
:param row_index: Line index (0-based)
:type row_index: int
:param start_col: Index of start column (0-based)
:type start_col: int
:param end_col: Index of end column (0-based)
:type end_col: int
:param is_xlsx: True if this is an XLSX workbook, False if XLS
:type is_xlsx: bool
:param get_rows: Optional function that returns preloaded rows (from fast_get_sheet_rows)
:type get_rows: Optional[Callable]
:return: List of cells in the selected slice
:rtype: List[CellType]
"""
if is_xlsx:
if get_rows is not None:
rows = get_rows()
else:
rows = list(sheet.rows)
return rows[row_index][start_col:end_col] if row_index < len(rows) else []
return sheet.row_slice(row_index, start_col, end_col)
def convert_empty(cell_value: Optional[Union[str, int, float]]) -> Optional[Union[str, int, float]]:
"""Convert empty string into None
:param cell_value: Cell value
:type cell_value: Optional[Union[str, int, float]]
>>> convert_empty('')
>>> convert_empty('data')
'data'
>>> convert_empty(123)
123
"""
if cell_value == '':
return None
return cell_value
def get_num_rows(sheet: SheetType, is_xlsx: bool, get_rows: Callable = None) -> int:
"""Get the number of lines of an Excel sheet. Note that openpyxl in read_only mode can return "ghost" rows
at the end (ReadOnlyCell cells with no actual value but formatting information even for empty rows).
:param sheet: Excel sheet
:type sheet: SheetType
:param is_xlsx: True if this is an XLSX workbook, False if XLS
:type is_xlsx: bool
:param get_rows: Optional function that returns preloaded rows (from fast_get_sheet_rows)
:type get_rows: Optional[Callable]
:return: Number of lines
:rtype: int
"""
if is_xlsx:
if get_rows is not None:
return len(list(get_rows()))
else:
return len(list(sheet.rows))
return sheet.nrows
def is_type_cell_empty(cell, is_xlsx: bool) -> bool:
"""Check is a cell is empty.
:param sheet: Excel sheet
:type sheet: SheetType
:param row: Line index (0-based)
:type row: int
:param col: Column index (0-based)
:type: int
:param is_xlsx: True if this is an XLSX workbook, False if XLS
:type is_xlsx: bool
:return: True if cell is empty, else returns False
:rtype: bool
"""
if is_xlsx:
return cell.value in [None, '']
return cell.ctype == XL_CELL_EMPTY
def get_sheet_name(sheet: SheetType, is_xlsx: bool) -> str:
"""Get the name of the current sheet
:param sheet: Excel sheet
:type sheet: SheetType
:param is_xlsx: True if this is an XLSX workbook, False if XLS
:type is_xlsx: bool
:return: Name of the sheet
:rtype: str
"""
if is_xlsx:
return sheet.title
return sheet.name
def all_rows(sh: Worksheet, is_xlsx: bool, start: int = 0, get_rows: Callable = None) -> Generator[list, None, None]:
"""Returns all rows of the xls(x) sheet starting from start row.
:param sh: Excel sheet
:type sh: SheetType
:param start: The starting row index (0-based).
:type start: int
:param get_rows: Optional function that returns preloaded rows (from fast_get_sheet_rows)
:type get_rows: Optional[Callable]
:return: A generator yielding all rows from the specified starting index.
:rtype: Generator[list, None, None]
"""
return (get_row(sh, x, is_xlsx, get_rows) for x in range(start, get_num_rows(sh, is_xlsx, get_rows)))
def correct_cell_int_to_str(v: Optional[Union[str, int, float]]) -> Optional[Union[str, int, float]]:
"""Ensure that int values in "id" cells are read as strings containing the int and
do not use the automatic float conversion from xlrd or openpyxl
:param v: cell value to convert
:type v: Optional[Union[str, int, float]]
:return: corrected cell value
:rtype: Optional[Union[str, int, float]]
>>> correct_cell_int_to_str(123)
'123'
>>> correct_cell_int_to_str(123.0)
'123'
>>> correct_cell_int_to_str('abc')
'abc'
>>> correct_cell_int_to_str(None)
"""
if not isinstance(v, str) and v is not None:
value = str(int(v))
if value.endswith('.0'):
value = value[:-2]
else:
value = v
return value
def get_all_sheets(workbook: WorkbookType, is_xlsx: bool) -> Iterator[SheetType]:
"""Get all sheets from an Excel workbook.
:param workbook: Opened Excel workbook
:type workbook: WorkbookType
:param is_xlsx: True if this is an XLSX workbook, False if XLS
:type is_xlsx: bool
:return: Iterator of all sheets in the workbook
:rtype: Iterator[SheetType]
"""
if is_xlsx:
for sheet in workbook.worksheets:
yield sheet
else:
for i in range(workbook.nsheets):
yield workbook.sheet_by_index(i)
def get_sheet_names(workbook: WorkbookType, is_xlsx: bool) -> List[str]:
"""Get all sheet names from an Excel workbook.
:param workbook: Opened Excel workbook
:type workbook: WorkbookType
:param is_xlsx: True if this is an XLSX workbook, False if XLS
:type is_xlsx: bool
:return: List of sheet names
:rtype: List[str]
"""
if is_xlsx:
return workbook.sheetnames
return workbook.sheet_names()


@@ -1,960 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# Utils for yang <-> legacy format conversion
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
Utils for yang <-> legacy format conversion
===========================================
Format conversion utils.
"""
from pathlib import Path
from copy import deepcopy
from typing import Dict, Union, List, Any, NamedTuple
import json
import os
import oopt_gnpy_libyang as ly
from gnpy.yang.precision_dict import PRECISION_DICT
ELEMENTS_KEY = 'elements'
ROADM_KEY = 'Roadm'
PARAMS_KEY = 'params'
METADATA_KEY = 'metadata'
LOCATION_KEY = 'location'
DEGREE_KEY = 'degree_uid'
PATH_REQUEST_KEY = 'path-request'
RESPONSE_KEY = 'response'
SPECTRUM_KEY = 'spectrum'
LOSS_COEF_KEY = 'loss_coef'
LOSS_COEF_KEY_PER_FREQ = 'loss_coef_per_frequency'
RAMAN_COEF_KEY = 'raman_coefficient'
RAMAN_EFFICIENCY_KEY = 'raman_efficiency'
EQPT_TYPES = ['Edfa', 'Transceiver', 'Fiber', 'Roadm']
EDFA_CONFIG_KEYS = ['nf_fit_coeff', 'nf_ripple', 'gain_ripple', 'dgt']
SIM_PARAMS_KEYS = ['raman_params', 'nli_params']
TOPO_NMSP = 'gnpy-network-topology:topology'
EQPT_NMSP = 'gnpy-eqpt-config:equipment'
SERV_NMSP = 'gnpy-path-computation:services'
RESP_NMSP = 'gnpy-path-computation:responses'
EDFA_CONFIG_NMSP = 'gnpy-edfa-config:edfa-config'
SIM_PARAMS_NMSP = 'gnpy-sim-params:sim-params'
SPECTRUM_NMSP = 'gnpy-spectrum:spectrum'
class PrettyFloat(float):
""""A float subclass for formatting according to specific fraction digit requirements.
>>> PrettyFloat(3.1245)
3.12
>>> PrettyFloat(100.65, 5)
100.65
>>> PrettyFloat(2.1e-5, 8)
0.000021
>>> PrettyFloat(10, 3)
10.0
>>> PrettyFloat(-0.3110761646066259, 18)
-0.3110761646066259
"""
def __new__(cls, value: float, fraction_digit: int = 2):
"""Create a new instance of PrettyFloat"""
instance = super().__new__(cls, value)
instance.fraction_digit = fraction_digit
instance.value = value
return instance
def __repr__(self) -> str:
"""Return the string representation of the float formatted to the specified fraction digits. It removes
scientific notation ("e-x").
"""
# When fraction digit is over 16, the usual formatting does not work properly because of floating point issues.
# For example -0.3110761646066259 is represented as "-0.311076164606625905". The following function makes
# sure that the unwanted floating point issue does not change the value. Maximum fraction digit in YANG is 18.
if self.fraction_digit in range(0, 19):
temp = str(self.value)
if 'e' in temp or '.' not in temp or self.fraction_digit < 17:
formatted_value = f'{self:.{self.fraction_digit}f}' # noqa E231
if '.' in formatted_value:
formatted_value = formatted_value.rstrip('0')
if formatted_value.endswith('.'):
formatted_value += '0'
return formatted_value
if '.' in temp:
parts = temp.split('.')
formatted_value = parts[0] + '.' + parts[1][0:min(self.fraction_digit, len(parts[1]))]
formatted_value = formatted_value.rstrip('0')
if formatted_value.endswith('.'):
formatted_value += '0'
return formatted_value
return temp
raise ValueError(f'Fraction digit {self.fraction_digit} not handled')
def gnpy_precision_dict() -> Dict[str, int]:
"""Return a dictionary of fraction-digit definitions for GNPy.
Precision corresponds to the fraction digit number if it is a decimal64 yang type, 0 if it is an
(u)int < 64, or -1 if it is a string or an (u)int64 type.
:return: Dictionary mapping key names with digit numbers for values.
:rtype: Dict[str, int]
"""
return PRECISION_DICT
def convert_dict(data: Dict, fraction_digit: int = 2, precision: Union[Dict[str, int], None] = None) \
-> Union[Dict, List, float, int, str, None]:
"""Recursive conversion from float to str, conformed to RFC7951
does not work for int64 (will not returm str as stated in standard)
If nothing is stated precision is using gnpy_precision_dict.
:param data: the input dictionary to convert.
:type data: Dict
:param fraction_digit: the number of decimal places to format.
:type fraction_digit: int
:param precision: A dictionary defining precision for specific keys.
:type precision: Union[Dict[str, int], None]
:return: A new dictionary with converted values.
:rtype: Dict
>>> convert_dict({"y": "amp", "t": "vn", "g": 25, "gamma": 0.0016, "p": 21.5, "o": True, \
"output-power": 14.12457896})
{'y': 'amp', 't': 'vn', 'g': '25.0', 'gamma': '0.0016', 'p': '21.5', 'o': True, 'output-power': '14.12457896'}
"""
if not precision:
precision = gnpy_precision_dict()
if isinstance(data, dict):
for k, v in data.items():
fraction_digit = precision.get(k, 2)
data[k] = convert_dict(v, fraction_digit, precision=precision)
elif isinstance(data, list):
temp = deepcopy(data)
for i, el in enumerate(temp):
if isinstance(el, float):
data[i] = PrettyFloat(el, fraction_digit)
data[i] = str(data[i])
else:
data[i] = convert_dict(el, fraction_digit=fraction_digit, precision=precision)
elif isinstance(data, bool):
return data
elif isinstance(data, int):
data = PrettyFloat(data)
data.fraction_digit = fraction_digit
if fraction_digit > 0:
return str(data)
if fraction_digit < 0:
return data
return int(data)
elif isinstance(data, float):
data = PrettyFloat(data)
data.fraction_digit = fraction_digit
return str(data)
return data
def convert_back(data: Dict, fraction_digit: Union[int, None] = None, precision: Union[Dict[str, int], None] = None) \
-> Union[Dict, List, float, int, str, None]:
"""Recursively convert strings back to their original types int, float according to RFC7951.
:param data: the input dictionary to convert.
:type data: Dict
:param fraction_digit: the number of decimal places to format.
:type fraction_digit: Union[int, None]
:param precision: A dictionary defining precision for specific keys.
:type precision: Union[Dict[str, int], None]
:return: A new dictionary with converted values.
:rtype: Dict
>>> a = {'y': 'amp', 't': 'vn', 'N': '25', 'gamma': '0.0000000000000016', 'p': '21.50', 'o': True, \
'output-power': '14.12458'}
>>> convert_back({'a': a, 'delta_power_range_db': ['12.3', '10.6', True]})
{'a': {'y': 'amp', 't': 'vn', 'N': 25, 'gamma': 1.6e-15, 'p': '21.50', 'o': True, 'output-power': 14.12458}, \
'delta_power_range_db': ['12.3', '10.6', True]}
"""
if not precision:
precision = gnpy_precision_dict()
if isinstance(data, dict):
for k, v in data.items():
fraction_digit = None
if k in precision:
fraction_digit = precision[k]
data[k] = convert_back(v, fraction_digit, precision=precision)
elif isinstance(data, list):
for i, el in enumerate(data):
if isinstance(el, str) and fraction_digit not in [None, -1]:
data[i] = float(data[i])
else:
data[i] = convert_back(el, fraction_digit=fraction_digit, precision=precision)
elif isinstance(data, (bool, int, float)):
return data
elif isinstance(data, str) and fraction_digit is not None:
if fraction_digit > 0:
return float(data)
if fraction_digit < 0:
return data
return int(data)
return data
def model_path() -> Path:
"""Filesystem path to YANG models.
:return: path to the GNPy YANG modules.
:rtype: Path
"""
return Path(__file__).parent.parent / 'yang'
def external_yang() -> Path:
"""Filesystem to the IETF external yang modules.
return: path to the IETF modules.
rtype: Path
"""
return Path(__file__).parent.parent / 'yang' / 'ext'
def yang_lib() -> Path:
"""Path to the json library of needed yang modules.
:return: path to the library describing all modules and revisions for this gnpy release.
:rtype: Path
"""
return Path(__file__).parent.parent / 'yang' / 'yang-library-gnpy.json'
def _create_context(yang_library) -> ly.Context:
"""Prepare a libyang context for validating data against GNPy YANG models.
:param yang_library: path to the library describing all modules and revisions to be considered for the formatted
string generation.
:type yang_library: Path
:return: Context used to hold all information about schemas.
:rtype: ly.Context
"""
ly.set_log_options(ly.LogOptions.Log | ly.LogOptions.Store)
ctx = ly.Context(str(model_path()) + os.pathsep + str(external_yang()),
ly.ContextOptions.AllImplemented | ly.ContextOptions.DisableSearchCwd)
with open(yang_library, 'r', encoding='utf-8') as file:
data = json.load(file)
yang_modules = [{'name': e['name'], 'revision': e['revision']}
for e in data['ietf-yang-library:modules-state']['module']]
for module in yang_modules:
ctx.load_module(module['name'], revision=module['revision'])
return ctx
class ErrorMessage(NamedTuple):
# pylint: disable=C0115
what: str
where: str
def load_data(s: str, yang_library: Path = yang_lib()) -> ly.DataNode:
"""Load data from YANG-based JSON input and validate them.
:param s: a string containing the json data to be loaded.
:type s: str
:param yang_library: path to the library describing all modules and revisions to be considered for the formatted
string generation.
:type yang_library: Path
:return: DataNode containing the loaded data
:rtype: ly.DataNode
"""
ctx = _create_context(yang_library)
try:
data = ctx.parse_data(s, ly.DataFormat.JSON,
ly.ParseOptions.Strict | ly.ParseOptions.Ordered,
ly.ValidationOptions.Present
| ly.ValidationOptions.MultiError)
except ly.Error as exc:
raise ly.Error(exc, [ErrorMessage(err.message, err.path) for err in ctx.errors()]) from None
return data
def dump_data(data: Dict, yang_library: Path = yang_lib()) -> str:
"""Creates a formatted string using oopt-gnpy-libyang.
:param data: a json dict with data already formatted
:type data: Dict
:param yang_library: path to the library describing all modules and revisions to be considered for the formatted
string generation.
:type yang_library: Path
:return: formatted string data
:rtype: str
"""
return load_data(json.dumps(data), yang_library).print(ly.DataFormat.JSON, ly.PrintFlags.WithSiblings)
def convert_degree(json_data: Dict) -> Dict:
"""Convert legacy json topology format to gnpy yang format revision 2025-01-20:
:param json_data: The input JSON topology data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: Dict
"""
for elem in json_data[ELEMENTS_KEY]:
if elem['type'] == ROADM_KEY and PARAMS_KEY in elem:
new_targets = []
for equalization_type in ['per_degree_pch_out_db', 'per_degree_psd_out_mWperGHz',
'per_degree_psd_out_mWperSlotWidth']:
targets = elem[PARAMS_KEY].pop(equalization_type, None)
if targets:
new_targets.extend([{DEGREE_KEY: degree, equalization_type: target}
for degree, target in targets.items()])
if new_targets:
elem[PARAMS_KEY]['per_degree_power_targets'] = new_targets
return json_data
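# Illustrative transformation of a Roadm 'params' entry (uid and value are placeholders):
#   {'per_degree_pch_out_db': {'east edfa in A to B': -20.0}}
# becomes:
#   {'per_degree_power_targets': [{'degree_uid': 'east edfa in A to B', 'per_degree_pch_out_db': -20.0}]}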
def convert_back_degree(json_data: Dict) -> Dict:
"""Convert gnpy yang format back to legacy json topology format.
:param json_data: The input JSON topology data to convert back.
:type json_data: Dict
:return: the converted JSON data
:rtype: Dict
"""
for elem in json_data[ELEMENTS_KEY]:
if elem['type'] != ROADM_KEY or PARAMS_KEY not in elem:
continue
power_targets = elem[PARAMS_KEY].pop('per_degree_power_targets', None)
if not power_targets:
continue
# Process each power target
process_power_targets(elem, power_targets)
return json_data
def process_power_targets(elem: Dict, power_targets: List[Dict]) -> None:
"""Process power targets and update element parameters.
:param elem: The element to update
:type elem: Dict
:param power_targets: List of power target configurations
:type power_targets: List[Dict]
"""
equalization_types = [
'per_degree_pch_out_db',
'per_degree_psd_out_mWperGHz',
'per_degree_psd_out_mWperSlotWidth'
]
for target in power_targets:
degree_uid = target[DEGREE_KEY]
for eq_type in equalization_types:
if eq_type not in target:
continue
# Initialize the equalization type dict if needed
if eq_type not in elem[PARAMS_KEY]:
elem[PARAMS_KEY][eq_type] = {}
# Set the value for this degree
elem[PARAMS_KEY][eq_type][degree_uid] = target[eq_type]
def convert_loss_coeff_list(json_data: Dict) -> Dict:
"""Convert legacy json topology format to gnpy yang format revision 2025-01-20:
:param json_data: The input JSON topology data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: Dict
"""
for elem in json_data[ELEMENTS_KEY]:
if PARAMS_KEY in elem and LOSS_COEF_KEY in elem[PARAMS_KEY] \
and isinstance(elem[PARAMS_KEY][LOSS_COEF_KEY], dict):
loss_coef_per_frequency = elem[PARAMS_KEY].pop(LOSS_COEF_KEY)
loss_coef_list = loss_coef_per_frequency.pop('loss_coef_value', None)
frequency_list = loss_coef_per_frequency.pop('frequency', None)
if loss_coef_list:
new_loss_coef_per_frequency = [{'frequency': f, 'loss_coef_value': v}
for f, v in zip(frequency_list, loss_coef_list)]
elem[PARAMS_KEY][LOSS_COEF_KEY_PER_FREQ] = new_loss_coef_per_frequency
return json_data
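# Illustrative transformation of a fiber 'params' entry (values are placeholders):
#   {'loss_coef': {'frequency': [191.3e12, 196.1e12], 'loss_coef_value': [0.21, 0.23]}}
# becomes:
#   {'loss_coef_per_frequency': [{'frequency': 191.3e12, 'loss_coef_value': 0.21},
#                                {'frequency': 196.1e12, 'loss_coef_value': 0.23}]}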
def convert_back_loss_coeff_list(json_data: Dict) -> Dict:
"""Convert gnpy yang format revision 2025-01-20 back to legacy json topology format
:param json_data: The input JSON topology data to convert back
:type json_data: Dict
:return: the converted JSON data
:rtype: Dict
"""
for elem in json_data[ELEMENTS_KEY]:
if PARAMS_KEY in elem and LOSS_COEF_KEY_PER_FREQ in elem[PARAMS_KEY]:
loss_coef_per_frequency = elem[PARAMS_KEY].pop(LOSS_COEF_KEY_PER_FREQ)
if loss_coef_per_frequency:
new_loss_coef_per_frequency = {
'frequency': [item['frequency'] for item in loss_coef_per_frequency],
'loss_coef_value': [item['loss_coef_value'] for item in loss_coef_per_frequency]}
elem[PARAMS_KEY]['loss_coef'] = new_loss_coef_per_frequency
return json_data
def convert_design_band(json_data: Dict) -> Dict:
"""Convert legacy json topology format to gnpy yang format revision 2025-01-20:
:param json_data: The input JSON topology data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: Dict
"""
for elem in json_data[ELEMENTS_KEY]:
if elem['type'] == ROADM_KEY and PARAMS_KEY in elem:
new_targets = []
targets = elem[PARAMS_KEY].pop('per_degree_design_bands', None)
if targets:
new_targets.extend([{DEGREE_KEY: degree, 'design_bands': target}
for degree, target in targets.items()])
if new_targets:
elem[PARAMS_KEY]['per_degree_design_bands_targets'] = new_targets
return json_data
def convert_back_design_band(json_data: Dict) -> Dict:
"""Convert gnpy yang format revision 2025-01-20 back to legacy json topology format
:param json_data: The input JSON topology data to convert back
:type json_data: Dict
:return: the converted JSON data
:rtype: Dict
"""
for elem in json_data[ELEMENTS_KEY]:
if elem['type'] == ROADM_KEY and PARAMS_KEY in elem:
targets = elem[PARAMS_KEY].pop('per_degree_design_bands_targets', None)
if targets:
design_bands = {}
for target in targets:
design_bands[target[DEGREE_KEY]] = target['design_bands']
if design_bands:
elem[PARAMS_KEY]['per_degree_design_bands'] = design_bands
return json_data
def convert_range_to_dict(range_values: List[float]) -> Dict[str, float]:
"""Convert a range list to a dictionary format:
:param range_values: range of loat values defined with the format [min, max, step].
:type range_value: List[float]
:return: range formatted as a dict {"min_value": min, "max_value": max, "step": step}
:rtype: Dict[str, float]
"""
return {
'min_value': range_values[0],
'max_value': range_values[1],
'step': range_values[2]
}
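# Example (illustrative):
#   >>> convert_range_to_dict([0.0, 5.0, 0.5])
#   {'min_value': 0.0, 'max_value': 5.0, 'step': 0.5}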
def process_span_data(span: Dict) -> None:
"""Convert Span data with range in dict format
:param span: The span data to process.
:type span: Dict
"""
if 'delta_power_range_dict_db' in span:
return
if 'delta_power_range_db' not in span:
raise KeyError('delta_power_range_db or delta_power_range_dict_db missing in Span dict.')
delta_power_range_db = span.get('delta_power_range_db', [0, 0, 0])
span['delta_power_range_dict_db'] = convert_range_to_dict(delta_power_range_db)
del span['delta_power_range_db']
def process_si_data(si: Dict) -> None:
"""Convert Span data with range in dict format
:param si: The span data to process.
:type si: Dict
"""
if 'power_range_dict_db' in si:
return
if 'power_range_db' not in si:
raise KeyError('power_range_db or power_range_dict_db missing in SI dict.')
power_range_db = si.get('power_range_db', [0, 0, 0])
si['power_range_dict_db'] = convert_range_to_dict(power_range_db)
del si['power_range_db']
def convert_delta_power_range(json_data: Dict) -> Dict:
"""Convert legacy json equipment format to GNPy yang format revision 2025-01-20
:param json_data: the input JSON data to convert.
:type json_data: Dict
:return: The converted JSON data.
:rtype: Dict
"""
if 'Span' in json_data:
for span in json_data['Span']:
process_span_data(span)
if 'SI' in json_data:
for si in json_data['SI']:
process_si_data(si)
return json_data
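# Illustrative transformation of a 'Span' entry (values are placeholders):
#   {'Span': [{'delta_power_range_db': [-2, 3, 0.5]}]}
# becomes:
#   {'Span': [{'delta_power_range_dict_db': {'min_value': -2, 'max_value': 3, 'step': 0.5}}]}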
def convert_back_delta_power_range(json_data: Dict) -> Dict:
"""Convert Yang JSON revision 2025-01-20 equipment format to legacy GNPy format.
:param json_data: the input JSON data to convert.
:type json_data: Dict
:return: The converted JSON data.
:rtype: Dict
"""
if 'Span' in json_data and 'delta_power_range_dict_db' in json_data['Span'][0]:
delta_power_range_db = json_data['Span'][0]['delta_power_range_dict_db']
json_data['Span'][0]['delta_power_range_db'] = [
delta_power_range_db['min_value'],
delta_power_range_db['max_value'],
delta_power_range_db['step']]
del json_data['Span'][0]['delta_power_range_dict_db']
if 'SI' in json_data and 'power_range_dict_db' in json_data['SI'][0]:
power_range_db = json_data['SI'][0]['power_range_dict_db']
json_data['SI'][0]['power_range_db'] = [
power_range_db['min_value'],
power_range_db['max_value'],
power_range_db['step']]
del json_data['SI'][0]['power_range_dict_db']
return json_data
def add_missing_default_type_variety(json_data: Dict) -> Dict:
"""Case of ROADM: legacy does not enforce type_variety to be present.
This utility ensures that 'default' type_variety is inserted if the key is missing.
:param json_data: the input JSON data to convert.
:type json_data: Dict
:return: The converted JSON data.
:rtype: Dict
"""
if 'Roadm' not in json_data:
return json_data
for i, elem in enumerate(json_data['Roadm']):
if 'type_variety' not in elem:
# make sure type_variety is the first key in the elem
temp = {'type_variety': 'default'}
temp.update(elem)
json_data['Roadm'][i] = temp
break
return json_data
def remove_null_region_city(json_data: Dict) -> Dict:
"""if present, name should not be None.
:param json_data: the input JSON data to convert.
:type json_data: Dict
:return: The converted JSON data.
:rtype: Dict
"""
for elem in json_data[ELEMENTS_KEY]:
if "metadata" in elem and "location" in elem[METADATA_KEY]:
for name in ['city', 'region']:
if name in elem[METADATA_KEY][LOCATION_KEY] \
and elem[METADATA_KEY][LOCATION_KEY][name] is None:
elem[METADATA_KEY][LOCATION_KEY][name] = ""
return json_data
def remove_union_that_fail(json_data: Dict) -> Dict:
"""Convert GNPy legacy JSON request format to GNPy yang format revision 2025-01-20
If present "N": or "M": should not contain empy data.
If present max-nb-of-channel should not contain empty data.
:param json_data: the input JSON data to convert.
:type json_data: Dict
:return: The converted JSON data.
:rtype: Dict
"""
for elem in json_data[PATH_REQUEST_KEY]:
te = elem['path-constraints']['te-bandwidth']
freq_slot = te.get('effective-freq-slot', None)
if freq_slot:
for slot in freq_slot:
if slot.get('N', None) is None:
slot.pop('N', None)
if slot.get('M', None) is None:
slot.pop('M', None)
if not slot:
te['effective-freq-slot'].remove(slot)
if not te['effective-freq-slot']:
te.pop('effective-freq-slot', None)
for attribute in ['max-nb-of-channel', 'trx_mode', 'output-power']:
if te.get(attribute) is None:
te.pop(attribute, None)
return json_data
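# Illustrative clean-up of one request's te-bandwidth content (values are placeholders):
#   {'effective-freq-slot': [{'N': None, 'M': 32}], 'trx_mode': None, 'max-nb-of-channel': 80}
# becomes:
#   {'effective-freq-slot': [{'M': 32}], 'max-nb-of-channel': 80}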
def convert_none_to_empty(json_data: Any):
"""Convert all instances of None in the input to [None].
This function recursively traverses the input and replaces any None
values with a list containing None. If the input is already a list
containing None, it returns the input unchanged.
:param json_data: The input data to process, which can be of any type.
:type json_data: Any
:return: A new representation of the input with None values replaced by [None].
:rtype: Any
:example:
>>> a = {'uid': '[930/WRT-2-2-SIG=>923/WRT-1-9-SIG]-923/AMP-1-13', 'type_variety': 'AMP',
... 'metadata': {'location': {'latitude': 0.0, 'longitude': 0.0, 'city': 'Zion', 'region': ''}},
... 'type': 'Multiband_amplifier', 'amplifiers': [{'type_variety': 'AMP_LOW_C',
... 'operational': {'gain_target': 12.22, 'delta_p': 4.19, 'out_voa': None, 'tilt_target': 0.0,
... 'f_min': 191.3, 'f_max': 196.1}}, {'type_variety': 'AMP_LOW_L',
... 'operational': {'gain_target': 12.05, 'delta_p': 4.19, 'out_voa': None, 'tilt_target': 0.0,
... 'f_min': 186.1, 'f_max': 190.9}}]}
>>> convert_none_to_empty(a)
{'uid': '[930/WRT-2-2-SIG=>923/WRT-1-9-SIG]-923/AMP-1-13', 'type_variety': 'AMP', \
'metadata': {'location': {'latitude': 0.0, 'longitude': 0.0, 'city': 'Zion', 'region': ''}}, \
'type': 'Multiband_amplifier', 'amplifiers': [{'type_variety': 'AMP_LOW_C', \
'operational': {'gain_target': 12.22, 'delta_p': 4.19, 'out_voa': [None], 'tilt_target': 0.0, \
'f_min': 191.3, 'f_max': 196.1}}, {'type_variety': 'AMP_LOW_L', \
'operational': {'gain_target': 12.05, 'delta_p': 4.19, 'out_voa': [None], 'tilt_target': 0.0, \
'f_min': 186.1, 'f_max': 190.9}}]}
"""
if json_data == [None]:
# already conformed
return json_data
if isinstance(json_data, dict):
for key, value in json_data.items():
json_data[key] = convert_none_to_empty(value)
elif isinstance(json_data, list):
for i, elem in enumerate(json_data):
json_data[i] = convert_none_to_empty(elem)
elif json_data is None:
return [None]
return json_data
def convert_empty_to_none(json_data: Any):
"""Convert all instances of [None] in the input to None.
This function recursively traverses the input data and replaces any
lists containing a single None element with None. If the input is
already None, it returns None unchanged.
:param json_data: The input data to process, which can be of any type.
:type json_data: Any
:return: A new representation of the input with [None] replaced by None.
:rtype: Any
>>> json_data = {
... "uid": "[930/WRT-2-2-SIG=>923/WRT-1-9-SIG]-923/AMP-1-13",
... "type_variety": "AMP",
... "metadata": {
... "location": {
... "latitude": 0.000000,
... "longitude": 0.000000,
... "city": "Zion",
... "region": ""
... }
... },
... "type": "Multiband_amplifier",
... "amplifiers": [{
... "type_variety": "AMP_LOW_C",
... "operational": {
... "gain_target": 12.22,
... "delta_p": 4.19,
... "out_voa": [None],
... "tilt_target": 0.00,
... "f_min": 191.3,
... "f_max": 196.1
... }
... }, {
... "type_variety": "AMP_LOW_L",
... "operational": {
... "gain_target": 12.05,
... "delta_p": 4.19,
... "out_voa": [None],
... "tilt_target": 0.00,
... "f_min": 186.1,
... "f_max": 190.9
... }
... }
... ]
... }
>>> convert_empty_to_none(json_data)
{'uid': '[930/WRT-2-2-SIG=>923/WRT-1-9-SIG]-923/AMP-1-13', 'type_variety': 'AMP', \
'metadata': {'location': {'latitude': 0.0, 'longitude': 0.0, 'city': 'Zion', 'region': ''}}, \
'type': 'Multiband_amplifier', 'amplifiers': [{'type_variety': 'AMP_LOW_C', \
'operational': {'gain_target': 12.22, 'delta_p': 4.19, 'out_voa': None, 'tilt_target': 0.0, \
'f_min': 191.3, 'f_max': 196.1}}, {'type_variety': 'AMP_LOW_L', \
'operational': {'gain_target': 12.05, 'delta_p': 4.19, 'out_voa': None, 'tilt_target': 0.0, \
'f_min': 186.1, 'f_max': 190.9}}]}
"""
if isinstance(json_data, dict):
for key, value in json_data.items():
json_data[key] = convert_empty_to_none(value)
elif isinstance(json_data, list):
if len(json_data) == 1 and json_data[0] is None:
return None
for i, elem in enumerate(json_data):
json_data[i] = convert_empty_to_none(elem)
return json_data
def remove_namespace_context(json_data: Union[Dict, List, float, int, str, bool, None], namespace: str) \
-> Union[Dict, List, float, int, str, bool, None]:
"""Serialisation with yang introduces a namespace in values that
are defined as identity. This function filters them out.
:param json_data: The input JSON topology data to process.
:type json_data: Union[Dict, List, float, int, str, bool, None]
:param namespace: a namespace string
:type namespace: str
:return: the converted JSON data
:rtype: Union[Dict, List, float, int, str, bool, None]
>>> a = [{"a": 123, "b": "123:alkdje"}, {"a": 456, "c": "123", "d": "123:123"}]
>>> remove_namespace_context(a, "123:")
[{'a': 123, 'b': 'alkdje'}, {'a': 456, 'c': '123', 'd': '123'}]
"""
if isinstance(json_data, dict):
for key, value in json_data.items():
json_data[key] = remove_namespace_context(value, namespace)
elif isinstance(json_data, list):
for i, elem in enumerate(json_data):
json_data[i] = remove_namespace_context(elem, namespace)
elif isinstance(json_data, str) and namespace in json_data:
return json_data.split(namespace)[1]
return json_data
def convert_nf_coef(json_data: Dict) -> Dict:
"""Convert gnpy legacy format yang topology format.
:param json_data: The input JSON topology data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: dict
"""
if 'Edfa' not in json_data:
return json_data
for edfa in json_data['Edfa']:
if 'nf_coef' in edfa and not isinstance(edfa['nf_coef'][0], dict):
nf_coef = edfa.pop('nf_coef')
new_nf_coef = [{'coef_order': i, 'nf_coef': c} for i, c in enumerate(nf_coef)]
edfa['nf_coef'] = new_nf_coef
return json_data
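# Illustrative transformation of an 'Edfa' entry (coefficients are placeholders):
#   {'nf_coef': [5.5, -0.06, 0.0008, 0.0]}
# becomes:
#   {'nf_coef': [{'coef_order': 0, 'nf_coef': 5.5}, {'coef_order': 1, 'nf_coef': -0.06},
#                {'coef_order': 2, 'nf_coef': 0.0008}, {'coef_order': 3, 'nf_coef': 0.0}]}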
def convert_back_nf_coef(json_data: Dict) -> Dict:
"""Convert gnpy yang format back to legacy json topology format.
:param json_data: The input JSON topology data to convert back.
:type json_data: Dict
:return: the converted back JSON data
:rtype: dict
"""
if 'Edfa' not in json_data:
return json_data
for edfa in json_data['Edfa']:
if 'nf_coef' in edfa and isinstance(edfa['nf_coef'][0], dict):
nf_coef = edfa.pop('nf_coef')
sorted_nf_coef = sorted(nf_coef, key=lambda x: x['coef_order'])
new_nf_coef = [c['nf_coef'] for c in sorted_nf_coef]
edfa['nf_coef'] = new_nf_coef
return json_data
def convert_nf_fit_coef(json_data: Dict) -> Dict:
"""Convert gnpy legacy format yang topology format.
:param json_data: The input JSON topology data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: dict
"""
if 'nf_fit_coeff' in json_data and not isinstance(json_data['nf_fit_coeff'][0], dict):
nf_coef = json_data.pop('nf_fit_coeff')
new_nf_coef = [{'coef_order': i, 'nf_coef': c} for i, c in enumerate(nf_coef)]
json_data['nf_fit_coeff'] = new_nf_coef
return json_data
def convert_back_nf_fit_coef(json_data: Dict) -> Dict:
"""Convert gnpy yang format back to legacy json topology format.
:param json_data: The input JSON topology data to convert back.
:type json_data: Dict
:return: the converted back JSON data
:rtype: dict
"""
if 'nf_fit_coeff' in json_data and isinstance(json_data['nf_fit_coeff'][0], dict):
nf_coef = json_data.pop('nf_fit_coeff')
sorted_nf_coef = sorted(nf_coef, key=lambda x: x['coef_order'])
new_nf_coef = [c['nf_coef'] for c in sorted_nf_coef]
json_data['nf_fit_coeff'] = new_nf_coef
return json_data
def convert_raman_coef(json_data: Dict) -> Dict:
"""Convert gnpy legacy format yang topology format.
:param json_data: The input JSON topology data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: dict
"""
for elem in json_data[ELEMENTS_KEY]:
if PARAMS_KEY in elem and RAMAN_COEF_KEY in elem[PARAMS_KEY] \
and 'g0' in elem[PARAMS_KEY][RAMAN_COEF_KEY]:
raman_coef = elem[PARAMS_KEY].pop(RAMAN_COEF_KEY)
g0_list = raman_coef.pop('g0', [])
frequency_offset_list = raman_coef.pop('frequency_offset', [])
if frequency_offset_list:
new_raman_coef = {'reference_frequency': raman_coef['reference_frequency'],
'g0_per_frequency': [{'frequency_offset': f, 'g0': v}
for f, v in zip(frequency_offset_list, g0_list)]}
elem[PARAMS_KEY][RAMAN_COEF_KEY] = new_raman_coef
return json_data
def convert_back_raman_coef(json_data: Dict) -> Dict:
"""Convert gnpy yang format back to legacy json topology format.
:param json_data: The input JSON topology data to convert back.
:type json_data: Dict
:return: the converted back JSON data
:rtype: dict
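Illustrative example reversing the conversion above (minimal shape, arbitrary values):
>>> coef = {'reference_frequency': 193.5e12, 'g0_per_frequency': [{'frequency_offset': 0.0, 'g0': 0.0004}]}
>>> back = convert_back_raman_coef({ELEMENTS_KEY: [{PARAMS_KEY: {RAMAN_COEF_KEY: coef}}]})
>>> back[ELEMENTS_KEY][0][PARAMS_KEY][RAMAN_COEF_KEY]['g0']
[0.0004]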
"""
for elem in json_data[ELEMENTS_KEY]:
if PARAMS_KEY in elem and RAMAN_COEF_KEY in elem[PARAMS_KEY] \
and 'g0_per_frequency' in elem[PARAMS_KEY][RAMAN_COEF_KEY]:
raman_coef = elem[PARAMS_KEY].pop(RAMAN_COEF_KEY)
# pop the keyed list only once, then derive both legacy lists from the same entries
g0_per_frequency = raman_coef.pop('g0_per_frequency', [])
g0_list = [g['g0'] for g in g0_per_frequency]
frequency_offset_list = [f['frequency_offset'] for f in g0_per_frequency]
if frequency_offset_list:
new_raman_coef = {'reference_frequency': raman_coef['reference_frequency'],
'g0': g0_list,
'frequency_offset': frequency_offset_list}
elem[PARAMS_KEY][RAMAN_COEF_KEY] = new_raman_coef
return json_data
def convert_raman_efficiency(json_data: Dict) -> Dict:
"""Convert gnpy legacy format yang topology format.
:param json_data: The input JSON topology data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: dict
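Illustrative example (minimal RamanFiber entry, arbitrary values):
>>> fiber = {RAMAN_EFFICIENCY_KEY: {'cr': [0.0, 0.0004], 'frequency_offset': [0.0, 5e12]}}
>>> converted = convert_raman_efficiency({'RamanFiber': [fiber]})
>>> converted['RamanFiber'][0][RAMAN_EFFICIENCY_KEY]
[{'frequency_offset': 0.0, 'cr': 0.0}, {'frequency_offset': 5000000000000.0, 'cr': 0.0004}]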
"""
if 'RamanFiber' not in json_data:
return json_data
for fiber_eqpt in json_data['RamanFiber']:
if RAMAN_EFFICIENCY_KEY in fiber_eqpt \
and 'cr' in fiber_eqpt[RAMAN_EFFICIENCY_KEY]:
raman_efficiency = fiber_eqpt.pop(RAMAN_EFFICIENCY_KEY)
cr_list = raman_efficiency.pop('cr', [])
frequency_offset_list = raman_efficiency.pop('frequency_offset', [])
if frequency_offset_list:
new_raman_efficiency = [{'frequency_offset': f, 'cr': v}
for f, v in zip(frequency_offset_list, cr_list)]
fiber_eqpt[RAMAN_EFFICIENCY_KEY] = new_raman_efficiency
return json_data
def convert_back_raman_efficiency(json_data: Dict) -> Dict:
"""Convert gnpy yang format back to legacy json topology format.
:param json_data: The input JSON topology data to convert back.
:type json_data: Dict
:return: the converted back JSON data
:rtype: dict
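Illustrative example reversing the conversion above (minimal RamanFiber entry, arbitrary values):
>>> fiber = {RAMAN_EFFICIENCY_KEY: [{'frequency_offset': 0.0, 'cr': 0.0}, {'frequency_offset': 5e12, 'cr': 0.0004}]}
>>> back = convert_back_raman_efficiency({'RamanFiber': [fiber]})
>>> back['RamanFiber'][0][RAMAN_EFFICIENCY_KEY]['cr']
[0.0, 0.0004]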
"""
if 'RamanFiber' not in json_data:
return json_data
for fiber_eqpt in json_data['RamanFiber']:
if RAMAN_EFFICIENCY_KEY in fiber_eqpt and isinstance(fiber_eqpt[RAMAN_EFFICIENCY_KEY], list):
raman_efficiency = fiber_eqpt.pop(RAMAN_EFFICIENCY_KEY)
cr_list = [c['cr'] for c in raman_efficiency]
frequency_offset_list = [f['frequency_offset'] for f in raman_efficiency]
if frequency_offset_list:
old_raman_efficiency = {'cr': cr_list,
'frequency_offset': frequency_offset_list}
fiber_eqpt[RAMAN_EFFICIENCY_KEY] = old_raman_efficiency
return json_data
def reorder_keys(data_list: List, key: str) -> List:
"""Roarder item in a dict placing the key (the key of a list with YANG meaning) first.
This is required because oopt-gnpy-libyang does not recognize the key when it is not placed first in the data node.
:param json_data: the list of dictionary items.
:type data_list: List
:return: the converted back JSON data
:rtype: List
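Illustrative example (hypothetical list item):
>>> reorder_keys([{'node-id': 'roadm A', 'index': 0}], 'index')
[{'index': 0, 'node-id': 'roadm A'}]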
"""
for item in data_list:
index_value = item.pop(key, None)
if index_value is not None:
# Place key first
new_item = {key: index_value}
# add other items
new_item.update(item)
# replace old element with new element
for k in list(item.keys()):
item.pop(k)
item.update(new_item)
return data_list
# The next functions exist because libyang (ly) requires that the key of a list be placed first in the item
def reorder_route_objects(json_data: Dict) -> Dict:
"""Make sure that the index of a route object is placed first in the object.
:param json_data: The input JSON service (path-request) data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: dict
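Illustrative example (hypothetical minimal request):
>>> request = {'explicit-route-objects': {'route-object-include-exclude': [{'num-unnum-hop': {'node-id': 'roadm A'}, 'index': 0}]}}
>>> converted = reorder_route_objects({'path-request': [request]})
>>> converted['path-request'][0]['explicit-route-objects']['route-object-include-exclude']
[{'index': 0, 'num-unnum-hop': {'node-id': 'roadm A'}}]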
"""
for request in json_data['path-request']:
if "explicit-route-objects" in request:
request["explicit-route-objects"]["route-object-include-exclude"] = \
reorder_keys(request["explicit-route-objects"]["route-object-include-exclude"], "index")
return json_data
def reorder_lumped_losses_objects(json_data: Dict) -> Dict:
"""Make sure that the position of a lumped loss object is placed first in the object.
:param json_data: The input JSON topology data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: dict
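Illustrative example (hypothetical minimal element):
>>> element = {'params': {'lumped_losses': [{'loss': 1.0, 'position': 2.5}]}}
>>> converted = reorder_lumped_losses_objects({'elements': [element]})
>>> converted['elements'][0]['params']['lumped_losses']
[{'position': 2.5, 'loss': 1.0}]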
"""
for element in json_data['elements']:
if "params" in element and "lumped_losses" in element["params"]:
element["params"]["lumped_losses"] = reorder_keys(element["params"]["lumped_losses"], "position")
return json_data
def reorder_raman_pumps(json_data: Dict) -> Dict:
"""Make sure that the frequency of a Raman pum object is placed first in the object.
:param json_data: The input JSON topology data to convert.
:type json_data: Dict
:return: the converted JSON data
:rtype: dict
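Illustrative example (hypothetical minimal element):
>>> element = {'operational': {'raman_pumps': [{'power': 0.2, 'frequency': 205000000000000.0}]}}
>>> converted = reorder_raman_pumps({'elements': [element]})
>>> converted['elements'][0]['operational']['raman_pumps']
[{'frequency': 205000000000000.0, 'power': 0.2}]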
"""
for element in json_data['elements']:
if "operational" in element and "raman_pumps" in element["operational"]:
element["operational"]["raman_pumps"] = reorder_keys(element["operational"]["raman_pumps"], "frequency")
return json_data

View File

@@ -1,3 +1,3 @@
"""
'''
Tracking :py:mod:`.request` for spectrum and their :py:mod:`.spectrum_assignment`.
"""
'''

View File

@@ -1,11 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.topology.request: path computation functionality
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
gnpy.topology.request
=====================
@@ -21,40 +16,34 @@ See: draft-ietf-teas-yang-path-computation-01.txt
"""
from collections import namedtuple, OrderedDict
from typing import List
from logging import getLogger
from networkx import (dijkstra_path, NetworkXNoPath,
all_simple_paths, shortest_simple_paths)
from networkx.utils import pairwise
from numpy import mean, argmin
from gnpy.core.elements import Transceiver, Roadm, Edfa, Multiband_amplifier
from gnpy.core.utils import lin2db, unique_ordered, find_common_range
from gnpy.core.info import create_input_spectral_information, carriers_to_spectral_information, \
demuxed_spectral_information, muxed_spectral_information, SpectralInformation
from gnpy.core import network as network_module
from numpy import mean
from gnpy.core.elements import Transceiver, Roadm
from gnpy.core.utils import lin2db
from gnpy.core.info import create_input_spectral_information
from gnpy.core.exceptions import ServiceError, DisjunctionError
import gnpy.core.ansi_escapes as ansi_escapes
from copy import deepcopy
from csv import writer
from math import ceil
LOGGER = getLogger(__name__)
RequestParams = namedtuple('RequestParams', 'request_id source destination bidir trx_type'
' trx_mode nodes_list loose_list spacing power nb_channel f_min'
' f_max format baud_rate OSNR penalties bit_rate'
' roll_off tx_osnr min_spacing cost path_bandwidth effective_freq_slot'
' equalization_offset_db, tx_power')
DisjunctionParams = namedtuple('DisjunctionParams', 'disjunction_id relaxable link_diverse'
' node_diverse disjunctions_req')
RequestParams = namedtuple('RequestParams', 'request_id source destination bidir trx_type' +
' trx_mode nodes_list loose_list spacing power nb_channel f_min' +
' f_max format baud_rate OSNR bit_rate roll_off tx_osnr' +
' min_spacing cost path_bandwidth effective_freq_slot')
DisjunctionParams = namedtuple('DisjunctionParams', 'disjunction_id relaxable link' +
'_diverse node_diverse disjunctions_req')
class PathRequest:
"""the class that contains all attributes related to a request"""
""" the class that contains all attributes related to a request
"""
def __init__(self, *args, **params):
params = RequestParams(**params)
self.request_id = params.request_id
@@ -73,19 +62,15 @@ class PathRequest:
self.f_max = params.f_max
self.format = params.format
self.OSNR = params.OSNR
self.penalties = params.penalties
self.bit_rate = params.bit_rate
self.roll_off = params.roll_off
self.tx_osnr = params.tx_osnr
self.tx_power = params.tx_power
self.min_spacing = params.min_spacing
self.cost = params.cost
self.path_bandwidth = params.path_bandwidth
if params.effective_freq_slot is not None:
self.N = [s['N'] for s in params.effective_freq_slot]
self.M = [s['M'] for s in params.effective_freq_slot]
self.initial_spectrum = None
self.offset_db = params.equalization_offset_db
self.N = params.effective_freq_slot['N']
self.M = params.effective_freq_slot['M']
def __str__(self):
return '\n\t'.join([f'{type(self).__name__} {self.request_id}',
@@ -93,7 +78,7 @@ class PathRequest:
f'destination: {self.destination}'])
def __repr__(self):
if self.baud_rate is not None and self.bit_rate is not None:
if self.baud_rate is not None:
temp = self.baud_rate * 1e-9
temp2 = self.bit_rate * 1e-9
else:
@@ -108,8 +93,7 @@ class PathRequest:
f'baud_rate:\t{temp} Gbaud',
f'bit_rate:\t{temp2} Gb/s',
f'spacing:\t{self.spacing * 1e-9} GHz',
f'power: \t{round(lin2db(self.power) + 30, 2)} dBm',
f'tx_power_dbm: \t{round(lin2db(self.tx_power) + 30, 2)} dBm',
f'power: \t{round(lin2db(self.power)+30, 2)} dBm',
f'nb channels: \t{self.nb_channel}',
f'path_bandwidth: \t{round(self.path_bandwidth * 1e-9, 2)} Gbit/s',
f'nodes-list:\t{self.nodes_list}',
@@ -118,7 +102,8 @@ class PathRequest:
class Disjunction:
"""the class that contains all attributes related to disjunction constraints"""
""" the class that contains all attributes related to disjunction constraints
"""
def __init__(self, *args, **params):
params = DisjunctionParams(**params)
@@ -147,7 +132,7 @@ BLOCKING_NOPATH = ['NO_PATH', 'NO_PATH_WITH_CONSTRAINT',
'NO_FEASIBLE_BAUDRATE_WITH_SPACING',
'NO_COMPUTED_SNR']
BLOCKING_NOMODE = ['NO_FEASIBLE_MODE', 'MODE_NOT_FEASIBLE']
BLOCKING_NOSPECTRUM = ['NO_SPECTRUM', 'NOT_ENOUGH_RESERVED_SPECTRUM']
BLOCKING_NOSPECTRUM = 'NO_SPECTRUM'
class ResultElement:
@@ -163,7 +148,8 @@ class ResultElement:
@property
def detailed_path_json(self):
"""a function that builds path object for normal and blocking cases"""
""" a function that builds path object for normal and blocking cases
"""
index = 0
pro_list = []
for element in self.computed_path:
@@ -179,30 +165,24 @@ class ResultElement:
}
pro_list.append(temp)
index += 1
if not hasattr(self.path_request, 'blocking_reason'):
# M and N values should not be None at this point
if self.path_request.M is None or self.path_request.N is None:
raise ServiceError('request {self.path_id} should have positive non null n and m values.')
if self.path_request.M > 0:
temp = {
'path-route-object': {
'index': index,
"label-hop": [{
"N": n,
"M": m
} for n, m in zip(self.path_request.N, self.path_request.M)],
"label-hop": {
"N": self.path_request.N,
"M": self.path_request.M
},
}
}
pro_list.append(temp)
index += 1
elif self.path_request.M == 0 and hasattr(self.path_request, 'blocking_reason'):
# if the path is blocked due to spectrum, no label object is created, but
# the json response includes a detailed path for user information.
pass
else:
# if the path is blocked, no label object is created, but
# the json response includes a detailed path for user information.
# M and N values should be None at this point
if self.path_request.M is not None or self.path_request.N is not None:
raise ServiceError('request {self.path_id} should not have label M and N values at this point.')
raise ServiceError('request {self.path_id} should have positive path bandwidth value.')
if isinstance(element, Transceiver):
temp = {
'path-route-object': {
@@ -219,9 +199,11 @@ class ResultElement:
@property
def path_properties(self):
"""a function that returns the path properties (metrics, crossed elements) into a dict"""
""" a function that returns the path properties (metrics, crossed elements) into a dict
"""
def path_metric(pth, req):
"""creates the metrics dictionary"""
""" creates the metrics dictionary
"""
return [
{
'metric-type': 'SNR-bandwidth',
@@ -263,7 +245,8 @@ class ResultElement:
@property
def pathresult(self):
"""create the result dictionnary (response for a request)"""
""" create the result dictionnary (response for a request)
"""
try:
if self.path_request.blocking_reason in BLOCKING_NOPATH:
response = {
@@ -301,6 +284,7 @@ def compute_constrained_path(network, req):
# been corrected and harmonized before
msg = (f'Request {req.request_id} malformed list of nodes: last node should '
'be destination trx')
LOGGER.critical(msg)
raise ValueError()
trx = [n for n in network if isinstance(n, Transceiver)]
@@ -310,16 +294,15 @@ def compute_constrained_path(network, req):
nodes_list = []
for node in req.nodes_list[:-1]:
nodes_list.append(next(el for el in network if el.uid == node))
total_path = explicit_path(nodes_list, source, destination, network)
if total_path is not None:
return total_path
try:
path_generator = shortest_simple_paths(network, source, destination, weight='weight')
total_path = next(path for path in path_generator if ispart(nodes_list, path))
except NetworkXNoPath:
msg = (f'Request {req.request_id} could not find a path from'
f' {source.uid} to node: {destination.uid} in network topology')
msg = (f'{ansi_escapes.yellow}Request {req.request_id} could not find a path from'
f' {source.uid} to node: {destination.uid} in network topology{ansi_escapes.reset}')
LOGGER.critical(msg)
print(msg)
req.blocking_reason = 'NO_PATH'
total_path = []
except StopIteration:
@@ -328,117 +311,79 @@ def compute_constrained_path(network, req):
# last node which is the transceiver)
# if all nodes i n node_list are LOOSE constraint, skip the constraints and find
# a path w/o constraints, else there is no possible path
LOGGER.warning(f'Request {req.request_id} could not find a path crossing '
f'{[el.uid for el in nodes_list[:-1]]} in network topology')
print(f'{ansi_escapes.yellow}Request {req.request_id} could not find a path crossing '
f'{[el.uid for el in nodes_list[:-1]]} in network topology{ansi_escapes.reset}')
if 'STRICT' not in req.loose_list[:-1]:
msg = (f'Request {req.request_id} could not find a path with user_'
f'include node constraints. Constraint ignored')
LOGGER.warning(msg)
msg = (f'{ansi_escapes.yellow}Request {req.request_id} could not find a path with user_'
f'include node constraints{ansi_escapes.reset}')
LOGGER.info(msg)
print(f'constraint ignored')
total_path = dijkstra_path(network, source, destination, weight='weight')
else:
# one STRICT makes the whole list STRICT
msg = (f'Request {req.request_id} could not find a path with user '
f'include node constraints.\nNo path computed')
msg = (f'{ansi_escapes.yellow}Request {req.request_id} could not find a path with user '
f'include node constraints.\nNo path computed{ansi_escapes.reset}')
LOGGER.critical(msg)
print(msg)
req.blocking_reason = 'NO_PATH_WITH_CONSTRAINT'
total_path = []
return total_path
def filter_si(path: list, equipment: dict, si: SpectralInformation) -> SpectralInformation:
"""Filter spectral information based on the amplifiers common range"""
# First retrieve f_min, f_max spectrum according to amplifiers' spectrum on the path
common_range = find_elements_common_range(path, equipment)
# filter out frequencies that should not be created
filtered_si = []
for band in common_range:
temp = demuxed_spectral_information(si, band)
if temp:
filtered_si.append(temp)
if not filtered_si:
raise ValueError('Defined propagation band does not match amplifiers band.')
return muxed_spectral_information(filtered_si)
def propagate(path, req, equipment):
"""propagates signals in each element according to initial spectrum set by user
Spectrum is specified in request through f_min, f_max and spacing, or initial_spectrum
and amps frequency band on the path is used to filter out frequencies"""
# generates spectrum based on request
if req.initial_spectrum is not None:
si = carriers_to_spectral_information(initial_spectrum=req.initial_spectrum, power=req.power)
else:
si = create_input_spectral_information(
f_min=req.f_min, f_max=req.f_max, roll_off=req.roll_off, baud_rate=req.baud_rate,
spacing=req.spacing, tx_osnr=req.tx_osnr, tx_power=req.tx_power, delta_pdb=req.offset_db)
# filter out frequencies that should not be created
si = filter_si(path, equipment, si)
roadm_osnr = []
for i, el in enumerate(path):
if isinstance(el, Roadm):
si = el(si, degree=path[i + 1].uid, from_degree=path[i - 1].uid)
roadm_osnr.append(el.get_impairment('roadm-osnr', si.frequency,
from_degree=path[i - 1].uid, degree=path[i + 1].uid))
else:
si = el(si)
path[0].update_snr(si.tx_osnr)
path[0].calc_penalties(req.penalties)
roadm_osnr.append(si.tx_osnr)
path[-1].update_snr(*roadm_osnr)
path[-1].calc_penalties(req.penalties)
return si
si = create_input_spectral_information(
req.f_min, req.f_max, req.roll_off, req.baud_rate,
req.power, req.spacing)
for el in path:
si = el(si)
path[-1].update_snr(req.tx_osnr, equipment['Roadm']['default'].add_drop_osnr)
return path
def propagate2(path, req, equipment):
si = create_input_spectral_information(
req.f_min, req.f_max, req.roll_off, req.baud_rate,
req.power, req.spacing)
infos = {}
for el in path:
before_si = si
after_si = si = el(si)
infos[el] = before_si, after_si
path[-1].update_snr(req.tx_osnr, equipment['Roadm']['default'].add_drop_osnr)
return infos
def propagate_and_optimize_mode(path, req, equipment):
# if mode is unknown : loops on the modes starting from the highest baudrate fiting in the
# step 1: create an ordered list of modes based on baudrate and power offset
# order higher baudrate with higher power offset first
baudrate_offset_to_explore = list(set([(this_mode['baud_rate'], this_mode['equalization_offset_db'])
for this_mode in equipment['Transceiver'][req.tsp].mode
if float(this_mode['min_spacing']) <= req.spacing]))
# step 1: create an ordered list of modes based on baudrate
baudrate_to_explore = list(set([this_mode['baud_rate']
for this_mode in equipment['Transceiver'][req.tsp].mode
if float(this_mode['min_spacing']) <= req.spacing]))
# TODO be carefull on limits cases if spacing very close to req spacing eg 50.001 50.000
baudrate_offset_to_explore = sorted(baudrate_offset_to_explore, reverse=True)
if baudrate_offset_to_explore:
baudrate_to_explore = sorted(baudrate_to_explore, reverse=True)
if baudrate_to_explore:
# at least 1 baudrate can be tested wrt spacing
for (this_br, this_offset) in baudrate_offset_to_explore:
for this_br in baudrate_to_explore:
modes_to_explore = [this_mode for this_mode in equipment['Transceiver'][req.tsp].mode
if this_mode['baud_rate'] == this_br
and float(this_mode['min_spacing']) <= req.spacing]
if this_mode['baud_rate'] == this_br and
float(this_mode['min_spacing']) <= req.spacing]
modes_to_explore = sorted(modes_to_explore,
key=lambda x: (x['bit_rate'], x['equalization_offset_db']), reverse=True)
key=lambda x: x['bit_rate'], reverse=True)
# print(modes_to_explore)
# step2: computes propagation for each baudrate: stop and select the first that passes
# TODO: the case of roll off is not included: for now use SI one
# TODO: the case of roll of is not included: for now use SI one
# TODO: if the loop in mode optimization does not have a feasible path, then bugs
if req.initial_spectrum is not None:
# this case is not yet handled: spectrum can not be defined for the path-request-run function
# and this function is only called in this case. so coming here should not be considered yet.
msg = f'Request: {req.request_id} contains a unexpected initial_spectrum.'
raise ServiceError(msg)
spc_info = create_input_spectral_information(f_min=req.f_min, f_max=req.f_max,
roll_off=equipment['SI']['default'].roll_off,
baud_rate=this_br, spacing=req.spacing,
delta_pdb=this_offset, tx_osnr=req.tx_osnr,
tx_power=req.tx_power)
spc_info = filter_si(path, equipment, spc_info)
roadm_osnr = []
for i, el in enumerate(path):
if isinstance(el, Roadm):
spc_info = el(spc_info, degree=path[i + 1].uid, from_degree=path[i - 1].uid)
roadm_osnr.append(el.get_impairment('roadm-osnr', spc_info.frequency,
from_degree=path[i - 1].uid, degree=path[i + 1].uid))
else:
spc_info = el(spc_info)
spc_info = create_input_spectral_information(req.f_min, req.f_max,
equipment['SI']['default'].roll_off,
this_br, req.power, req.spacing)
for el in path:
spc_info = el(spc_info)
for this_mode in modes_to_explore:
if path[-1].snr is not None:
path[0].update_snr(this_mode['tx_osnr'])
path[0].calc_penalties(this_mode['penalties'])
roadm_osnr.append(this_mode['tx_osnr'])
path[-1].update_snr(*roadm_osnr)
# remove the tx_osnr from roadm_osnr list for the next iteration
del roadm_osnr[-1]
path[-1].calc_penalties(this_mode['penalties'])
if round(min(path[-1].snr_01nm - path[-1].total_penalty), 2) \
> this_mode['OSNR'] + equipment['SI']['default'].sys_margins:
path[-1].update_snr(this_mode['tx_osnr'], equipment['Roadm']['default'].add_drop_osnr)
if round(min(path[-1].snr + lin2db(this_br / (12.5e9))), 2) > this_mode['OSNR']:
return path, this_mode
else:
last_explored_mode = this_mode
@@ -449,19 +394,22 @@ def propagate_and_optimize_mode(path, req, equipment):
# returns the last propagated path and mode
msg = f'\tWarning! Request {req.request_id}: no mode satisfies path SNR requirement.\n'
LOGGER.warning(msg)
print(msg)
LOGGER.info(msg)
req.blocking_reason = 'NO_FEASIBLE_MODE'
return path, last_explored_mode
else:
# no baudrate satisfying spacing
msg = f'\tWarning! Request {req.request_id}: no baudrate satisfies spacing requirement.\n'
LOGGER.warning(msg)
print(msg)
LOGGER.info(msg)
req.blocking_reason = 'NO_FEASIBLE_BAUDRATE_WITH_SPACING'
return [], None
def jsontopath_metric(path_metric):
"""a functions that reads resulting metric from json string"""
""" a functions that reads resulting metric from json string
"""
output_snr = next(e['accumulative-value']
for e in path_metric if e['metric-type'] == 'SNR-0.1nm')
output_snrbandwidth = next(e['accumulative-value']
@@ -479,7 +427,9 @@ def jsontopath_metric(path_metric):
def jsontoparams(my_p, tsp, mode, equipment):
"""a function that derives optical params from transponder type and mode supports the no mode case"""
""" a function that derives optical params from transponder type and mode
supports the no mode case
"""
temp = []
for elem in my_p['path-properties']['path-route-objects']:
if 'num-unnum-hop' in elem['path-route-object']:
@@ -489,8 +439,8 @@ def jsontoparams(my_p, tsp, mode, equipment):
temp2 = []
for elem in my_p['path-properties']['path-route-objects']:
if 'label-hop' in elem['path-route-object'].keys():
temp2.append(f'{[e["N"] for e in elem["path-route-object"]["label-hop"]]}, '
+ f'{[e["M"] for e in elem["path-route-object"]["label-hop"]]}')
temp2.append(f'{elem["path-route-object"]["label-hop"]["N"]}, ' +
f'{elem["path-route-object"]["label-hop"]["M"]}')
# OrderedDict.fromkeys returns the unique set of strings.
# TODO: if spectrum changes along the path, we should be able to give the segments
# eg for regeneration case
@@ -514,10 +464,10 @@ def jsontoparams(my_p, tsp, mode, equipment):
def jsontocsv(json_data, equipment, fileout):
"""reads json path result file in accordance with:
Yang model for requesting Path Computation
draft-ietf-teas-yang-path-computation-01.txt.
and write results in an CSV file
""" reads json path result file in accordance with:
Yang model for requesting Path Computation
draft-ietf-teas-yang-path-computation-01.txt.
and write results in an CSV file
"""
mywriter = writer(fileout)
mywriter.writerow(('response-id', 'source', 'destination', 'path_bandwidth', 'Pass?',
@@ -746,8 +696,8 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
# in each loop, dpath is updated with a path for rq that satisfies
# disjunction with each path in dpath
# for example, assume set of requests in the vector (disjunction_list) is {rq1,rq2, rq3}
# rq1 p1: aefhg
# p2: abfhg
# rq1 p1: abfhg
# p2: aefhg
# p3: abcg
# rq2 p8: bf
# rq3 p4: abcgh
@@ -764,7 +714,6 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
# after second loop:
# dpath = [ p3 p8 p6 ]
# since p1 and p4 are not disjoint
# p1 and p6 are not disjoint
# p1 and p7 are not disjoint
# p3 and p4 are not disjoint
# p3 and p7 are not disjoint
@@ -788,6 +737,7 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
temp.append(temp2)
# print(f' coucou {elem1}: \t{temp}')
dpath = temp
# print(dpath)
candidates[dis.disjunction_id] = dpath
# for i in disjunctions_list:
@@ -828,9 +778,9 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
if pth in cndt:
candidates[this_id].remove(cndt)
# for i in disjunctions_list:
# print(i.disjunction_id)
# print(f'\n{candidates[i.disjunction_id]}')
# for i in disjunctions_list:
# print(i.disjunction_id)
# print(f'\n{candidates[i.disjunction_id]}')
# step 4 apply route constraints: remove candidate path that do not satisfy
# the constraint only in the case of disjounction: the simple path is processed in
@@ -838,51 +788,54 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
# TODO: keep a version without the loose constraint
for this_d in disjunctions_list:
temp = []
alternatetemp = []
for j, sol in enumerate(candidates[this_d.disjunction_id]):
testispartok = True
testispartnokloose = True
for pth in sol:
# print(f'test {allpaths[id(pth)].req.request_id}')
# print(f'length of route {len(allpaths[id(pth)].req.nodes_list)}')
if allpaths[id(pth)].req.nodes_list:
# if any pth from sol does not contain the ordered list node,
# remove sol from the candidate, except if constraint was loose:
# then keep sol as an alternate solution
# if pth does not containt the ordered list node, remove sol from the candidate
# except if this was the last solution: then check if the constraint is loose
# or not
if not ispart(allpaths[id(pth)].req.nodes_list, pth):
testispartok = False
if 'STRICT' in allpaths[id(pth)].req.loose_list:
LOGGER.debug(f'removing solution from candidate paths\n{pth}')
testispartnokloose = False
break
# print(f'nb of solutions {len(temp)}')
if j < len(candidates[this_d.disjunction_id]) - 1:
msg = f'removing {sol}'
LOGGER.info(msg)
testispartok = False
# break
else:
if 'LOOSE' in allpaths[id(pth)].req.loose_list:
LOGGER.info(f'Could not apply route constraint' +
f'{allpaths[id(pth)].req.nodes_list} on request' +
f' {allpaths[id(pth)].req.request_id}')
else:
LOGGER.info(f'removing last solution from candidate paths\n{sol}')
testispartok = False
if testispartok:
temp.append(sol)
elif testispartnokloose:
LOGGER.debug(f'Adding solution as alternate solution not satisfying constraint\n{pth}')
alternatetemp.append(sol)
if temp:
candidates[this_d.disjunction_id] = temp
elif alternatetemp:
candidates[this_d.disjunction_id] = alternatetemp
else:
candidates[this_d.disjunction_id] = []
candidates[this_d.disjunction_id] = temp
# step 5 select the first combination that works
pathreslist_disjoint = {}
for dis in disjunctions_list:
if candidates[dis.disjunction_id]:
for pth in candidates[dis.disjunction_id][0]:
if allpaths[id(pth)].req in pathreqlist_disjt:
# print(f'selected path:{pth} for req {allpaths[id(pth)].req.request_id}')
pathreslist_disjoint[allpaths[id(pth)].req] = allpaths[id(pth)].pth
# remove request from list of requests (in case of duplicate)
pathreqlist_disjt.remove(allpaths[id(pth)].req)
# remove duplicated candidates
candidates = remove_candidate(candidates, allpaths, allpaths[id(pth)].req, pth)
else:
msg = 'No disjoint path found with added constraint\nComputation stopped.'
# TODO in this case: replay step 5 with the candidate without constraints
raise DisjunctionError(msg)
test_sol = True
while test_sol:
# print('coucou')
if candidates[dis.disjunction_id]:
for pth in candidates[dis.disjunction_id][0]:
if allpaths[id(pth)].req in pathreqlist_disjt:
# print(f'selected path:{pth} for req {allpaths[id(pth)].req.request_id}')
pathreslist_disjoint[allpaths[id(pth)].req] = allpaths[id(pth)].pth
pathreqlist_disjt.remove(allpaths[id(pth)].req)
candidates = remove_candidate(candidates, allpaths, allpaths[id(pth)].req, pth)
test_sol = False
else:
msg = f'No disjoint path found with added constraint'
LOGGER.critical(msg)
print(f'{msg}\nComputation stopped.')
# TODO in this case: replay step 5 with the candidate without constraints
raise DisjunctionError(msg)
# for i in disjunctions_list:
# print(i.disjunction_id)
@@ -901,7 +854,8 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
def isdisjoint(pth1, pth2):
"""returns 0 if disjoint"""
""" returns 0 if disjoint
"""
edge1 = list(pairwise(pth1))
edge2 = list(pairwise(pth2))
for edge in edge1:
@@ -911,9 +865,9 @@ def isdisjoint(pth1, pth2):
def find_reversed_path(pth):
"""select of intermediate roadms and find the path between them
note that this function may not give an exact result in case of multiple
links between two adjacent nodes.
""" select of intermediate roadms and find the path between them
note that this function may not give an exact result in case of multiple
links between two adjacent nodes.
"""
# TODO add some indication on elements to indicate from which other they
# are the reversed direction. This is partly done with oms indication
@@ -936,8 +890,9 @@ def find_reversed_path(pth):
# concatenation should be [roadma el1 el2 roadmb el3 el4 roadmc]
reversed_path = list(OrderedDict.fromkeys(reversed_path))
else:
msg = f'Error while handling reversed path {pth[-1].uid} to {pth[0].uid}:' \
+ ' can not handle unidir topology. TO DO.'
msg = f'Error while handling reversed path {pth[-1].uid} to {pth[0].uid}:' +\
' can not handle unidir topology. TO DO.'
LOGGER.critical(msg)
raise ValueError(msg)
reversed_path.append(pth[0])
@@ -945,7 +900,9 @@ def find_reversed_path(pth):
def ispart(ptha, pthb):
"""the functions takes two paths a and b and retrns True if all a elements are part of b and in the same order"""
""" the functions takes two paths a and b and retrns True
if all a elements are part of b and in the same order
"""
j = 0
for elem in ptha:
if elem in pthb:
@@ -959,7 +916,8 @@ def ispart(ptha, pthb):
def remove_candidate(candidates, allpaths, rqst, pth):
"""filter duplicate candidates"""
""" filter duplicate candidates
"""
# print(f'coucou {rqst.request_id}')
for key, candidate in candidates.items():
temp = candidate.copy()
@@ -974,7 +932,8 @@ def remove_candidate(candidates, allpaths, rqst, pth):
def compare_reqs(req1, req2, disjlist):
"""compare two requests: returns True or False"""
""" compare two requests: returns True or False
"""
dis1 = [d for d in disjlist if req1.request_id in d.disjunctions_req]
dis2 = [d for d in disjlist if req2.request_id in d.disjunctions_req]
same_disj = False
@@ -1007,7 +966,6 @@ def compare_reqs(req1, req2, disjlist):
req1.format == req2.format and \
req1.OSNR == req2.OSNR and \
req1.roll_off == req2.roll_off and \
req1.tx_power == req2.tx_power and \
same_disj:
return True
else:
@@ -1015,24 +973,19 @@ def compare_reqs(req1, req2, disjlist):
def requests_aggregation(pathreqlist, disjlist):
"""this function aggregates requests so that if several requests
exist between same source and destination and with same transponder type
If transponder mode is defined and identical, then also agregates demands.
""" this function aggregates requests so that if several requests
exist between same source and destination and with same transponder type
"""
# todo maybe add conditions on mode ??, spacing ...
# currently if undefined takes the default values
local_list = pathreqlist.copy()
for req in pathreqlist:
for this_r in local_list:
if req.request_id != this_r.request_id and compare_reqs(req, this_r, disjlist) and\
this_r.tsp_mode is not None:
if req.request_id != this_r.request_id and compare_reqs(req, this_r, disjlist):
# aggregate
this_r.path_bandwidth += req.path_bandwidth
this_r.N = this_r.N + req.N
this_r.M = this_r.M + req.M
temp_r_id = this_r.request_id
this_r.request_id = ' | '.join((this_r.request_id, req.request_id))
# remove request from list
local_list.remove(req)
# todo change also disjunction req with new demand
@@ -1049,22 +1002,23 @@ def requests_aggregation(pathreqlist, disjlist):
def correct_json_route_list(network, pathreqlist):
"""all names in list should be exact name in the network, and there is no ambiguity
This function only checks that list is correct, warns user if the name is incorrect and
suppresses the constraint it it is loose or raises an error if it is strict
""" all names in list should be exact name in the network, and there is no ambiguity
This function only checks that list is correct, warns user if the name is incorrect and
suppresses the constraint it it is loose or raises an error if it is strict
"""
all_uid = [n.uid for n in network.nodes()]
transponders = [n.uid for n in network.nodes() if isinstance(n, Transceiver)]
for pathreq in pathreqlist:
if pathreq.source not in transponders:
msg = f'Request: {pathreq.request_id}: could not find transponder' \
+ f' source : {pathreq.source}.'
msg = f'{ansi_escapes.red}Request: {pathreq.request_id}: could not find transponder' +\
f' source : {pathreq.source}.{ansi_escapes.reset}'
LOGGER.critical(msg)
raise ServiceError(msg)
if pathreq.destination not in transponders:
msg = f'Request: {pathreq.request_id}: could not find transponder' \
+ f' destination : {pathreq.destination}.'
msg = f'{ansi_escapes.red}Request: {pathreq.request_id}: could not find transponder' +\
f' destination : {pathreq.destination}.{ansi_escapes.reset}'
LOGGER.critical(msg)
raise ServiceError(msg)
# silently remove source and dest nodes from the list
@@ -1083,21 +1037,24 @@ def correct_json_route_list(network, pathreqlist):
# if no matching can be found in the network just ignore this constraint
# if it is a loose constraint
# warns the user that this node is not part of the topology
msg = f'invalid route node specified:\n\t\'{n_id}\',' \
+ ' could not use it as constraint, skipped!'
LOGGER.warning(msg)
msg = f'{ansi_escapes.yellow}invalid route node specified:\n\t\'{n_id}\',' +\
f' could not use it as constraint, skipped!{ansi_escapes.reset}'
print(msg)
LOGGER.info(msg)
pathreq.loose_list.pop(pathreq.nodes_list.index(n_id))
pathreq.nodes_list.remove(n_id)
else:
msg = f'could not find node:\n\t \'{n_id}\' in network' \
+ ' topology. Strict constraint can not be applied.'
msg = f'{ansi_escapes.red}could not find node:\n\t \'{n_id}\' in network' +\
f' topology. Strict constraint can not be applied.{ansi_escapes.reset}'
LOGGER.critical(msg)
raise ServiceError(msg)
return pathreqlist
def deduplicate_disjunctions(disjn):
"""clean disjunctions to remove possible repetition"""
""" clean disjunctions to remove possible repetition
"""
local_disjn = disjn.copy()
for elem in local_disjn:
for dis_elem in local_disjn:
@@ -1107,28 +1064,23 @@ def deduplicate_disjunctions(disjn):
return local_disjn
def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist, redesign=False):
"""use a list but a dictionnary might be helpful to find path based on request_id
TODO change all these req, dsjct, res lists into dict !
def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist):
""" use a list but a dictionnary might be helpful to find path based on request_id
TODO change all these req, dsjct, res lists into dict !
"""
path_res_list = []
reversed_path_res_list = []
propagated_reversed_path_res_list = []
total_nb_requests = len(pathreqlist)
if redesign:
LOGGER.warning('Redesign the network for each request channel, '
+ 'using the request channel as the reference channel for the design.')
for i, pathreq in enumerate(pathreqlist):
# use the power specified in requests but might be different from the one
# specified for design the power is an optional parameter for requests
# definition if optional, use the one defines in eqt_config.json
msg = f'\n\trequest {pathreq.request_id}\n' \
+ f'\tComputing path from {pathreq.source} to {pathreq.destination}\n' \
+ f'\twith path constraint: {[pathreq.source] + pathreq.nodes_list}'
# # adding first node to be clearer on the output
print(f'request {pathreq.request_id}')
print(f'Computing path from {pathreq.source} to {pathreq.destination}')
# adding first node to be clearer on the output
print(f'with path constraint: {[pathreq.source] + pathreq.nodes_list}')
# pathlist[i] contains the whole path information for request i
# last element is a transciver and where the result of the propagation is
@@ -1137,35 +1089,22 @@ def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist, red
# elements to simulate performance, several demands having the same destination
# may use the same transponder for the performance simulation. This is why
# we use deepcopy: to ensure that each propagation is recorded and not overwritten
# reversed path is needed for correct spectrum assignment
if redesign:
# this is the legacy case where network was automatically redesigned using the
# request channel as reference (nb and power used for amplifiers total power out)
reversed_path = []
if pathlist[i]:
reversed_path = find_reversed_path(pathlist[i])
network_nodes_for_redesign = pathlist[i] + reversed_path
network_module.design_network(pathreq, network.subgraph(network_nodes_for_redesign), equipment,
set_connector_losses=False, verbose=False)
total_path = deepcopy(pathlist[i])
msg = msg + f'\n\tComputed path (roadms):{[e.uid for e in total_path if isinstance(e, Roadm)]}'
LOGGER.info(msg)
print(f'Computed path (roadms):{[e.uid for e in total_path if isinstance(e, Roadm)]}')
# for debug
# print(f'{pathreq.baud_rate} {pathreq.power} {pathreq.spacing} {pathreq.nb_channel}')
if total_path:
if pathreq.baud_rate is not None:
# means that at this point the mode was entered/forced by user and thus a
# baud_rate was defined
propagate(total_path, pathreq, equipment)
snr01nm_with_penalty = total_path[-1].snr_01nm - total_path[-1].total_penalty
min_ind = argmin(snr01nm_with_penalty)
if round(snr01nm_with_penalty[min_ind], 2) < pathreq.OSNR + equipment['SI']['default'].sys_margins:
msg = f'\tWarning! Request {pathreq.request_id} computed path from' \
+ f' {pathreq.source} to {pathreq.destination} does not pass with {pathreq.tsp_mode}' \
+ f'\n\tcomputed SNR in 0.1nm = {round(total_path[-1].snr_01nm[min_ind], 2)}'
msg = _penalty_msg(total_path, msg, min_ind) \
+ f'\n\trequired osnr = {pathreq.OSNR}' \
+ f'\n\tsystem margin = {equipment["SI"]["default"].sys_margins}'
total_path = propagate(total_path, pathreq, equipment)
temp_snr01nm = round(mean(total_path[-1].snr+lin2db(pathreq.baud_rate/(12.5e9))), 2)
if temp_snr01nm < pathreq.OSNR:
msg = f'\tWarning! Request {pathreq.request_id} computed path from' +\
f' {pathreq.source} to {pathreq.destination} does not pass with' +\
f' {pathreq.tsp_mode}\n\tcomputedSNR in 0.1nm = {temp_snr01nm} ' +\
f'- required osnr {pathreq.OSNR}'
print(msg)
LOGGER.warning(msg)
pathreq.blocking_reason = 'MODE_NOT_FEASIBLE'
else:
@@ -1185,8 +1124,6 @@ def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist, red
pathreq.OSNR = mode['OSNR']
pathreq.tx_osnr = mode['tx_osnr']
pathreq.bit_rate = mode['bit_rate']
pathreq.penalties = mode['penalties']
pathreq.offset_db = mode['equalization_offset_db']
# other blocking reason should not appear at this point
except AttributeError:
pathreq.baud_rate = mode['baud_rate']
@@ -1195,28 +1132,25 @@ def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist, red
pathreq.OSNR = mode['OSNR']
pathreq.tx_osnr = mode['tx_osnr']
pathreq.bit_rate = mode['bit_rate']
pathreq.penalties = mode['penalties']
pathreq.offset_db = mode['equalization_offset_db']
# reversed path is needed for correct spectrum assignment
reversed_path = find_reversed_path(pathlist[i])
if pathreq.bidir and pathreq.baud_rate is not None:
# Both directions requested, and a feasible mode was found
if pathreq.bidir:
# only propagate if bidir is true, but needs the reversed path anyway for
# correct spectrum assignment
rev_p = deepcopy(reversed_path)
msg = f'\n\tPropagating Z to A direction {pathreq.destination} to {pathreq.source}\n' \
+ f'\tPath (roadms) {[r.uid for r in rev_p if isinstance(r,Roadm)]}\n'
LOGGER.info(msg)
propagate(rev_p, pathreq, equipment)
propagated_reversed_path = rev_p
snr01nm_with_penalty = rev_p[-1].snr_01nm - rev_p[-1].total_penalty
min_ind = argmin(snr01nm_with_penalty)
if round(snr01nm_with_penalty[min_ind], 2) < pathreq.OSNR + equipment['SI']['default'].sys_margins:
msg = f'\tWarning! Request {pathreq.request_id} computed path from' \
+ f' {pathreq.destination} to {pathreq.source} does not pass with {pathreq.tsp_mode}' \
+ f'\n\tcomputed SNR in 0.1nm = {round(rev_p[-1].snr_01nm[min_ind], 2)}'
msg = _penalty_msg(rev_p, msg, min_ind) \
+ f'\n\trequired osnr = {pathreq.OSNR}' \
+ f'\n\tsystem margin = {equipment["SI"]["default"].sys_margins}'
print(f'\n\tPropagating Z to A direction {pathreq.destination} to {pathreq.source}')
print(f'\tPath (roadsm) {[r.uid for r in rev_p if isinstance(r,Roadm)]}\n')
propagated_reversed_path = propagate(rev_p, pathreq, equipment)
temp_snr01nm = round(mean(propagated_reversed_path[-1].snr +\
lin2db(pathreq.baud_rate/(12.5e9))), 2)
if temp_snr01nm < pathreq.OSNR:
msg = f'\tWarning! Request {pathreq.request_id} computed path from' +\
f' {pathreq.source} to {pathreq.destination} does not pass with' +\
f' {pathreq.tsp_mode}\n' +\
f'\tcomputedSNR in 0.1nm = {temp_snr01nm} - required osnr {pathreq.OSNR}'
print(msg)
LOGGER.warning(msg)
# TODO selection of mode should also be on reversed direction !!
if not hasattr(pathreq, 'blocking_reason'):
@@ -1224,8 +1158,9 @@ def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist, red
else:
propagated_reversed_path = []
else:
msg = f'Request {pathreq.request_id}: Total path is empty. No propagation'
LOGGER.warning(msg)
msg = 'Total path is empty. No propagation'
print(msg)
LOGGER.info(msg)
reversed_path = []
propagated_reversed_path = []
@@ -1233,80 +1168,5 @@ def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist, red
reversed_path_res_list.append(reversed_path)
propagated_reversed_path_res_list.append(propagated_reversed_path)
# print to have a nice output
print('')
return path_res_list, reversed_path_res_list, propagated_reversed_path_res_list
def compute_spectrum_slot_vs_bandwidth(bandwidth, spacing, bit_rate, slot_width=0.0125e12):
"""Compute the number of required wavelengths and the M value (number of consumed slots)
Each wavelength consumes one `spacing`, and the result is rounded up to consume a natural number of slots.
>>> compute_spectrum_slot_vs_bandwidth(400e9, 50e9, 200e9)
(2, 8)
"""
number_of_wavelengths = ceil(bandwidth / bit_rate)
total_number_of_slots = ceil(spacing / slot_width) * number_of_wavelengths
return number_of_wavelengths, total_number_of_slots
def _penalty_msg(total_path, msg, min_ind):
"""formatting helper for reporting unfeasible paths
The penalty info are optional, so this checks that penalty exists before creating a message."""
penalty_dict = {
'pdl': 'PDL',
'chromatic_dispersion': 'CD',
'pmd': 'PMD'}
for key, pretty in penalty_dict.items():
if key in total_path[-1].penalties:
msg += f'\n\t{pretty} penalty = {round(total_path[-1].penalties[key][min_ind], 2)}'
else:
msg += f'\n\t{pretty} penalty not evaluated'
return msg
def is_adjacent(oms1, oms2):
""" oms1's egress ROADM is oms2's ingress ROADM
"""
return oms1.el_list[-1] == oms2.el_list[0]
def explicit_path(node_list, source, destination, network):
""" if list of nodes leads to adjacent oms, then means that the path is explicit, and no need to compute
the function returns the explicit path (including source and destination ROADMs)
"""
path_oms = []
for elem in node_list:
if hasattr(elem, 'oms'):
path_oms.append(elem.oms)
if not path_oms:
return None
path_oms = unique_ordered(path_oms)
try:
next_node = next(network.successors(source))
source_roadm = next_node if isinstance(next_node, Roadm) else source
previous_node = next(network.predecessors(destination))
destination_roadm = previous_node if isinstance(previous_node, Roadm) else destination
if not (path_oms[0].el_list[0] == source_roadm and path_oms[-1].el_list[-1] == destination_roadm):
return None
except StopIteration:
return None
oms0 = path_oms[0]
path = [source] + oms0.el_list
for oms in path_oms[1:]:
if not is_adjacent(oms0, oms):
return None
oms0 = oms
path.extend(oms.el_list)
path.append(destination)
return unique_ordered(path)
def find_elements_common_range(el_list: list, equipment: dict) -> List[dict]:
"""Find the common frequency range of amps of a given list of elements (for example an OMS or a path)
If there are no amplifiers in the path, then use the SI
"""
amp_bands = [n.params.bands for n in el_list if isinstance(n, (Edfa, Multiband_amplifier))]
return find_common_range(amp_bands, equipment['SI']['default'].f_min, equipment['SI']['default'].f_max,
equipment['SI']['default'].spacing)

View File

@@ -1,11 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: BSD-3-Clause
# gnpy.topology.spectrum_assignment: spectrum assignment functionality
# Copyright (C) 2025 Telecom Infra Project and GNPy contributors
# see AUTHORS.rst for a list of contributors
"""
gnpy.topology.spectrum_assignment
=================================
@@ -20,31 +15,28 @@ element/oms correspondace
from collections import namedtuple
from logging import getLogger
from gnpy.core.elements import Roadm, Transceiver, Edfa, Multiband_amplifier
from math import ceil
from gnpy.core.elements import Roadm, Transceiver
from gnpy.core.exceptions import ServiceError, SpectrumError
from gnpy.core.utils import order_slots, restore_order
from gnpy.topology.request import compute_spectrum_slot_vs_bandwidth, find_elements_common_range
LOGGER = getLogger(__name__)
GUARDBAND = 25e9
class Bitmap:
"""records the spectrum occupation"""
""" records the spectrum occupation
"""
def __init__(self, f_min, f_max, grid, guardband=GUARDBAND, bitmap=None):
# n is the min index including guardband. Guardband is required to be sure
def __init__(self, f_min, f_max, grid, guardband=0.15e12, bitmap=None):
# n is the min index including guardband. Guardband is require to be sure
# that a channel can be assigned with center frequency fmin (means that its
# slot occupation goes below freq_index_min
n_min = frequency_to_n(f_min, grid)
n_max = frequency_to_n(f_max, grid)
n_min = frequency_to_n(f_min - guardband, grid)
n_max = frequency_to_n(f_max + guardband, grid) - 1
self.n_min = n_min
self.n_max = n_max
self.freq_index_min = frequency_to_n(f_min + guardband)
self.freq_index_max = frequency_to_n(f_max - guardband)
self.freq_index_min = frequency_to_n(f_min)
self.freq_index_max = frequency_to_n(f_max)
self.freq_index = list(range(n_min, n_max + 1))
self.guardband = guardband
if bitmap is None:
self.bitmap = [1] * (n_max - n_min + 1)
elif len(bitmap) == len(self.freq_index):
@@ -53,22 +45,26 @@ class Bitmap:
raise SpectrumError(f'bitmap is not consistant with f_min{f_min} - n: {n_min} and f_max{f_max}- n :{n_max}')
def getn(self, i):
"""converts the n (itu grid) into a local index"""
""" converts the n (itu grid) into a local index
"""
return self.freq_index[i]
def geti(self, nvalue):
"""converts the local index into n (itu grid)"""
""" converts the local index into n (itu grid)
"""
return self.freq_index.index(nvalue)
def insert_left(self, newbitmap):
"""insert bitmap on the left to align oms bitmaps if their start frequencies are different"""
""" insert bitmap on the left to align oms bitmaps if their start frequencies are different
"""
self.bitmap = newbitmap + self.bitmap
temp = list(range(self.n_min - len(newbitmap), self.n_min))
self.freq_index = temp + self.freq_index
self.n_min = self.freq_index[0]
def insert_right(self, newbitmap):
"""insert bitmap on the right to align oms bitmaps if their stop frequencies are different"""
""" insert bitmap on the right to align oms bitmaps if their stop frequencies are different
"""
self.bitmap = self.bitmap + newbitmap
self.freq_index = self.freq_index + list(range(self.n_max, self.n_max + len(newbitmap)))
self.n_max = self.freq_index[-1]
@@ -79,8 +75,8 @@ OMSParams = namedtuple('OMSParams', 'oms_id el_id_list el_list')
class OMS:
"""OMS class is the logical container that represent a link between two adjacent ROADMs and
records the crossed elements and the occupied spectrum
""" OMS class is the logical container that represent a link between two adjacent ROADMs and
records the crossed elements and the occupied spectrum
"""
def __init__(self, *args, **params):
@@ -91,6 +87,7 @@ class OMS:
self.spectrum_bitmap = []
self.nb_channels = 0
self.service_list = []
# TODO
def __str__(self):
return '\n\t'.join([f'{type(self).__name__} {self.oms_id}',
@@ -101,28 +98,36 @@ class OMS:
f'{self.el_id_list[0]} - {self.el_id_list[-1]}', '\n'])
def add_element(self, elem):
"""records oms elements"""
""" records oms elements
"""
self.el_id_list.append(elem.uid)
self.el_list.append(elem)
def update_spectrum(self, f_min, f_max, guardband=GUARDBAND, existing_spectrum=None, grid=0.00625e12):
"""Frequencies expressed in Hz.
Add 150 GHz margin to enable a center channel on f_min
Use ITU-T G694.1 Flexible DWDM grid definition
For the flexible DWDM grid, the allowed frequency slots have a nominal central frequency (in THz) defined by:
193.1 + n × 0.00625 where n is a positive or negative integer including 0
and 0.00625 is the nominal central frequency granularity in THz
and a slot width defined by:
12.5 × m where m is a positive integer and 12.5 is the slot width granularity in GHz.
Any combination of frequency slots is allowed as long as no two frequency slots overlap.
If bitmap is not None, then use it: Bitmap checks its consistency with f_min f_max
else a brand new bitmap is created
def update_spectrum(self, f_min, f_max, guardband=0.15e12, existing_spectrum=None,
grid=0.00625e12):
""" frequencies expressed in Hz
"""
self.spectrum_bitmap = Bitmap(f_min=f_min, f_max=f_max, grid=grid, guardband=guardband,
bitmap=existing_spectrum)
if existing_spectrum is None:
# add some 150 GHz margin to enable a center channel on f_min
# use ITU-T G694.1
# Flexible DWDM grid definition
# For the flexible DWDM grid, the allowed frequency slots have a nominal
# central frequency (in THz) defined by:
# 193.1 + n × 0.00625 where n is a positive or negative integer including 0
# and 0.00625 is the nominal central frequency granularity in THz
# and a slot width defined by:
# 12.5 × m where m is a positive integer and 12.5 is the slot width granularity in
# GHz.
# Any combination of frequency slots is allowed as long as no two frequency
# slots overlap.
# TODO : add explaination on that / parametrize ....
self.spectrum_bitmap = Bitmap(f_min, f_max, grid, guardband)
# print(len(self.spectrum_bitmap.bitmap))
def assign_spectrum(self, nvalue, mvalue):
"""change oms spectrum to mark spectrum assigned"""
""" change oms spectrum to mark spectrum assigned
"""
if not isinstance(nvalue, int):
raise SpectrumError(f'N must be a signed integer, got {nvalue}')
if not isinstance(mvalue, int):
@@ -141,16 +146,16 @@ class OMS:
self.spectrum_bitmap.bitmap[self.spectrum_bitmap.geti(startn):self.spectrum_bitmap.geti(stopn) + 1] = [0] * (stopn - startn + 1)
def add_service(self, service_id, nb_wl):
"""record service and mark spectrum as occupied"""
""" record service and mark spectrum as occupied
"""
self.service_list.append(service_id)
self.nb_channels += nb_wl
def frequency_to_n(freq, grid=0.00625e12):
"""converts frequency into the n value (ITU grid)
reference to Recommendation G.694.1 (02/12), Figure I.3
https://www.itu.int/rec/T-REC-G.694.1-201202-I/en
""" converts frequency into the n value (ITU grid)
reference to Recommendation G.694.1 (02/12), Figure I.3
https://www.itu.int/rec/T-REC-G.694.1-201202-I/en
>>> frequency_to_n(193.1375e12)
6
@@ -162,10 +167,9 @@ def frequency_to_n(freq, grid=0.00625e12):
def nvalue_to_frequency(nvalue, grid=0.00625e12):
"""converts n value into a frequency
reference to Recommendation G.694.1 (02/12), Table 1
https://www.itu.int/rec/T-REC-G.694.1-201202-I/en
""" converts n value into a frequency
reference to Recommendation G.694.1 (02/12), Table 1
https://www.itu.int/rec/T-REC-G.694.1-201202-I/en
>>> nvalue_to_frequency(6)
193137500000000.0
@@ -177,17 +181,17 @@ def nvalue_to_frequency(nvalue, grid=0.00625e12):
def mvalue_to_slots(nvalue, mvalue):
"""convert center n an m into start and stop n"""
""" convert center n an m into start and stop n
"""
startn = nvalue - mvalue
stopn = nvalue + mvalue - 1
return startn, stopn
def slots_to_m(startn, stopn):
"""converts the start and stop n values to the center n and m value
reference to Recommendation G.694.1 (02/12), Figure I.3
https://www.itu.int/rec/T-REC-G.694.1-201202-I/en
""" converts the start and stop n values to the center n and m value
reference to Recommendation G.694.1 (02/12), Figure I.3
https://www.itu.int/rec/T-REC-G.694.1-201202-I/en
>>> nval, mval = slots_to_m(6, 20)
>>> nval
@@ -202,11 +206,10 @@ def slots_to_m(startn, stopn):
def m_to_freq(nvalue, mvalue, grid=0.00625e12):
"""converts m into frequency range
spectrum(13,7) is (193137500000000.0, 193225000000000.0)
reference to Recommendation G.694.1 (02/12), Figure I.3
https://www.itu.int/rec/T-REC-G.694.1-201202-I/en
""" converts m into frequency range
spectrum(13,7) is (193137500000000.0, 193225000000000.0)
reference to Recommendation G.694.1 (02/12), Figure I.3
https://www.itu.int/rec/T-REC-G.694.1-201202-I/en
>>> fstart, fstop = m_to_freq(13, 7)
>>> fstart
@@ -222,7 +225,9 @@ def m_to_freq(nvalue, mvalue, grid=0.00625e12):
def align_grids(oms_list):
"""Used to apply same grid to all oms : same starting n, stop n and slot size. Out of grid slots are set to 0."""
""" used to apply same grid to all oms : same starting n, stop n and slot size
out of grid slots are set to 0
"""
n_min = min([o.spectrum_bitmap.n_min for o in oms_list])
n_max = max([o.spectrum_bitmap.n_max for o in oms_list])
for this_o in oms_list:
@@ -233,60 +238,17 @@ def align_grids(oms_list):
return oms_list
def find_network_freq_range(network, equipment):
"""Find the lowest freq from amps and highest freq among all amps to determine the resulting bitmap
"""
amp_bands = [band for n in network.nodes() if isinstance(n, (Edfa, Multiband_amplifier)) for band in n.params.bands]
min_frequencies = [a['f_min'] for a in amp_bands]
max_frequencies = [a['f_max'] for a in amp_bands]
return min(min_frequencies), max(max_frequencies)
def create_oms_bitmap(oms, equipment, f_min, f_max, guardband, grid):
"""Find the highest low freq from oms amps and lowest high freq among oms amps to determine
the possible bitmap window.
f_min and f_max represent the useable spectrum (not the useable center frequencies)
ie n smaller than frequency_to_n(min_freq, grid) are not useable
"""
n_min = frequency_to_n(f_min, grid)
n_max = frequency_to_n(f_max, grid) - 1
common_range = find_elements_common_range(oms.el_list, equipment)
band0 = common_range[0]
band0_n_min = frequency_to_n(band0['f_min'], grid)
band0_n_max = frequency_to_n(band0['f_max'], grid)
bitmap = [0] * (band0_n_min - n_min) + [1] * (band0_n_max - band0_n_min + 1)
i = 1
while i < len(common_range):
band = common_range[i]
band_n_min = frequency_to_n(band['f_min'], grid)
band_n_max = frequency_to_n(band['f_max'], grid)
bitmap = bitmap + [0] * (band_n_min - band0_n_max - 1) + [1] * (band_n_max - band_n_min + 1)
band0_n_max = band_n_max
i += 1
bitmap = bitmap + [0] * (n_max - band0_n_max)
return bitmap
def build_oms_list(network, equipment):
"""initialization of OMS list in the network
an oms is build reading all intermediate nodes between two adjacent ROADMs
each element within the list is being added an oms and oms_id to record the
oms it belongs to.
the function supports different spectrum width and supposes that the whole network
works with the min range among OMSs
""" initialization of OMS list in the network
an oms is build reading all intermediate nodes between two adjacent ROADMs
each element within the list is being added an oms and oms_id to record the
oms it belongs to.
the function supports different spectrum width and supposes that the whole network
works with the min range among OMSs
"""
oms_id = 0
oms_list = []
# identify all vertices of OMS: of course ROADM, but aso links to external chassis transponders
oms_vertices = [n for n in network.nodes() if isinstance(n, Roadm)] +\
[n for n in network.nodes() if isinstance(n, Transceiver)
and not isinstance(next(network.successors(n)), Roadm)]
# determine the size of the bitmap common to all the omses: find min and max frequencies of all amps
# in the network. These give the band, not the center frequency. Then we use a reference channel
# slot width (50GHz) to set the f_min, f_max
f_min, f_max = find_network_freq_range(network, equipment)
for node in oms_vertices:
for node in [n for n in network.nodes() if isinstance(n, Roadm)]:
for edge in network.edges([node]):
if not isinstance(edge[1], Transceiver):
nd_in = edge[0] # nd_in is a Roadm
@@ -320,9 +282,8 @@ def build_oms_list(network, equipment):
nd_out.oms_list = []
nd_out.oms_list.append(oms_id)
bitmap = create_oms_bitmap(oms, equipment, f_min=f_min, f_max=f_max, guardband=GUARDBAND,
grid=0.00625e12)
oms.update_spectrum(f_min, f_max, guardband=GUARDBAND, grid=0.00625e12, existing_spectrum=bitmap)
oms.update_spectrum(equipment['SI']['default'].f_min,
equipment['SI']['default'].f_max, grid=0.00625e12)
# oms.assign_spectrum(13,7) gives back (193137500000000.0, 193225000000000.0)
# as in the example in the standard
# oms.assign_spectrum(13,7)
@@ -335,9 +296,8 @@ def build_oms_list(network, equipment):
def reversed_oms(oms_list):
"""identifies reversed OMS
only applicable for non parallel OMS
""" identifies reversed OMS
only applicable for non parallel OMS
"""
for oms in oms_list:
has_reversed = False
@@ -362,42 +322,28 @@ def bitmap_sum(band1, band2):
return res
def build_path_oms_id_list(pth):
def spectrum_selection(pth, oms_list, requested_m, requested_n=None):
"""Collects spectrum availability and call the select_candidate function"""
# use indexes instead of ITU-T n values
path_oms = []
for elem in pth:
if not isinstance(elem, Roadm) and not isinstance(elem, Transceiver):
# only edfa, fused and fibers have oms_id attribute
path_oms.append(elem.oms_id)
# remove duplicate oms_id, order is not important
return list(set(path_oms))
path_oms = list(set(path_oms))
# assuming all oms have same freq index
if not path_oms:
candidate = (None, None, None)
return candidate, path_oms
freq_index = oms_list[path_oms[0]].spectrum_bitmap.freq_index
freq_index_min = oms_list[path_oms[0]].spectrum_bitmap.freq_index_min
freq_index_max = oms_list[path_oms[0]].spectrum_bitmap.freq_index_max
def aggregate_oms_bitmap(path_oms, oms_list):
spectrum = oms_list[path_oms[0]].spectrum_bitmap
bitmap = spectrum.bitmap
# assuming all oms have same freq indices
freq_availability = oms_list[path_oms[0]].spectrum_bitmap.bitmap
for oms in path_oms[1:]:
bitmap = bitmap_sum(oms_list[oms].spectrum_bitmap.bitmap, bitmap)
params = {
'oms_id': 0,
'el_id_list': 0,
'el_list': []
}
freq_min = nvalue_to_frequency(spectrum.n_min)
freq_max = nvalue_to_frequency(spectrum.n_max)
aggregate_oms = OMS(**params)
aggregate_oms.update_spectrum(freq_min, freq_max, grid=0.00625e12, guardband=spectrum.guardband,
existing_spectrum=bitmap)
return aggregate_oms
def spectrum_selection(test_oms, requested_m, requested_n=None):
"""Collects spectrum availability and call the select_candidate function"""
freq_index = test_oms.spectrum_bitmap.freq_index
freq_index_min = test_oms.spectrum_bitmap.freq_index_min
freq_index_max = test_oms.spectrum_bitmap.freq_index_max
freq_availability = test_oms.spectrum_bitmap.bitmap
freq_availability = bitmap_sum(oms_list[oms].spectrum_bitmap.bitmap, freq_availability)
if requested_n is None:
# avoid slots reserved on the edge 0.15e-12 on both sides -> 24
candidates = [(freq_index[i] + requested_m, freq_index[i], freq_index[i] + 2 * requested_m - 1)
@@ -408,36 +354,29 @@ def spectrum_selection(test_oms, requested_m, requested_n=None):
candidate = select_candidate(candidates, policy='first_fit')
else:
i = test_oms.spectrum_bitmap.geti(requested_n)
if (freq_availability[i - requested_m:i + requested_m] == [1] * (2 * requested_m)
and freq_index[i - requested_m] >= freq_index_min
i = oms_list[path_oms[0]].spectrum_bitmap.geti(requested_n)
# print(f'N {requested_n} i {i}')
# print(freq_availability[i-m:i+m] )
# print(freq_index[i-m:i+m])
if (freq_availability[i - requested_m:i + requested_m] == [1] * (2 * requested_m) and
freq_index[i - requested_m] >= freq_index_min
and freq_index[i + requested_m - 1] <= freq_index_max):
# candidate is the triplet center_n, startn and stopn
candidate = (requested_n, requested_n - requested_m, requested_n + requested_m - 1)
else:
candidate = (None, None, None)
return candidate
def determine_slot_numbers(test_oms, requested_n, required_m, per_channel_m):
"""determines max availability around requested_n. requested_n should not be None"""
bitmap = test_oms.spectrum_bitmap
freq_index = bitmap.freq_index
freq_index_min = bitmap.freq_index_min
freq_index_max = bitmap.freq_index_max
freq_availability = bitmap.bitmap
center_i = bitmap.geti(requested_n)
i = per_channel_m
while (freq_availability[center_i - i:center_i + i] == [1] * (2 * i)
and freq_index[center_i - i] >= freq_index_min
and freq_index[center_i + i - 1] <= freq_index_max
and i <= required_m):
i += per_channel_m
return i - per_channel_m
# print("coucou11")
# print(candidate)
# print(freq_availability[321:321+2*m])
# a = [i+321 for i in range(2*m)]
# print(a)
# print(candidate)
return candidate, path_oms
def select_candidate(candidates, policy):
"""selects a candidate among all available spectrum"""
""" selects a candidate among all available spectrum
"""
if policy == 'first_fit':
if candidates:
return candidates[0]
@@ -447,112 +386,62 @@ def select_candidate(candidates, policy):
raise ServiceError('Only first_fit spectrum assignment policy is implemented.')
def compute_n_m(required_m, rq, path_oms, oms_list, per_channel_m, policy='first_fit'):
""" based on requested path_bandwidth fill in M=None values with uint values, using per_channel_m
and center frequency, with first fit strategy. The function checks the available spectrum but check
consistencies among M values of the request, but not with other requests.
For example, if request is for 32 slots corresponding to 8 x 4 slots of 32Gbauds channels,
the following frequency slots will result in the following assignment
N = 0, 8, 16, 32 -> 0, 8, 16, 32
M = 8, None, 8, None -> 8, 8, 8, 8
N = 0, 8, 16, 32 -> 0, , 16
M = None, None, 8, None -> 24, , 8
"""
selected_m = []
selected_n = []
remaining_slots_to_serve = required_m
# order slots for the computation: assign biggest m first
rq_N, rq_M, order = order_slots([{'N': n, 'M': m} for n, m in zip(rq.N, rq.M)])
# Create an oms that represents current assignments of all oms listed in path_oms, and test N and M on it.
# If M is defined, checks that proposed N, M is free
test_oms = aggregate_oms_bitmap(path_oms, oms_list)
for n, m in zip(rq_N, rq_M):
if m is not None and n is not None:
# check availability for this n, m
available_slots = determine_slot_numbers(test_oms, n, m, m)
if available_slots == 0:
# if n, m are not feasible, break at this point so that remaining_slots_to_serve stays non zero
# in order to block the request (even if other N, M were feasible)
break
elif m is not None and n is None:
# find a candidate n
n, _, _ = spectrum_selection(test_oms, m, None)
if n is None:
# if no n is feasible for the m, block the request
break
elif m is None and n is not None:
# find a feasible m for this n. If None is found, then block the request
m = determine_slot_numbers(test_oms, n, remaining_slots_to_serve, per_channel_m)
if m == 0 or remaining_slots_to_serve == 0:
break
else:
# if n and m are not defined, try to find a single assignment that fits the remaining slots to serve
# (first fit strategy)
n, _, _ = spectrum_selection(test_oms, remaining_slots_to_serve, None)
if n is None or remaining_slots_to_serve == 0:
break
else:
m = remaining_slots_to_serve
selected_m.append(m)
selected_n.append(n)
test_oms.assign_spectrum(n, m)
remaining_slots_to_serve = remaining_slots_to_serve - m
# re-order selected_m and selected_n according to initial request N, M order, ignoring None values
not_selected = [None for i in range(len(rq_N) - len(selected_n))]
selected_m = restore_order(selected_m + not_selected, order)
selected_n = restore_order(selected_n + not_selected, order)
return selected_n, selected_m, remaining_slots_to_serve
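A rough sketch (assumed helper behaviour, not the repository implementation) of what order_slots / restore_order do for the loop above: serve the largest defined M first, treat M=None entries last, then put the results back into the original request order.
slots = [{'N': 0, 'M': 8}, {'N': 8, 'M': None}, {'N': 16, 'M': 32}]  # hypothetical request
order = sorted(range(len(slots)),
               key=lambda i: -1 if slots[i]['M'] is None else slots[i]['M'],
               reverse=True)
rq_n = [slots[i]['N'] for i in order]  # [16, 0, 8]  -> largest M first, None last
rq_m = [slots[i]['M'] for i in order]  # [32, 8, None]
restored = [None] * len(slots)
for pos, idx in enumerate(order):
    restored[idx] = rq_m[pos]          # [8, None, 32] -> original request order again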
def pth_assign_spectrum(pths, rqs, oms_list, rpths):
"""basic first fit assignment
if reversed path are provided, means that occupation is bidir
""" basic first fit assignment
if reversed path are provided, means that occupation is bidir
"""
for pth, rq, rpth in zip(pths, rqs, rpths):
if hasattr(rq, 'blocking_reason'):
rq.N = None
rq.M = None
else:
# computes the number of channels required for path_bandwidth and the min required nb of slots
# for one channel (corresponds to the spacing)
nb_wl, required_m = compute_spectrum_slot_vs_bandwidth(rq.path_bandwidth,
rq.spacing, rq.bit_rate)
_, per_channel_m = compute_spectrum_slot_vs_bandwidth(rq.bit_rate,
rq.spacing, rq.bit_rate)
# find oms ids that are concerned both by pth and rpth
path_oms = build_path_oms_id_list(pth + rpth)
if getattr(rq, 'M', None) is not None and all(rq.M):
# if all M are well defined: Consistency check that the requested M are enough to carry the nb_wl:
# check that the integer number of per_channel_m carried in each M value is enough to carry nb_wl.
# if not, blocks the demand
nb_channels_of_request = sum([m // per_channel_m for m in rq.M])
# TODO: elaborate a more accurate estimate with nb_wl * min_spacing + possibly guardbands in case of
for i, pth in enumerate(pths):
# computes the number of channels required
try:
if rqs[i].blocking_reason:
rqs[i].blocked = True
rqs[i].N = None
rqs[i].M = 0
except AttributeError:
nb_wl = ceil(rqs[i].path_bandwidth / rqs[i].bit_rate)
# computes the total nb of slots according to requested spacing
# TODO : express superchannels
# assumes that all channels must be grouped
# TODO : enables non contiguous reservation in case of blocking
requested_m = ceil(rqs[i].spacing / 0.0125e12) * nb_wl
if hasattr(rqs[i], 'M') and rqs[i].M is not None:
# Consistency check between the requested M and path_bandwidth
# M value should be bigger than the computed requested_m (simple estimate)
# TODO: elaborate a more accurate estimate with nb_wl * tx_osnr + possibly guardbands in case of
# superchannel closed packing.
if nb_wl > nb_channels_of_request:
rq.N = None
rq.M = None
rq.blocking_reason = 'NOT_ENOUGH_RESERVED_SPECTRUM'
# need to stop here for this request and not go through the spectrum selection process
continue
# Use the req.M even if nb_wl and required_m are smaller.
# first fit strategy: assign as many lambda as possible in the None remaining N, M values
selected_n, selected_m, remaining_slots_to_serve = \
compute_n_m(required_m, rq, path_oms, oms_list, per_channel_m)
# if there are some remaining_slots_to_serve, this means that the provided rq.M and rq.N values were
# not possible. Then do not go through the spectrum assignment process and block the demand
if remaining_slots_to_serve > 0:
rq.N = None
rq.M = None
rq.blocking_reason = 'NO_SPECTRUM'
continue
for oms_elem in path_oms:
for this_n, this_m in zip(selected_n, selected_m):
if this_m is not None:
oms_list[oms_elem].assign_spectrum(this_n, this_m)
oms_list[oms_elem].add_service(rq.request_id, nb_wl)
rq.N = selected_n
rq.M = selected_m
if requested_m <= rqs[i].M:
requested_m = rqs[i].M
else:
# TODO : create a specific blocking reason and following process for this case instead of an exception
raise SpectrumError(f'requested M {rqs[i].M} number of slots for request {rqs[i].request_id} ' +
f'should be greater than {requested_m} to support request ' +
f'{rqs[i].path_bandwidth * 1e-9} Gbit/s with {rqs[i].tsp} {rqs[i].tsp_mode}')
# else: there is no M value so the programs uses the requested_m one
if hasattr(rqs[i], 'N'):
requested_n = rqs[i].N
else:
requested_n = None
(center_n, startn, stopn), path_oms = spectrum_selection(pth + rpths[i], oms_list, requested_m,
requested_n)
# checks that requested_m is fitting startm and stopm
# if not None, center_n and start, stop frequencies are applicable to all oms of pth
# checks that spectrum is not None else indicate blocking reason
if center_n is not None:
# checks that requested_m is fitting startm and stopm
if 2 * requested_m > (stopn - startn + 1):
msg = f'candidate: {(center_n, startn, stopn)} is not consistant ' +\
f'with {requested_m}'
LOGGER.critical(msg)
raise ValueError(msg)
for oms_elem in path_oms:
oms_list[oms_elem].assign_spectrum(center_n, requested_m)
oms_list[oms_elem].add_service(rqs[i].request_id, nb_wl)
rqs[i].blocked = False
rqs[i].N = center_n
rqs[i].M = requested_m
else:
rqs[i].blocked = True
rqs[i].N = None
rqs[i].M = 0
rqs[i].blocking_reason = 'NO_SPECTRUM'
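A worked example (illustrative numbers) of the channel and slot-count arithmetic used above: a 400 Gbit/s demand carried over 100 Gbit/s channels at 50 GHz spacing on the 12.5 GHz flexgrid.
from math import ceil

path_bandwidth, bit_rate, spacing = 400e9, 100e9, 50e9
nb_wl = ceil(path_bandwidth / bit_rate)          # 4 channels
requested_m = ceil(spacing / 0.0125e12) * nb_wl  # 4 slots of 12.5 GHz per channel -> 16
assert (nb_wl, requested_m) == (4, 16)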

gnpy/yang/api-request.json (new file, 2777 lines)

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1,294 +0,0 @@
module ietf-network-topology {
yang-version 1.1;
namespace "urn:ietf:params:xml:ns:yang:ietf-network-topology";
prefix nt;
import ietf-inet-types {
prefix inet;
reference
"RFC 6991: Common YANG Data Types";
}
import ietf-network {
prefix nw;
reference
"RFC 8345: A YANG Data Model for Network Topologies";
}
organization
"IETF I2RS (Interface to the Routing System) Working Group";
contact
"WG Web: <https://datatracker.ietf.org/wg/i2rs/>
WG List: <mailto:i2rs@ietf.org>
Editor: Alexander Clemm
<mailto:ludwig@clemm.org>
Editor: Jan Medved
<mailto:jmedved@cisco.com>
Editor: Robert Varga
<mailto:robert.varga@pantheon.tech>
Editor: Nitin Bahadur
<mailto:nitin_bahadur@yahoo.com>
Editor: Hariharan Ananthakrishnan
<mailto:hari@packetdesign.com>
Editor: Xufeng Liu
<mailto:xufeng.liu.ietf@gmail.com>";
description
"This module defines a common base model for a network topology,
augmenting the base network data model with links to connect
nodes, as well as termination points to terminate links
on nodes.
Copyright (c) 2018 IETF Trust and the persons identified as
authors of the code. All rights reserved.
Redistribution and use in source and binary forms, with or
without modification, is permitted pursuant to, and subject
to the license terms contained in, the Simplified BSD License
set forth in Section 4.c of the IETF Trust's Legal Provisions
Relating to IETF Documents
(https://trustee.ietf.org/license-info).
This version of this YANG module is part of RFC 8345;
see the RFC itself for full legal notices.";
revision 2018-02-26 {
description
"Initial revision.";
reference
"RFC 8345: A YANG Data Model for Network Topologies";
}
typedef link-id {
type inet:uri;
description
"An identifier for a link in a topology. The precise
structure of the link-id will be up to the implementation.
The identifier SHOULD be chosen such that the same link in a
real network topology will always be identified through the
same identifier, even if the data model is instantiated in
separate datastores. An implementation MAY choose to capture
semantics in the identifier -- for example, to indicate the
type of link and/or the type of topology of which the link is
a part.";
}
typedef tp-id {
type inet:uri;
description
"An identifier for termination points on a node. The precise
structure of the tp-id will be up to the implementation.
The identifier SHOULD be chosen such that the same termination
point in a real network topology will always be identified
through the same identifier, even if the data model is
instantiated in separate datastores. An implementation MAY
choose to capture semantics in the identifier -- for example,
to indicate the type of termination point and/or the type of
node that contains the termination point.";
}
grouping link-ref {
description
"This grouping can be used to reference a link in a specific
network. Although it is not used in this module, it is
defined here for the convenience of augmenting modules.";
leaf link-ref {
type leafref {
path "/nw:networks/nw:network[nw:network-id=current()/../"+
"network-ref]/nt:link/nt:link-id";
require-instance false;
}
description
"A type for an absolute reference to a link instance.
(This type should not be used for relative references.
In such a case, a relative path should be used instead.)";
}
uses nw:network-ref;
}
grouping tp-ref {
description
"This grouping can be used to reference a termination point
in a specific node. Although it is not used in this module,
it is defined here for the convenience of augmenting
modules.";
leaf tp-ref {
type leafref {
path "/nw:networks/nw:network[nw:network-id=current()/../"+
"network-ref]/nw:node[nw:node-id=current()/../"+
"node-ref]/nt:termination-point/nt:tp-id";
require-instance false;
}
description
"A type for an absolute reference to a termination point.
(This type should not be used for relative references.
In such a case, a relative path should be used instead.)";
}
uses nw:node-ref;
}
augment "/nw:networks/nw:network" {
description
"Add links to the network data model.";
list link {
key "link-id";
description
"A network link connects a local (source) node and
a remote (destination) node via a set of the respective
node's termination points. It is possible to have several
links between the same source and destination nodes.
Likewise, a link could potentially be re-homed between
termination points. Therefore, in order to ensure that we
would always know to distinguish between links, every link
is identified by a dedicated link identifier. Note that a
link models a point-to-point link, not a multipoint link.";
leaf link-id {
type link-id;
description
"The identifier of a link in the topology.
A link is specific to a topology to which it belongs.";
}
container source {
description
"This container holds the logical source of a particular
link.";
leaf source-node {
type leafref {
path "../../../nw:node/nw:node-id";
require-instance false;
}
description
"Source node identifier. Must be in the same topology.";
}
leaf source-tp {
type leafref {
path "../../../nw:node[nw:node-id=current()/../"+
"source-node]/termination-point/tp-id";
require-instance false;
}
description
"This termination point is located within the source node
and terminates the link.";
}
}
container destination {
description
"This container holds the logical destination of a
particular link.";
leaf dest-node {
type leafref {
path "../../../nw:node/nw:node-id";
require-instance false;
}
description
"Destination node identifier. Must be in the same
network.";
}
leaf dest-tp {
type leafref {
path "../../../nw:node[nw:node-id=current()/../"+
"dest-node]/termination-point/tp-id";
require-instance false;
}
description
"This termination point is located within the
destination node and terminates the link.";
}
}
list supporting-link {
key "network-ref link-ref";
description
"Identifies the link or links on which this link depends.";
leaf network-ref {
type leafref {
path "../../../nw:supporting-network/nw:network-ref";
require-instance false;
}
description
"This leaf identifies in which underlay topology
the supporting link is present.";
}
leaf link-ref {
type leafref {
path "/nw:networks/nw:network[nw:network-id=current()/"+
"../network-ref]/link/link-id";
require-instance false;
}
description
"This leaf identifies a link that is a part
of this link's underlay. Reference loops in which
a link identifies itself as its underlay, either
directly or transitively, are not allowed.";
}
}
}
}
augment "/nw:networks/nw:network/nw:node" {
description
"Augments termination points that terminate links.
Termination points can ultimately be mapped to interfaces.";
list termination-point {
key "tp-id";
description
"A termination point can terminate a link.
Depending on the type of topology, a termination point
could, for example, refer to a port or an interface.";
leaf tp-id {
type tp-id;
description
"Termination point identifier.";
}
list supporting-termination-point {
key "network-ref node-ref tp-ref";
description
"This list identifies any termination points on which a
given termination point depends or onto which it maps.
Those termination points will themselves be contained
in a supporting node. This dependency information can be
inferred from the dependencies between links. Therefore,
this item is not separately configurable. Hence, no
corresponding constraint needs to be articulated.
The corresponding information is simply provided by the
implementing system.";
leaf network-ref {
type leafref {
path "../../../nw:supporting-node/nw:network-ref";
require-instance false;
}
description
"This leaf identifies in which topology the
supporting termination point is present.";
}
leaf node-ref {
type leafref {
path "../../../nw:supporting-node/nw:node-ref";
require-instance false;
}
description
"This leaf identifies in which node the supporting
termination point is present.";
}
leaf tp-ref {
type leafref {
path "/nw:networks/nw:network[nw:network-id=current()/"+
"../network-ref]/nw:node[nw:node-id=current()/../"+
"node-ref]/termination-point/tp-id";
require-instance false;
}
description
"Reference to the underlay node (the underlay node must
be in a different topology).";
}
}
}
}
}


@@ -1,192 +0,0 @@
module ietf-network {
yang-version 1.1;
namespace "urn:ietf:params:xml:ns:yang:ietf-network";
prefix nw;
import ietf-inet-types {
prefix inet;
reference
"RFC 6991: Common YANG Data Types";
}
organization
"IETF I2RS (Interface to the Routing System) Working Group";
contact
"WG Web: <https://datatracker.ietf.org/wg/i2rs/>
WG List: <mailto:i2rs@ietf.org>
Editor: Alexander Clemm
<mailto:ludwig@clemm.org>
Editor: Jan Medved
<mailto:jmedved@cisco.com>
Editor: Robert Varga
<mailto:robert.varga@pantheon.tech>
Editor: Nitin Bahadur
<mailto:nitin_bahadur@yahoo.com>
Editor: Hariharan Ananthakrishnan
<mailto:hari@packetdesign.com>
Editor: Xufeng Liu
<mailto:xufeng.liu.ietf@gmail.com>";
description
"This module defines a common base data model for a collection
of nodes in a network. Node definitions are further used
in network topologies and inventories.
Copyright (c) 2018 IETF Trust and the persons identified as
authors of the code. All rights reserved.
Redistribution and use in source and binary forms, with or
without modification, is permitted pursuant to, and subject
to the license terms contained in, the Simplified BSD License
set forth in Section 4.c of the IETF Trust's Legal Provisions
Relating to IETF Documents
(https://trustee.ietf.org/license-info).
This version of this YANG module is part of RFC 8345;
see the RFC itself for full legal notices.";
revision 2018-02-26 {
description
"Initial revision.";
reference
"RFC 8345: A YANG Data Model for Network Topologies";
}
typedef node-id {
type inet:uri;
description
"Identifier for a node. The precise structure of the node-id
will be up to the implementation. For example, some
implementations MAY pick a URI that includes the network-id
as part of the path. The identifier SHOULD be chosen
such that the same node in a real network topology will
always be identified through the same identifier, even if
the data model is instantiated in separate datastores. An
implementation MAY choose to capture semantics in the
identifier -- for example, to indicate the type of node.";
}
typedef network-id {
type inet:uri;
description
"Identifier for a network. The precise structure of the
network-id will be up to the implementation. The identifier
SHOULD be chosen such that the same network will always be
identified through the same identifier, even if the data model
is instantiated in separate datastores. An implementation MAY
choose to capture semantics in the identifier -- for example,
to indicate the type of network.";
}
grouping network-ref {
description
"Contains the information necessary to reference a network --
for example, an underlay network.";
leaf network-ref {
type leafref {
path "/nw:networks/nw:network/nw:network-id";
require-instance false;
}
description
"Used to reference a network -- for example, an underlay
network.";
}
}
grouping node-ref {
description
"Contains the information necessary to reference a node.";
leaf node-ref {
type leafref {
path "/nw:networks/nw:network[nw:network-id=current()/../"+
"network-ref]/nw:node/nw:node-id";
require-instance false;
}
description
"Used to reference a node.
Nodes are identified relative to the network that
contains them.";
}
uses network-ref;
}
container networks {
description
"Serves as a top-level container for a list of networks.";
list network {
key "network-id";
description
"Describes a network.
A network typically contains an inventory of nodes,
topological information (augmented through the
network-topology data model), and layering information.";
leaf network-id {
type network-id;
description
"Identifies a network.";
}
container network-types {
description
"Serves as an augmentation target.
The network type is indicated through corresponding
presence containers augmented into this container.";
}
list supporting-network {
key "network-ref";
description
"An underlay network, used to represent layered network
topologies.";
leaf network-ref {
type leafref {
path "/nw:networks/nw:network/nw:network-id";
require-instance false;
}
description
"References the underlay network.";
}
}
list node {
key "node-id";
description
"The inventory of nodes of this network.";
leaf node-id {
type node-id;
description
"Uniquely identifies a node within the containing
network.";
}
list supporting-node {
key "network-ref node-ref";
description
"Represents another node that is in an underlay network
and that supports this node. Used to represent layering
structure.";
leaf network-ref {
type leafref {
path "../../../nw:supporting-network/nw:network-ref";
require-instance false;
}
description
"References the underlay network of which the
underlay node is a part.";
}
leaf node-ref {
type leafref {
path "/nw:networks/nw:network/nw:node/nw:node-id";
require-instance false;
}
description
"References the underlay node itself.";
}
}
}
}
}
}

File diff suppressed because it is too large


@@ -1,771 +0,0 @@
module ietf-routing-types {
namespace "urn:ietf:params:xml:ns:yang:ietf-routing-types";
prefix rt-types;
import ietf-yang-types {
prefix yang;
}
import ietf-inet-types {
prefix inet;
}
organization
"IETF RTGWG - Routing Area Working Group";
contact
"WG Web: <https://datatracker.ietf.org/wg/rtgwg/>
WG List: <mailto:rtgwg@ietf.org>
Editors: Xufeng Liu
<mailto:Xufeng_Liu@jabail.com>
Yingzhen Qu
<mailto:yingzhen.qu@huawei.com>
Acee Lindem
<mailto:acee@cisco.com>
Christian Hopps
<mailto:chopps@chopps.org>
Lou Berger
<mailto:lberger@labn.com>";
description
"This module contains a collection of YANG data types
considered generally useful for routing protocols.
Copyright (c) 2017 IETF Trust and the persons
identified as authors of the code. All rights reserved.
Redistribution and use in source and binary forms, with or
without modification, is permitted pursuant to, and subject
to the license terms contained in, the Simplified BSD License
set forth in Section 4.c of the IETF Trust's Legal Provisions
Relating to IETF Documents
(https://trustee.ietf.org/license-info).
This version of this YANG module is part of RFC 8294; see
the RFC itself for full legal notices.";
revision 2017-12-04 {
description "Initial revision.";
reference
"RFC 8294: Common YANG Data Types for the Routing Area.
Section 3.";
}
/*** Identities related to MPLS/GMPLS ***/
identity mpls-label-special-purpose-value {
description
"Base identity for deriving identities describing
special-purpose Multiprotocol Label Switching (MPLS) label
values.";
reference
"RFC 7274: Allocating and Retiring Special-Purpose MPLS
Labels.";
}
identity ipv4-explicit-null-label {
base mpls-label-special-purpose-value;
description
"This identity represents the IPv4 Explicit NULL Label.";
reference
"RFC 3032: MPLS Label Stack Encoding. Section 2.1.";
}
identity router-alert-label {
base mpls-label-special-purpose-value;
description
"This identity represents the Router Alert Label.";
reference
"RFC 3032: MPLS Label Stack Encoding. Section 2.1.";
}
identity ipv6-explicit-null-label {
base mpls-label-special-purpose-value;
description
"This identity represents the IPv6 Explicit NULL Label.";
reference
"RFC 3032: MPLS Label Stack Encoding. Section 2.1.";
}
identity implicit-null-label {
base mpls-label-special-purpose-value;
description
"This identity represents the Implicit NULL Label.";
reference
"RFC 3032: MPLS Label Stack Encoding. Section 2.1.";
}
identity entropy-label-indicator {
base mpls-label-special-purpose-value;
description
"This identity represents the Entropy Label Indicator.";
reference
"RFC 6790: The Use of Entropy Labels in MPLS Forwarding.
Sections 3 and 10.1.";
}
identity gal-label {
base mpls-label-special-purpose-value;
description
"This identity represents the Generic Associated Channel
(G-ACh) Label (GAL).";
reference
"RFC 5586: MPLS Generic Associated Channel.
Sections 4 and 10.";
}
identity oam-alert-label {
base mpls-label-special-purpose-value;
description
"This identity represents the OAM Alert Label.";
reference
"RFC 3429: Assignment of the 'OAM Alert Label' for
Multiprotocol Label Switching Architecture (MPLS)
Operation and Maintenance (OAM) Functions.
Sections 3 and 6.";
}
identity extension-label {
base mpls-label-special-purpose-value;
description
"This identity represents the Extension Label.";
reference
"RFC 7274: Allocating and Retiring Special-Purpose MPLS
Labels. Sections 3.1 and 5.";
}
/*** Collection of types related to routing ***/
typedef router-id {
type yang:dotted-quad;
description
"A 32-bit number in the dotted-quad format assigned to each
router. This number uniquely identifies the router within
an Autonomous System.";
}
/*** Collection of types related to VPNs ***/
typedef route-target {
type string {
pattern
'(0:(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0):(429496729[0-5]|'
+ '42949672[0-8][0-9]|'
+ '4294967[01][0-9]{2}|429496[0-6][0-9]{3}|'
+ '42949[0-5][0-9]{4}|'
+ '4294[0-8][0-9]{5}|429[0-3][0-9]{6}|'
+ '42[0-8][0-9]{7}|4[01][0-9]{8}|'
+ '[1-3][0-9]{9}|[1-9][0-9]{0,8}|0))|'
+ '(1:((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|'
+ '25[0-5])\.){3}([0-9]|[1-9][0-9]|'
+ '1[0-9]{2}|2[0-4][0-9]|25[0-5])):(6553[0-5]|'
+ '655[0-2][0-9]|'
+ '65[0-4][0-9]{2}|6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0))|'
+ '(2:(429496729[0-5]|42949672[0-8][0-9]|'
+ '4294967[01][0-9]{2}|'
+ '429496[0-6][0-9]{3}|42949[0-5][0-9]{4}|'
+ '4294[0-8][0-9]{5}|'
+ '429[0-3][0-9]{6}|42[0-8][0-9]{7}|4[01][0-9]{8}|'
+ '[1-3][0-9]{9}|[1-9][0-9]{0,8}|0):'
+ '(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0))|'
+ '(6(:[a-fA-F0-9]{2}){6})|'
+ '(([3-57-9a-fA-F]|[1-9a-fA-F][0-9a-fA-F]{1,3}):'
+ '[0-9a-fA-F]{1,12})';
}
description
"A Route Target is an 8-octet BGP extended community
initially identifying a set of sites in a BGP VPN
(RFC 4364). However, it has since taken on a more general
role in BGP route filtering. A Route Target consists of two
or three fields: a 2-octet Type field, an administrator
field, and, optionally, an assigned number field.
According to the data formats for types 0, 1, 2, and 6 as
defined in RFC 4360, RFC 5668, and RFC 7432, the encoding
pattern is defined as:
0:2-octet-asn:4-octet-number
1:4-octet-ipv4addr:2-octet-number
2:4-octet-asn:2-octet-number
6:6-octet-mac-address
Additionally, a generic pattern is defined for future
Route Target types:
2-octet-other-hex-number:6-octet-hex-number
Some valid examples are 0:100:100, 1:1.1.1.1:100,
2:1234567890:203, and 6:26:00:08:92:78:00.";
reference
"RFC 4360: BGP Extended Communities Attribute.
RFC 4364: BGP/MPLS IP Virtual Private Networks (VPNs).
RFC 5668: 4-Octet AS Specific BGP Extended Community.
RFC 7432: BGP MPLS-Based Ethernet VPN.";
}
typedef ipv6-route-target {
type string {
pattern
'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}'
+ '((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|'
+ '(((25[0-5]|2[0-4][0-9]|1[0-9]{2}|[1-9]?[0-9])\.){3}'
+ '(25[0-5]|2[0-4][0-9]|1[0-9]{2}|[1-9]?[0-9])))'
+ ':'
+ '(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0)';
pattern '((([^:]+:){6}(([^:]+:[^:]+)|(.*\..*)))|'
+ '((([^:]+:)*[^:]+)?::(([^:]+:)*[^:]+)?))'
+ ':'
+ '(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0)';
}
description
"An IPv6 Route Target is a 20-octet BGP IPv6 Address
Specific Extended Community serving the same function
as a standard 8-octet Route Target, except that it only
allows an IPv6 address as the global administrator.
The format is <ipv6-address:2-octet-number>.
Two valid examples are 2001:db8::1:6544 and
2001:db8::5eb1:791:6b37:17958.";
reference
"RFC 5701: IPv6 Address Specific BGP Extended Community
Attribute.";
}
typedef route-target-type {
type enumeration {
enum import {
value 0;
description
"The Route Target applies to route import.";
}
enum export {
value 1;
description
"The Route Target applies to route export.";
}
enum both {
value 2;
description
"The Route Target applies to both route import and
route export.";
}
}
description
"Indicates the role a Route Target takes in route filtering.";
reference
"RFC 4364: BGP/MPLS IP Virtual Private Networks (VPNs).";
}
typedef route-distinguisher {
type string {
pattern
'(0:(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0):(429496729[0-5]|'
+ '42949672[0-8][0-9]|'
+ '4294967[01][0-9]{2}|429496[0-6][0-9]{3}|'
+ '42949[0-5][0-9]{4}|'
+ '4294[0-8][0-9]{5}|429[0-3][0-9]{6}|'
+ '42[0-8][0-9]{7}|4[01][0-9]{8}|'
+ '[1-3][0-9]{9}|[1-9][0-9]{0,8}|0))|'
+ '(1:((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|'
+ '25[0-5])\.){3}([0-9]|[1-9][0-9]|'
+ '1[0-9]{2}|2[0-4][0-9]|25[0-5])):(6553[0-5]|'
+ '655[0-2][0-9]|'
+ '65[0-4][0-9]{2}|6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0))|'
+ '(2:(429496729[0-5]|42949672[0-8][0-9]|'
+ '4294967[01][0-9]{2}|'
+ '429496[0-6][0-9]{3}|42949[0-5][0-9]{4}|'
+ '4294[0-8][0-9]{5}|'
+ '429[0-3][0-9]{6}|42[0-8][0-9]{7}|4[01][0-9]{8}|'
+ '[1-3][0-9]{9}|[1-9][0-9]{0,8}|0):'
+ '(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0))|'
+ '(6(:[a-fA-F0-9]{2}){6})|'
+ '(([3-57-9a-fA-F]|[1-9a-fA-F][0-9a-fA-F]{1,3}):'
+ '[0-9a-fA-F]{1,12})';
}
description
"A Route Distinguisher is an 8-octet value used to
distinguish routes from different BGP VPNs (RFC 4364).
A Route Distinguisher will have the same format as a
Route Target as per RFC 4360 and will consist of
two or three fields: a 2-octet Type field, an administrator
field, and, optionally, an assigned number field.
According to the data formats for types 0, 1, 2, and 6 as
defined in RFC 4360, RFC 5668, and RFC 7432, the encoding
pattern is defined as:
0:2-octet-asn:4-octet-number
1:4-octet-ipv4addr:2-octet-number
2:4-octet-asn:2-octet-number
6:6-octet-mac-address
Additionally, a generic pattern is defined for future
route discriminator types:
2-octet-other-hex-number:6-octet-hex-number
Some valid examples are 0:100:100, 1:1.1.1.1:100,
2:1234567890:203, and 6:26:00:08:92:78:00.";
reference
"RFC 4360: BGP Extended Communities Attribute.
RFC 4364: BGP/MPLS IP Virtual Private Networks (VPNs).
RFC 5668: 4-Octet AS Specific BGP Extended Community.
RFC 7432: BGP MPLS-Based Ethernet VPN.";
}
typedef route-origin {
type string {
pattern
'(0:(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0):(429496729[0-5]|'
+ '42949672[0-8][0-9]|'
+ '4294967[01][0-9]{2}|429496[0-6][0-9]{3}|'
+ '42949[0-5][0-9]{4}|'
+ '4294[0-8][0-9]{5}|429[0-3][0-9]{6}|'
+ '42[0-8][0-9]{7}|4[01][0-9]{8}|'
+ '[1-3][0-9]{9}|[1-9][0-9]{0,8}|0))|'
+ '(1:((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|'
+ '25[0-5])\.){3}([0-9]|[1-9][0-9]|'
+ '1[0-9]{2}|2[0-4][0-9]|25[0-5])):(6553[0-5]|'
+ '655[0-2][0-9]|'
+ '65[0-4][0-9]{2}|6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0))|'
+ '(2:(429496729[0-5]|42949672[0-8][0-9]|'
+ '4294967[01][0-9]{2}|'
+ '429496[0-6][0-9]{3}|42949[0-5][0-9]{4}|'
+ '4294[0-8][0-9]{5}|'
+ '429[0-3][0-9]{6}|42[0-8][0-9]{7}|4[01][0-9]{8}|'
+ '[1-3][0-9]{9}|[1-9][0-9]{0,8}|0):'
+ '(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0))|'
+ '(6(:[a-fA-F0-9]{2}){6})|'
+ '(([3-57-9a-fA-F]|[1-9a-fA-F][0-9a-fA-F]{1,3}):'
+ '[0-9a-fA-F]{1,12})';
}
description
"A Route Origin is an 8-octet BGP extended community
identifying the set of sites where the BGP route
originated (RFC 4364). A Route Origin will have the same
format as a Route Target as per RFC 4360 and will consist
of two or three fields: a 2-octet Type field, an
administrator field, and, optionally, an assigned number
field.
According to the data formats for types 0, 1, 2, and 6 as
defined in RFC 4360, RFC 5668, and RFC 7432, the encoding
pattern is defined as:
0:2-octet-asn:4-octet-number
1:4-octet-ipv4addr:2-octet-number
2:4-octet-asn:2-octet-number
6:6-octet-mac-address
Additionally, a generic pattern is defined for future
Route Origin types:
2-octet-other-hex-number:6-octet-hex-number
Some valid examples are 0:100:100, 1:1.1.1.1:100,
2:1234567890:203, and 6:26:00:08:92:78:00.";
reference
"RFC 4360: BGP Extended Communities Attribute.
RFC 4364: BGP/MPLS IP Virtual Private Networks (VPNs).
RFC 5668: 4-Octet AS Specific BGP Extended Community.
RFC 7432: BGP MPLS-Based Ethernet VPN.";
}
typedef ipv6-route-origin {
type string {
pattern
'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}'
+ '((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|'
+ '(((25[0-5]|2[0-4][0-9]|1[0-9]{2}|[1-9]?[0-9])\.){3}'
+ '(25[0-5]|2[0-4][0-9]|1[0-9]{2}|[1-9]?[0-9])))'
+ ':'
+ '(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0)';
pattern '((([^:]+:){6}(([^:]+:[^:]+)|(.*\..*)))|'
+ '((([^:]+:)*[^:]+)?::(([^:]+:)*[^:]+)?))'
+ ':'
+ '(6553[0-5]|655[0-2][0-9]|65[0-4][0-9]{2}|'
+ '6[0-4][0-9]{3}|'
+ '[1-5][0-9]{4}|[1-9][0-9]{0,3}|0)';
}
description
"An IPv6 Route Origin is a 20-octet BGP IPv6 Address
Specific Extended Community serving the same function
as a standard 8-octet route, except that it only allows
an IPv6 address as the global administrator. The format
is <ipv6-address:2-octet-number>.
Two valid examples are 2001:db8::1:6544 and
2001:db8::5eb1:791:6b37:17958.";
reference
"RFC 5701: IPv6 Address Specific BGP Extended Community
Attribute.";
}
/*** Collection of types common to multicast ***/
typedef ipv4-multicast-group-address {
type inet:ipv4-address {
pattern '(2((2[4-9])|(3[0-9]))\.).*';
}
description
"This type represents an IPv4 multicast group address,
which is in the range of 224.0.0.0 to 239.255.255.255.";
reference
"RFC 1112: Host Extensions for IP Multicasting.";
}
typedef ipv6-multicast-group-address {
type inet:ipv6-address {
pattern '(([fF]{2}[0-9a-fA-F]{2}):).*';
}
description
"This type represents an IPv6 multicast group address,
which is in the range of ff00::/8.";
reference
"RFC 4291: IP Version 6 Addressing Architecture. Section 2.7.
RFC 7346: IPv6 Multicast Address Scopes.";
}
typedef ip-multicast-group-address {
type union {
type ipv4-multicast-group-address;
type ipv6-multicast-group-address;
}
description
"This type represents a version-neutral IP multicast group
address. The format of the textual representation implies
the IP version.";
}
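As a quick illustration of the multicast address ranges described above (a Python standard-library check, outside the YANG model itself):
import ipaddress

assert ipaddress.ip_address('239.255.255.255').is_multicast  # 224.0.0.0/4
assert ipaddress.ip_address('ff02::1').is_multicast          # ff00::/8
assert not ipaddress.ip_address('192.0.2.1').is_multicast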
typedef ipv4-multicast-source-address {
type union {
type enumeration {
enum * {
description
"Any source address.";
}
}
type inet:ipv4-address;
}
description
"Multicast source IPv4 address type.";
}
typedef ipv6-multicast-source-address {
type union {
type enumeration {
enum * {
description
"Any source address.";
}
}
type inet:ipv6-address;
}
description
"Multicast source IPv6 address type.";
}
/*** Collection of types common to protocols ***/
typedef bandwidth-ieee-float32 {
type string {
pattern
'0[xX](0((\.0?)?[pP](\+)?0?|(\.0?))|'
+ '1(\.([0-9a-fA-F]{0,5}[02468aAcCeE]?)?)?[pP](\+)?(12[0-7]|'
+ '1[01][0-9]|0?[0-9]?[0-9])?)';
}
description
"Bandwidth in IEEE 754 floating-point 32-bit binary format:
(-1)**(S) * 2**(Exponent-127) * (1 + Fraction),
where Exponent uses 8 bits and Fraction uses 23 bits.
The units are octets per second.
The encoding format is the external hexadecimal-significant
character sequences specified in IEEE 754 and ISO/IEC C99.
The format is restricted to be normalized, non-negative, and
non-fraction: 0x1.hhhhhhp{+}d, 0X1.HHHHHHP{+}D, or 0x0p0,
where 'h' and 'H' are hexadecimal digits and 'd' and 'D' are
integers in the range of [0..127].
When six hexadecimal digits are used for 'hhhhhh' or
'HHHHHH', the least significant digit must be an even
number. 'x' and 'X' indicate hexadecimal; 'p' and 'P'
indicate a power of two. Some examples are 0x0p0, 0x1p10,
and 0x1.abcde2p+20.";
reference
"IEEE Std 754-2008: IEEE Standard for Floating-Point
Arithmetic.
ISO/IEC C99: Information technology - Programming
Languages - C.";
}
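Illustration only: Python's float.fromhex parses the hexadecimal-significand examples quoted in the description above (the YANG pattern itself is stricter about the significand digits):
assert float.fromhex('0x0p0') == 0.0
assert float.fromhex('0x1p10') == 1024.0     # 2**10 octets per second
bandwidth = float.fromhex('0x1.abcde2p+20')  # a normalized value, roughly 1.75e6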
typedef link-access-type {
type enumeration {
enum broadcast {
description
"Specify broadcast multi-access network.";
}
enum non-broadcast-multiaccess {
description
"Specify Non-Broadcast Multi-Access (NBMA) network.";
}
enum point-to-multipoint {
description
"Specify point-to-multipoint network.";
}
enum point-to-point {
description
"Specify point-to-point network.";
}
}
description
"Link access type.";
}
typedef timer-multiplier {
type uint8;
description
"The number of timer value intervals that should be
interpreted as a failure.";
}
typedef timer-value-seconds16 {
type union {
type uint16 {
range "1..65535";
}
type enumeration {
enum infinity {
description
"The timer is set to infinity.";
}
enum not-set {
description
"The timer is not set.";
}
}
}
units "seconds";
description
"Timer value type, in seconds (16-bit range).";
}
typedef timer-value-seconds32 {
type union {
type uint32 {
range "1..4294967295";
}
type enumeration {
enum infinity {
description
"The timer is set to infinity.";
}
enum not-set {
description
"The timer is not set.";
}
}
}
units "seconds";
description
"Timer value type, in seconds (32-bit range).";
}
typedef timer-value-milliseconds {
type union {
type uint32 {
range "1..4294967295";
}
type enumeration {
enum infinity {
description
"The timer is set to infinity.";
}
enum not-set {
description
"The timer is not set.";
}
}
}
units "milliseconds";
description
"Timer value type, in milliseconds.";
}
typedef percentage {
type uint8 {
range "0..100";
}
description
"Integer indicating a percentage value.";
}
typedef timeticks64 {
type uint64;
description
"This type is based on the timeticks type defined in
RFC 6991, but with 64-bit width. It represents the time,
modulo 2^64, in hundredths of a second between two epochs.";
reference
"RFC 6991: Common YANG Data Types.";
}
typedef uint24 {
type uint32 {
range "0..16777215";
}
description
"24-bit unsigned integer.";
}
/*** Collection of types related to MPLS/GMPLS ***/
typedef generalized-label {
type binary;
description
"Generalized Label. Nodes sending and receiving the
Generalized Label are aware of the link-specific
label context and type.";
reference
"RFC 3471: Generalized Multi-Protocol Label Switching (GMPLS)
Signaling Functional Description. Section 3.2.";
}
typedef mpls-label-special-purpose {
type identityref {
base mpls-label-special-purpose-value;
}
description
"This type represents the special-purpose MPLS label values.";
reference
"RFC 3032: MPLS Label Stack Encoding.
RFC 7274: Allocating and Retiring Special-Purpose MPLS
Labels.";
}
typedef mpls-label-general-use {
type uint32 {
range "16..1048575";
}
description
"The 20-bit label value in an MPLS label stack as specified
in RFC 3032. This label value does not include the
encodings of Traffic Class and TTL (Time to Live).
The label range specified by this type is for general use,
with special-purpose MPLS label values excluded.";
reference
"RFC 3032: MPLS Label Stack Encoding.";
}
typedef mpls-label {
type union {
type mpls-label-special-purpose;
type mpls-label-general-use;
}
description
"The 20-bit label value in an MPLS label stack as specified
in RFC 3032. This label value does not include the
encodings of Traffic Class and TTL.";
reference
"RFC 3032: MPLS Label Stack Encoding.";
}
/*** Groupings **/
grouping mpls-label-stack {
description
"This grouping specifies an MPLS label stack. The label
stack is encoded as a list of label stack entries. The
list key is an identifier that indicates the relative
ordering of each entry, with the lowest-value identifier
corresponding to the top of the label stack.";
container mpls-label-stack {
description
"Container for a list of MPLS label stack entries.";
list entry {
key "id";
description
"List of MPLS label stack entries.";
leaf id {
type uint8;
description
"Identifies the entry in a sequence of MPLS label
stack entries. An entry with a smaller identifier
value precedes an entry with a larger identifier
value in the label stack. The value of this ID has
no semantic meaning other than relative ordering
and referencing the entry.";
}
leaf label {
type rt-types:mpls-label;
description
"Label value.";
}
leaf ttl {
type uint8;
description
"Time to Live (TTL).";
reference
"RFC 3032: MPLS Label Stack Encoding.";
}
leaf traffic-class {
type uint8 {
range "0..7";
}
description
"Traffic Class (TC).";
reference
"RFC 5462: Multiprotocol Label Switching (MPLS) Label
Stack Entry: 'EXP' Field Renamed to 'Traffic Class'
Field.";
}
}
}
}
grouping vpn-route-targets {
description
"A grouping that specifies Route Target import-export rules
used in BGP-enabled VPNs.";
reference
"RFC 4364: BGP/MPLS IP Virtual Private Networks (VPNs).
RFC 4664: Framework for Layer 2 Virtual Private Networks
(L2VPNs).";
list vpn-target {
key "route-target";
description
"List of Route Targets.";
leaf route-target {
type rt-types:route-target;
description
"Route Target value.";
}
leaf route-target-type {
type rt-types:route-target-type;
mandatory true;
description
"Import/export type of the Route Target.";
}
}
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1,474 +0,0 @@
module ietf-yang-types {
namespace "urn:ietf:params:xml:ns:yang:ietf-yang-types";
prefix "yang";
organization
"IETF NETMOD (NETCONF Data Modeling Language) Working Group";
contact
"WG Web: <http://tools.ietf.org/wg/netmod/>
WG List: <mailto:netmod@ietf.org>
WG Chair: David Kessens
<mailto:david.kessens@nsn.com>
WG Chair: Juergen Schoenwaelder
<mailto:j.schoenwaelder@jacobs-university.de>
Editor: Juergen Schoenwaelder
<mailto:j.schoenwaelder@jacobs-university.de>";
description
"This module contains a collection of generally useful derived
YANG data types.
Copyright (c) 2013 IETF Trust and the persons identified as
authors of the code. All rights reserved.
Redistribution and use in source and binary forms, with or
without modification, is permitted pursuant to, and subject
to the license terms contained in, the Simplified BSD License
set forth in Section 4.c of the IETF Trust's Legal Provisions
Relating to IETF Documents
(http://trustee.ietf.org/license-info).
This version of this YANG module is part of RFC 6991; see
the RFC itself for full legal notices.";
revision 2013-07-15 {
description
"This revision adds the following new data types:
- yang-identifier
- hex-string
- uuid
- dotted-quad";
reference
"RFC 6991: Common YANG Data Types";
}
revision 2010-09-24 {
description
"Initial revision.";
reference
"RFC 6021: Common YANG Data Types";
}
/*** collection of counter and gauge types ***/
typedef counter32 {
type uint32;
description
"The counter32 type represents a non-negative integer
that monotonically increases until it reaches a
maximum value of 2^32-1 (4294967295 decimal), when it
wraps around and starts increasing again from zero.
Counters have no defined 'initial' value, and thus, a
single value of a counter has (in general) no information
content. Discontinuities in the monotonically increasing
value normally occur at re-initialization of the
management system, and at other times as specified in the
description of a schema node using this type. If such
other times can occur, for example, the creation of
a schema node of type counter32 at times other than
re-initialization, then a corresponding schema node
should be defined, with an appropriate type, to indicate
the last discontinuity.
The counter32 type should not be used for configuration
schema nodes. A default statement SHOULD NOT be used in
combination with the type counter32.
In the value set and its semantics, this type is equivalent
to the Counter32 type of the SMIv2.";
reference
"RFC 2578: Structure of Management Information Version 2
(SMIv2)";
}
typedef zero-based-counter32 {
type yang:counter32;
default "0";
description
"The zero-based-counter32 type represents a counter32
that has the defined 'initial' value zero.
A schema node of this type will be set to zero (0) on creation
and will thereafter increase monotonically until it reaches
a maximum value of 2^32-1 (4294967295 decimal), when it
wraps around and starts increasing again from zero.
Provided that an application discovers a new schema node
of this type within the minimum time to wrap, it can use the
'initial' value as a delta. It is important for a management
station to be aware of this minimum time and the actual time
between polls, and to discard data if the actual time is too
long or there is no defined minimum time.
In the value set and its semantics, this type is equivalent
to the ZeroBasedCounter32 textual convention of the SMIv2.";
reference
"RFC 4502: Remote Network Monitoring Management Information
Base Version 2";
}
typedef counter64 {
type uint64;
description
"The counter64 type represents a non-negative integer
that monotonically increases until it reaches a
maximum value of 2^64-1 (18446744073709551615 decimal),
when it wraps around and starts increasing again from zero.
Counters have no defined 'initial' value, and thus, a
single value of a counter has (in general) no information
content. Discontinuities in the monotonically increasing
value normally occur at re-initialization of the
management system, and at other times as specified in the
description of a schema node using this type. If such
other times can occur, for example, the creation of
a schema node of type counter64 at times other than
re-initialization, then a corresponding schema node
should be defined, with an appropriate type, to indicate
the last discontinuity.
The counter64 type should not be used for configuration
schema nodes. A default statement SHOULD NOT be used in
combination with the type counter64.
In the value set and its semantics, this type is equivalent
to the Counter64 type of the SMIv2.";
reference
"RFC 2578: Structure of Management Information Version 2
(SMIv2)";
}
typedef zero-based-counter64 {
type yang:counter64;
default "0";
description
"The zero-based-counter64 type represents a counter64 that
has the defined 'initial' value zero.
A schema node of this type will be set to zero (0) on creation
and will thereafter increase monotonically until it reaches
a maximum value of 2^64-1 (18446744073709551615 decimal),
when it wraps around and starts increasing again from zero.
Provided that an application discovers a new schema node
of this type within the minimum time to wrap, it can use the
'initial' value as a delta. It is important for a management
station to be aware of this minimum time and the actual time
between polls, and to discard data if the actual time is too
long or there is no defined minimum time.
In the value set and its semantics, this type is equivalent
to the ZeroBasedCounter64 textual convention of the SMIv2.";
reference
"RFC 2856: Textual Conventions for Additional High Capacity
Data Types";
}
typedef gauge32 {
type uint32;
description
"The gauge32 type represents a non-negative integer, which
may increase or decrease, but shall never exceed a maximum
value, nor fall below a minimum value. The maximum value
cannot be greater than 2^32-1 (4294967295 decimal), and
the minimum value cannot be smaller than 0. The value of
a gauge32 has its maximum value whenever the information
being modeled is greater than or equal to its maximum
value, and has its minimum value whenever the information
being modeled is smaller than or equal to its minimum value.
If the information being modeled subsequently decreases
below (increases above) the maximum (minimum) value, the
gauge32 also decreases (increases).
In the value set and its semantics, this type is equivalent
to the Gauge32 type of the SMIv2.";
reference
"RFC 2578: Structure of Management Information Version 2
(SMIv2)";
}
typedef gauge64 {
type uint64;
description
"The gauge64 type represents a non-negative integer, which
may increase or decrease, but shall never exceed a maximum
value, nor fall below a minimum value. The maximum value
cannot be greater than 2^64-1 (18446744073709551615), and
the minimum value cannot be smaller than 0. The value of
a gauge64 has its maximum value whenever the information
being modeled is greater than or equal to its maximum
value, and has its minimum value whenever the information
being modeled is smaller than or equal to its minimum value.
If the information being modeled subsequently decreases
below (increases above) the maximum (minimum) value, the
gauge64 also decreases (increases).
In the value set and its semantics, this type is equivalent
to the CounterBasedGauge64 SMIv2 textual convention defined
in RFC 2856";
reference
"RFC 2856: Textual Conventions for Additional High Capacity
Data Types";
}
/*** collection of identifier-related types ***/
typedef object-identifier {
type string {
pattern '(([0-1](\.[1-3]?[0-9]))|(2\.(0|([1-9]\d*))))'
+ '(\.(0|([1-9]\d*)))*';
}
description
"The object-identifier type represents administratively
assigned names in a registration-hierarchical-name tree.
Values of this type are denoted as a sequence of numerical
non-negative sub-identifier values. Each sub-identifier
value MUST NOT exceed 2^32-1 (4294967295). Sub-identifiers
are separated by single dots and without any intermediate
whitespace.
The ASN.1 standard restricts the value space of the first
sub-identifier to 0, 1, or 2. Furthermore, the value space
of the second sub-identifier is restricted to the range
0 to 39 if the first sub-identifier is 0 or 1. Finally,
the ASN.1 standard requires that an object identifier
has always at least two sub-identifiers. The pattern
captures these restrictions.
Although the number of sub-identifiers is not limited,
module designers should realize that there may be
implementations that stick with the SMIv2 limit of 128
sub-identifiers.
This type is a superset of the SMIv2 OBJECT IDENTIFIER type
since it is not restricted to 128 sub-identifiers. Hence,
this type SHOULD NOT be used to represent the SMIv2 OBJECT
IDENTIFIER type; the object-identifier-128 type SHOULD be
used instead.";
reference
"ISO9834-1: Information technology -- Open Systems
Interconnection -- Procedures for the operation of OSI
Registration Authorities: General procedures and top
arcs of the ASN.1 Object Identifier tree";
}
typedef object-identifier-128 {
type object-identifier {
pattern '\d*(\.\d*){1,127}';
}
description
"This type represents object-identifiers restricted to 128
sub-identifiers.
In the value set and its semantics, this type is equivalent
to the OBJECT IDENTIFIER type of the SMIv2.";
reference
"RFC 2578: Structure of Management Information Version 2
(SMIv2)";
}
typedef yang-identifier {
type string {
length "1..max";
pattern '[a-zA-Z_][a-zA-Z0-9\-_.]*';
pattern '.|..|[^xX].*|.[^mM].*|..[^lL].*';
}
description
"A YANG identifier string as defined by the 'identifier'
rule in Section 12 of RFC 6020. An identifier must
start with an alphabetic character or an underscore
followed by an arbitrary sequence of alphabetic or
numeric characters, underscores, hyphens, or dots.
A YANG identifier MUST NOT start with any possible
combination of the lowercase or uppercase character
sequence 'xml'.";
reference
"RFC 6020: YANG - A Data Modeling Language for the Network
Configuration Protocol (NETCONF)";
}
/*** collection of types related to date and time***/
typedef date-and-time {
type string {
pattern '\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?'
+ '(Z|[\+\-]\d{2}:\d{2})';
}
description
"The date-and-time type is a profile of the ISO 8601
standard for representation of dates and times using the
Gregorian calendar. The profile is defined by the
date-time production in Section 5.6 of RFC 3339.
The date-and-time type is compatible with the dateTime XML
schema type with the following notable exceptions:
(a) The date-and-time type does not allow negative years.
(b) The date-and-time time-offset -00:00 indicates an unknown
time zone (see RFC 3339) while -00:00 and +00:00 and Z
all represent the same time zone in dateTime.
(c) The canonical format (see below) of data-and-time values
differs from the canonical format used by the dateTime XML
schema type, which requires all times to be in UTC using
the time-offset 'Z'.
This type is not equivalent to the DateAndTime textual
convention of the SMIv2 since RFC 3339 uses a different
separator between full-date and full-time and provides
higher resolution of time-secfrac.
The canonical format for date-and-time values with a known time
zone uses a numeric time zone offset that is calculated using
the device's configured known offset to UTC time. A change of
the device's offset to UTC time will cause date-and-time values
to change accordingly. Such changes might happen periodically
in case a server follows automatically daylight saving time
(DST) time zone offset changes. The canonical format for
date-and-time values with an unknown time zone (usually
referring to the notion of local time) uses the time-offset
-00:00.";
reference
"RFC 3339: Date and Time on the Internet: Timestamps
RFC 2579: Textual Conventions for SMIv2
XSD-TYPES: XML Schema Part 2: Datatypes Second Edition";
}
typedef timeticks {
type uint32;
description
"The timeticks type represents a non-negative integer that
represents the time, modulo 2^32 (4294967296 decimal), in
hundredths of a second between two epochs. When a schema
node is defined that uses this type, the description of
the schema node identifies both of the reference epochs.
In the value set and its semantics, this type is equivalent
to the TimeTicks type of the SMIv2.";
reference
"RFC 2578: Structure of Management Information Version 2
(SMIv2)";
}
typedef timestamp {
type yang:timeticks;
description
"The timestamp type represents the value of an associated
timeticks schema node at which a specific occurrence
happened. The specific occurrence must be defined in the
description of any schema node defined using this type. When
the specific occurrence occurred prior to the last time the
associated timeticks attribute was zero, then the timestamp
value is zero. Note that this requires all timestamp values
to be reset to zero when the value of the associated timeticks
attribute reaches 497+ days and wraps around to zero.
The associated timeticks schema node must be specified
in the description of any schema node using this type.
In the value set and its semantics, this type is equivalent
to the TimeStamp textual convention of the SMIv2.";
reference
"RFC 2579: Textual Conventions for SMIv2";
}
/*** collection of generic address types ***/
typedef phys-address {
type string {
pattern '([0-9a-fA-F]{2}(:[0-9a-fA-F]{2})*)?';
}
description
"Represents media- or physical-level addresses represented
as a sequence of octets, each octet represented by two hexadecimal
numbers. Octets are separated by colons. The canonical
representation uses lowercase characters.
In the value set and its semantics, this type is equivalent
to the PhysAddress textual convention of the SMIv2.";
reference
"RFC 2579: Textual Conventions for SMIv2";
}
typedef mac-address {
type string {
pattern '[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}';
}
description
"The mac-address type represents an IEEE 802 MAC address.
The canonical representation uses lowercase characters.
In the value set and its semantics, this type is equivalent
to the MacAddress textual convention of the SMIv2.";
reference
"IEEE 802: IEEE Standard for Local and Metropolitan Area
Networks: Overview and Architecture
RFC 2579: Textual Conventions for SMIv2";
}
/*** collection of XML-specific types ***/
typedef xpath1.0 {
type string;
description
"This type represents an XPATH 1.0 expression.
When a schema node is defined that uses this type, the
description of the schema node MUST specify the XPath
context in which the XPath expression is evaluated.";
reference
"XPATH: XML Path Language (XPath) Version 1.0";
}
/*** collection of string types ***/
typedef hex-string {
type string {
pattern '([0-9a-fA-F]{2}(:[0-9a-fA-F]{2})*)?';
}
description
"A hexadecimal string with octets represented as hex digits
separated by colons. The canonical representation uses
lowercase characters.";
}
typedef uuid {
type string {
pattern '[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-'
+ '[0-9a-fA-F]{4}-[0-9a-fA-F]{12}';
}
description
"A Universally Unique IDentifier in the string representation
defined in RFC 4122. The canonical representation uses
lowercase characters.
The following is an example of a UUID in string representation:
f81d4fae-7dec-11d0-a765-00a0c91e6bf6
";
reference
"RFC 4122: A Universally Unique IDentifier (UUID) URN
Namespace";
}
typedef dotted-quad {
type string {
pattern
'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\.){3}'
+ '([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])';
}
description
"An unsigned 32-bit number expressed in the dotted-quad
notation, i.e., four octets written as decimal numbers
and separated with the '.' (full stop) character.";
}
}
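The typedefs above constrain their values purely through regular-expression patterns. As a quick sanity check, here is a minimal Python sketch (illustrative only, not part of this change) that exercises a few of those patterns with `re.fullmatch()`, the closest equivalent to YANG's implicitly anchored XSD-style patterns; for these particular expressions the XSD and Python regex dialects behave identically.

```python
# Illustrative sketch: validate sample values against the ietf-yang-types
# patterns quoted above.  YANG patterns match the whole value, hence fullmatch.
import re

PATTERNS = {
    'date-and-time': r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+\-]\d{2}:\d{2})',
    'mac-address':   r'[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}',
    'uuid':          (r'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-'
                      r'[0-9a-fA-F]{4}-[0-9a-fA-F]{12}'),
    'dotted-quad':   (r'(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\.){3}'
                      r'([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])'),
}

SAMPLES = {
    'date-and-time': '2020-11-06T18:20:13+01:00',
    'mac-address':   '00:a0:c9:1e:6b:f6',
    'uuid':          'f81d4fae-7dec-11d0-a765-00a0c91e6bf6',
    'dotted-quad':   '192.168.1.1',
}

for name, value in SAMPLES.items():
    ok = re.fullmatch(PATTERNS[name], value) is not None
    print(f'{name}: {value!r} -> {"valid" if ok else "invalid"}')
```

All four samples print `valid`; dropping a colon from the MAC address or using an octet such as 300 in the dotted-quad makes the corresponding check fail.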

View File

@@ -0,0 +1,53 @@
module gnpy-api {
yang-version 1.1;
namespace "gnpy:gnpy-api";
prefix gnpyapi;
import gnpy-network-topology {
prefix gnpynt;
}
import gnpy-path-computation-simplified {
prefix gnpypc;
}
import gnpy-eqpt-config {
prefix gnpyeqpt;
}
organization
"Telecom Infra Project OOPT PSE Working Group";
contact
"WG Web: <https://github.com/Telecominfraproject/oopt-gnpy>
contact: <mailto:ahmed.triki@orange.com>
contact: <mailto:esther.lerouzic@orange.com>
";
description
"YANG model for gnpy api input for path computation - TransportPCE preversion";
revision 2020-10-22 {
description
"draft for experimental/2020-candi";
reference
"YANG model for api input for path computation with gnpy";
}
container service {
description
"Describe the service file to connect to gnpy";
uses gnpypc:service;
}
container result {
uses gnpypc:result;
description
"Describe the response object to gnpy";
}
container topology {
description
"Describe the topology file to connect to gnpy";
uses gnpynt:topo;
}
container equipment {
description
"Describe the equipment library to connect to gnpy";
uses gnpyeqpt:eqpt;
}
}
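The `gnpy-api` module itself defines no data types; it only assembles groupings from the imported modules into four top-level containers. Following the standard YANG-to-JSON encoding rules (RFC 7951), top-level members of an instance document are therefore named `<module-name>:<container-name>`, e.g. `gnpy-api:service`. The sketch below is a hypothetical helper (the function name and the placeholder bodies are assumptions, not code from this repository) that wraps request data in those member names; `result` models the response and is not part of a request.

```python
# Hypothetical helper: wrap the three input documents in the gnpy-api
# top-level containers defined by the module above.  The bodies themselves
# must follow the imported gnpy-network-topology, gnpy-path-computation-
# simplified and gnpy-eqpt-config models (placeholders here).
import json


def build_api_request(service, topology, equipment):
    """Return a dict shaped like a gnpy-api instance document (request side)."""
    return {
        'gnpy-api:service': service,      # uses gnpypc:service
        'gnpy-api:topology': topology,    # uses gnpynt:topo
        'gnpy-api:equipment': equipment,  # uses gnpyeqpt:eqpt
    }


if __name__ == '__main__':
    # Placeholder bodies only -- real requests carry full service/topology/equipment data.
    request = build_api_request(service={}, topology={}, equipment={})
    print(json.dumps(request, indent=2))
```

A response would analogously carry a single `gnpy-api:result` member built from the `gnpypc:result` grouping.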

Some files were not shown because too many files have changed in this diff.