Mirror of https://github.com/Telecominfraproject/oopt-gnpy.git (synced 2025-11-01 02:28:05 +00:00)
Merge branch 'develop'
Change-Id: If7860b243cb504613a7fffafad2d601510000af7
@@ -6,6 +6,7 @@
 "destination": "trx Vannes_KBE",
 "src-tp-id": "trx Lorient_KMA",
 "dst-tp-id": "trx Vannes_KBE",
+"bidirectional": false,
 "path-constraints": {
 "te-bandwidth": {
 "technology": "flexi-grid",
@@ -30,6 +31,7 @@
 "destination": "trx Vannes_KBE",
 "src-tp-id": "trx Brest_KLA",
 "dst-tp-id": "trx Vannes_KBE",
+"bidirectional": false,
 "path-constraints": {
 "te-bandwidth": {
 "technology": "flexi-grid",
@@ -94,6 +96,7 @@
 "destination": "trx Rennes_STA",
 "src-tp-id": "trx Lannion_CAS",
 "dst-tp-id": "trx Rennes_STA",
+"bidirectional": false,
 "path-constraints": {
 "te-bandwidth": {
 "technology": "flexi-grid",
@@ -118,6 +121,7 @@
 "destination": "trx Lannion_CAS",
 "src-tp-id": "trx Rennes_STA",
 "dst-tp-id": "trx Lannion_CAS",
+"bidirectional": false,
 "path-constraints": {
 "te-bandwidth": {
 "technology": "flexi-grid",
@@ -142,6 +146,7 @@
 "destination": "trx Lannion_CAS",
 "src-tp-id": "trx Rennes_STA",
 "dst-tp-id": "trx Lannion_CAS",
+"bidirectional": false,
 "path-constraints": {
 "te-bandwidth": {
 "technology": "flexi-grid",
@@ -166,6 +171,7 @@
 "destination": "trx Lorient_KMA",
 "src-tp-id": "trx Lannion_CAS",
 "dst-tp-id": "trx Lorient_KMA",
+"bidirectional": false,
 "path-constraints": {
 "te-bandwidth": {
 "technology": "flexi-grid",
@@ -190,6 +196,7 @@
 "destination": "trx Lorient_KMA",
 "src-tp-id": "trx Lannion_CAS",
 "dst-tp-id": "trx Lorient_KMA",
+"bidirectional": false,
 "path-constraints": {
 "te-bandwidth": {
 "technology": "flexi-grid",
@@ -214,6 +221,7 @@
 "destination": "trx Lorient_KMA",
 "src-tp-id": "trx Lannion_CAS",
 "dst-tp-id": "trx Lorient_KMA",
+"bidirectional": false,
 "path-constraints": {
 "te-bandwidth": {
 "technology": "flexi-grid",
@@ -18,37 +18,51 @@ from pathlib import Path
 from collections import namedtuple
 from logging import getLogger, basicConfig, CRITICAL, DEBUG, INFO
 from json import dumps, loads
-from networkx import (draw_networkx_nodes, draw_networkx_edges,
-draw_networkx_labels)
 from numpy import mean
 from gnpy.core.service_sheet import convert_service_sheet, Request_element, Element
 from gnpy.core.utils import load_json
 from gnpy.core.network import load_network, build_network, save_network
-from gnpy.core.equipment import load_equipment, trx_mode_params, automatic_nch, automatic_spacing
-from gnpy.core.elements import Transceiver, Roadm, Edfa, Fused, Fiber
+from gnpy.core.equipment import load_equipment, trx_mode_params, automatic_nch
+from gnpy.core.elements import Transceiver, Roadm
 from gnpy.core.utils import db2lin, lin2db
-from gnpy.core.request import (Path_request, Result_element, compute_constrained_path,
-propagate, jsontocsv, Disjunction, compute_path_dsjctn, requests_aggregation,
-propagate_and_optimize_mode)
-from gnpy.core.exceptions import ConfigurationError, EquipmentConfigError, NetworkTopologyError
+from gnpy.core.request import (Path_request, Result_element,
+propagate, jsontocsv, Disjunction, compute_path_dsjctn,
+requests_aggregation, propagate_and_optimize_mode,
+BLOCKING_NOPATH, BLOCKING_NOMODE,
+find_reversed_path)
+from gnpy.core.exceptions import (ConfigurationError, EquipmentConfigError, NetworkTopologyError,
+ServiceError, DisjunctionError)
 import gnpy.core.ansi_escapes as ansi_escapes
+from gnpy.core.spectrum_assignment import (build_oms_list, pth_assign_spectrum)
 from copy import copy, deepcopy
 from textwrap import dedent
 from math import ceil

 #EQPT_LIBRARY_FILENAME = Path(__file__).parent / 'eqpt_config.json'

-logger = getLogger(__name__)
+LOGGER = getLogger(__name__)

-parser = ArgumentParser(description = 'A function that computes performances for a list of services provided in a json file or an excel sheet.')
-parser.add_argument('network_filename', nargs='?', type = Path, default= Path(__file__).parent / 'meshTopologyExampleV2.xls')
-parser.add_argument('service_filename', nargs='?', type = Path, default= Path(__file__).parent / 'meshTopologyExampleV2.xls')
-parser.add_argument('eqpt_filename', nargs='?', type = Path, default=Path(__file__).parent / 'eqpt_config.json')
-parser.add_argument('-v', '--verbose', action='count', default=0, help='increases verbosity for each occurence')
-parser.add_argument('-o', '--output', type = Path)
+PARSER = ArgumentParser(description='A function that computes performances for a list of ' +
+'services provided in a json file or an excel sheet.')
+PARSER.add_argument('network_filename', nargs='?', type=Path,\
+default=Path(__file__).parent / 'meshTopologyExampleV2.xls',\
+help='input topology file in xls or json')
+PARSER.add_argument('service_filename', nargs='?', type=Path,\
+default=Path(__file__).parent / 'meshTopologyExampleV2.xls',\
+help='input service file in xls or json')
+PARSER.add_argument('eqpt_filename', nargs='?', type=Path,\
+default=Path(__file__).parent / 'eqpt_config.json',\
+help='input equipment library in json. Default is eqpt_config.json')
+PARSER.add_argument('-bi', '--bidir', action='store_true',\
+help='considers that all demands are bidir')
+PARSER.add_argument('-v', '--verbose', action='count', default=0,\
+help='increases verbosity for each occurence')
+PARSER.add_argument('-o', '--output', type=Path)


-def requests_from_json(json_data,equipment):
+def requests_from_json(json_data, equipment):
+""" converts the json data into a list of requests elements
+"""
 requests_list = []

 for req in json_data['path-request']:
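Note: the reworked argument parser above can be exercised on its own; a minimal standalone sketch (not the project's module; the defaults are simply the ones shown in the diff) illustrates that --bidir is a store_true flag and -v counts repetitions:

# Standalone sketch of the PARSER behaviour added above (illustrative, not project code).
from argparse import ArgumentParser
from pathlib import Path

parser = ArgumentParser()
parser.add_argument('network_filename', nargs='?', type=Path,
                    default=Path('meshTopologyExampleV2.xls'))
parser.add_argument('service_filename', nargs='?', type=Path,
                    default=Path('meshTopologyExampleV2.xls'))
parser.add_argument('eqpt_filename', nargs='?', type=Path,
                    default=Path('eqpt_config.json'))
parser.add_argument('-bi', '--bidir', action='store_true')
parser.add_argument('-v', '--verbose', action='count', default=0)
parser.add_argument('-o', '--output', type=Path)

args = parser.parse_args(['topo.json', 'services.json', 'eqpt_config.json', '--bidir', '-vv'])
print(args.bidir, args.verbose)  # True 2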
@@ -56,12 +70,13 @@ def requests_from_json(json_data,equipment):
 params = {}
 params['request_id'] = req['request-id']
 params['source'] = req['source']
+params['bidir'] = req['bidirectional']
 params['destination'] = req['destination']
 params['trx_type'] = req['path-constraints']['te-bandwidth']['trx_type']
 params['trx_mode'] = req['path-constraints']['te-bandwidth']['trx_mode']
 params['format'] = params['trx_mode']
 params['spacing'] = req['path-constraints']['te-bandwidth']['spacing']
-try :
+try:
 nd_list = req['explicit-route-objects']['route-object-include-exclude']
 except KeyError:
 nd_list = []
@@ -70,7 +85,7 @@ def requests_from_json(json_data,equipment):
 # recover trx physical param (baudrate, ...) from type and mode
 # in trx_mode_params optical power is read from equipment['SI']['default'] and
 # nb_channel is computed based on min max frequency and spacing
-trx_params = trx_mode_params(equipment,params['trx_type'],params['trx_mode'],True)
+trx_params = trx_mode_params(equipment, params['trx_type'], params['trx_mode'], True)
 params.update(trx_params)
 # print(trx_params['min_spacing'])
 # optical power might be set differently in the request. if it is indicated then the
@@ -89,13 +104,13 @@ def requests_from_json(json_data,equipment):
 params['nb_channel'] = nch
 spacing = params['spacing']
 params['f_max'] = f_min + nch*spacing
-else :
-params['nb_channel'] = automatic_nch(f_min,f_max_from_si,params['spacing'])
+else:
+params['nb_channel'] = automatic_nch(f_min, f_max_from_si, params['spacing'])
 except KeyError:
-params['nb_channel'] = automatic_nch(f_min,f_max_from_si,params['spacing'])
+params['nb_channel'] = automatic_nch(f_min, f_max_from_si, params['spacing'])
 consistency_check(params, f_max_from_si)

-try :
+try:
 params['path_bandwidth'] = req['path-constraints']['te-bandwidth']['path_bandwidth']
 except KeyError:
 pass
@@ -103,30 +118,35 @@ def requests_from_json(json_data,equipment):
 return requests_list

 def consistency_check(params, f_max_from_si):
+""" checks that the requested parameters are consistant (spacing vs nb channel,
+vs transponder mode...)
+"""
 f_min = params['f_min']
 f_max = params['f_max']
-max_recommanded_nb_channels = automatic_nch(f_min,f_max,
-params['spacing'])
+max_recommanded_nb_channels = automatic_nch(f_min, f_max, params['spacing'])
 if params['baud_rate'] is not None:
 #implicitely means that a mode is defined with min_spacing
-if params['min_spacing']>params['spacing'] :
-msg = f'Request {params["request_id"]} has spacing below transponder {params["trx_type"]}'+\
-f' {params["trx_mode"]} min spacing value {params["min_spacing"]*1e-9}GHz.\n'+\
-'Computation stopped'
+if params['min_spacing'] > params['spacing']:
+msg = f'Request {params["request_id"]} has spacing below transponder ' +\
+f'{params["trx_type"]} {params["trx_mode"]} min spacing value ' +\
+f'{params["min_spacing"]*1e-9}GHz.\nComputation stopped'
 print(msg)
-logger.critical(msg)
-exit()
+LOGGER.critical(msg)
+raise ServiceError(msg)
-if f_max>f_max_from_si:
+if f_max > f_max_from_si:
 msg = dedent(f'''
 Requested channel number {params["nb_channel"]}, baud rate {params["baud_rate"]} GHz and requested spacing {params["spacing"]*1e-9}GHz
 is not consistent with frequency range {f_min*1e-12} THz, {f_max*1e-12} THz, min recommanded spacing {params["min_spacing"]*1e-9}GHz.
 max recommanded nb of channels is {max_recommanded_nb_channels}
 Computation stopped.''')
-logger.critical(msg)
-exit()
+LOGGER.critical(msg)
+raise ServiceError(msg)


 def disjunctions_from_json(json_data):
+""" reads the disjunction requests from the json dict and create the list
+of requested disjunctions for this set of requests
+"""
 disjunctions_list = []
 try:
 temp_test = json_data['synchronization']
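Note: the consistency_check hunk above rejects a request whose spacing is below the transponder's min_spacing or whose channel count does not fit between f_min and f_max. A rough standalone sketch of that arithmetic (the floor-division formula is an assumption mirroring what automatic_nch is expected to do, not the gnpy call itself):

# Assumed arithmetic behind the consistency check (illustrative only, not gnpy's automatic_nch).
def max_channels(f_min, f_max, spacing):
    """How many channels at `spacing` (Hz) fit between f_min and f_max (Hz)."""
    return int((f_max - f_min) // spacing)

f_min, f_max = 191.35e12, 196.1e12          # example C-band limits
print(max_channels(f_min, f_max, 50e9))     # 95 channels at 50 GHz
print(max_channels(f_min, f_max, 75e9))     # 63 channels at 75 GHz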
@@ -145,159 +165,229 @@ def disjunctions_from_json(json_data):
 return disjunctions_list


-def load_requests(filename,eqpt_filename):
+def load_requests(filename, eqpt_filename, bidir):
+""" loads the requests from a json or an excel file into a data string
+"""
 if filename.suffix.lower() == '.xls':
-logger.info('Automatically converting requests from XLS to JSON')
-json_data = convert_service_sheet(filename,eqpt_filename)
+LOGGER.info('Automatically converting requests from XLS to JSON')
+try:
+json_data = convert_service_sheet(filename, eqpt_filename, bidir=bidir)
+except ServiceError as this_e:
+print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
+exit(1)
 else:
-with open(filename, encoding='utf-8') as f:
-json_data = loads(f.read())
+with open(filename, encoding='utf-8') as my_f:
+json_data = loads(my_f.read())
 return json_data

 def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist):
-# use a list but a dictionnary might be helpful to find path bathsed on request_id
-# TODO change all these req, dsjct, res lists into dict !
+""" use a list but a dictionnary might be helpful to find path based on request_id
+TODO change all these req, dsjct, res lists into dict !
+"""
 path_res_list = []
+reversed_path_res_list = []
+propagated_reversed_path_res_list = []

-for i,pathreq in enumerate(pathreqlist):
+for i, pathreq in enumerate(pathreqlist):

-# use the power specified in requests but might be different from the one specified for design
-# the power is an optional parameter for requests definition
-# if optional, use the one defines in eqt_config.json
+# use the power specified in requests but might be different from the one
+# specified for design the power is an optional parameter for requests
+# definition if optional, use the one defines in eqt_config.json
 p_db = lin2db(pathreq.power*1e3)
 p_total_db = p_db + lin2db(pathreq.nb_channel)
 print(f'request {pathreq.request_id}')
 print(f'Computing path from {pathreq.source} to {pathreq.destination}')
-print(f'with path constraint: {[pathreq.source]+pathreq.nodes_list}') #adding first node to be clearer on the output
+# adding first node to be clearer on the output
+print(f'with path constraint: {[pathreq.source] + pathreq.nodes_list}')

-total_path = pathlist[i]
-print(f'Computed path (roadms):{[e.uid for e in total_path if isinstance(e, Roadm)]}\n')
+# pathlist[i] contains the whole path information for request i
+# last element is a transciver and where the result of the propagation is
+# recorded.
+# Important Note: since transceivers attached to roadms are actually logical
+# elements to simulate performance, several demands having the same destination
+# may use the same transponder for the performance simulation. This is why
+# we use deepcopy: to ensure that each propagation is recorded and not overwritten
+total_path = deepcopy(pathlist[i])
+print(f'Computed path (roadms):{[e.uid for e in total_path if isinstance(e, Roadm)]}')
 # for debug
 # print(f'{pathreq.baud_rate} {pathreq.power} {pathreq.spacing} {pathreq.nb_channel}')
-if total_path :
+if total_path:
 if pathreq.baud_rate is not None:
-total_path = propagate(total_path,pathreq,equipment)
-# for el in total_path: print(el)
-temp_snr01nm = round(mean(total_path[-1].snr+lin2db(pathreq.baud_rate/(12.5e9))),2)
-if temp_snr01nm < pathreq.OSNR :
-msg = f'\tWarning! Request {pathreq.request_id} computed path from {pathreq.source} to {pathreq.destination} does not pass with {pathreq.tsp_mode}\n' +\
-f'\tcomputedSNR in 0.1nm = {temp_snr01nm} - required osnr {pathreq.OSNR}\n'
+# means that at this point the mode was entered/forced by user and thus a
+# baud_rate was defined
+total_path = propagate(total_path, pathreq, equipment)
+temp_snr01nm = round(mean(total_path[-1].snr+lin2db(pathreq.baud_rate/(12.5e9))), 2)
+if temp_snr01nm < pathreq.OSNR:
+msg = f'\tWarning! Request {pathreq.request_id} computed path from' +\
+f' {pathreq.source} to {pathreq.destination} does not pass with' +\
+f' {pathreq.tsp_mode}\n\tcomputedSNR in 0.1nm = {temp_snr01nm} ' +\
+f'- required osnr {pathreq.OSNR}'
 print(msg)
-logger.warning(msg)
-total_path = []
+LOGGER.warning(msg)
+pathreq.blocking_reason = 'MODE_NOT_FEASIBLE'
 else:
-total_path,mode = propagate_and_optimize_mode(total_path,pathreq,equipment)
-# if no baudrate satisfies spacing, no mode is returned and an empty path is returned
+total_path, mode = propagate_and_optimize_mode(total_path, pathreq, equipment)
+# if no baudrate satisfies spacing, no mode is returned and the last explored mode
 # a warning is shown in the propagate_and_optimize_mode
-if mode is not None :
 # propagate_and_optimize_mode function returns the mode with the highest bitrate
-# that passes. if no mode passes, then it returns an empty path
+# that passes. if no mode passes, then a attribute blocking_reason is added on
+# pathreq that contains the reason for blocking: 'NO_PATH', 'NO_FEASIBLE_MODE', ...
+try:
+if pathreq.blocking_reason in BLOCKING_NOPATH:
+total_path = []
+elif pathreq.blocking_reason in BLOCKING_NOMODE:
+pathreq.baud_rate = mode['baud_rate']
+pathreq.tsp_mode = mode['format']
+pathreq.format = mode['format']
+pathreq.OSNR = mode['OSNR']
+pathreq.tx_osnr = mode['tx_osnr']
+pathreq.bit_rate = mode['bit_rate']
+# other blocking reason should not appear at this point
+except AttributeError:
 pathreq.baud_rate = mode['baud_rate']
 pathreq.tsp_mode = mode['format']
 pathreq.format = mode['format']
 pathreq.OSNR = mode['OSNR']
 pathreq.tx_osnr = mode['tx_osnr']
 pathreq.bit_rate = mode['bit_rate']
-else :
-total_path = []
-# we record the last tranceiver object in order to have th whole
-# information about spectrum. Important Note: since transceivers
-# attached to roadms are actually logical elements to simulate
-# performance, several demands having the same destination may use
-# the same transponder for the performance simaulation. This is why
-# we use deepcopy: to ensure each propagation is recorded and not
-# overwritten

-path_res_list.append(deepcopy(total_path))
-return path_res_list
+# reversed path is needed for correct spectrum assignment
+reversed_path = find_reversed_path(pathlist[i])
+if pathreq.bidir:
+# only propagate if bidir is true, but needs the reversed path anyway for
+# correct spectrum assignment
+rev_p = deepcopy(reversed_path)
+
+print(f'\n\tPropagating Z to A direction {pathreq.destination} to {pathreq.source}')
+print(f'\tPath (roadsm) {[r.uid for r in rev_p if isinstance(r,Roadm)]}\n')
+propagated_reversed_path = propagate(rev_p, pathreq, equipment)
+temp_snr01nm = round(mean(propagated_reversed_path[-1].snr +\
+lin2db(pathreq.baud_rate/(12.5e9))), 2)
+if temp_snr01nm < pathreq.OSNR:
+msg = f'\tWarning! Request {pathreq.request_id} computed path from' +\
+f' {pathreq.source} to {pathreq.destination} does not pass with' +\
+f' {pathreq.tsp_mode}\n' +\
+f'\tcomputedSNR in 0.1nm = {temp_snr01nm} - required osnr {pathreq.OSNR}'
+print(msg)
+LOGGER.warning(msg)
+# TODO selection of mode should also be on reversed direction !!
+pathreq.blocking_reason = 'MODE_NOT_FEASIBLE'
+else:
+propagated_reversed_path = []
+else:
+msg = 'Total path is empty. No propagation'
+print(msg)
+LOGGER.info(msg)
+reversed_path = []
+propagated_reversed_path = []
+
+path_res_list.append(total_path)
+reversed_path_res_list.append(reversed_path)
+propagated_reversed_path_res_list.append(propagated_reversed_path)
+# print to have a nice output
+print('')
+return path_res_list, reversed_path_res_list, propagated_reversed_path_res_list

 def correct_route_list(network, pathreqlist):
-# prepares the format of route list of nodes to be consistant
-# remove wrong names, remove endpoints
-# also correct source and destination
+""" prepares the format of route list of nodes to be consistant
+remove wrong names, remove endpoints
+also correct source and destination
+"""
 anytype = [n.uid for n in network.nodes()]
-# TODO there is a problem of identification of fibers in case of parallel fibers bitween two adjacent roadms
-# so fiber constraint is not supported
+# TODO there is a problem of identification of fibers in case of parallel fibers
+# between two adjacent roadms so fiber constraint is not supported
 transponders = [n.uid for n in network.nodes() if isinstance(n, Transceiver)]
 for pathreq in pathreqlist:
-for i,n_id in enumerate(pathreq.nodes_list):
+for i, n_id in enumerate(pathreq.nodes_list):
 # replace possibly wrong name with a formated roadm name
 # print(n_id)
-if n_id not in anytype :
+if n_id not in anytype:
 # find nodes name that include constraint among all possible names except
 # transponders (not yet supported as constraints).
 nodes_suggestion = [uid for uid in anytype \
 if n_id.lower() in uid.lower() and uid not in transponders]
 if pathreq.loose_list[i] == 'LOOSE':
-if len(nodes_suggestion)>0 :
+if len(nodes_suggestion) > 0:
 new_n = nodes_suggestion[0]
 print(f'invalid route node specified:\
 \n\'{n_id}\', replaced with \'{new_n}\'')
 pathreq.nodes_list[i] = new_n
 else:
-print(f'\x1b[1;33;40m'+f'invalid route node specified \'{n_id}\', could not use it as constraint, skipped!'+'\x1b[0m')
+print(f'\x1b[1;33;40m'+f'invalid route node specified \'{n_id}\',' +\
+f' could not use it as constraint, skipped!'+'\x1b[0m')
 pathreq.nodes_list.remove(n_id)
 pathreq.loose_list.pop(i)
 else:
-msg = f'\x1b[1;33;40m'+f'could not find node : {n_id} in network topology. Strict constraint can not be applied.'+'\x1b[0m'
-logger.critical(msg)
+msg = f'\x1b[1;33;40m'+f'could not find node: {n_id} in network topology.' +\
+f' Strict constraint can not be applied.' + '\x1b[0m'
+LOGGER.critical(msg)
 raise ValueError(msg)
 if pathreq.source not in transponders:
-msg = f'\x1b[1;31;40m'+f'Request: {pathreq.request_id}: could not find transponder source : {pathreq.source}.'+'\x1b[0m'
-logger.critical(msg)
+msg = f'\x1b[1;31;40m' + f'Request: {pathreq.request_id}: could not find' +\
+f' transponder source: {pathreq.source}.'+'\x1b[0m'
+LOGGER.critical(msg)
 print(f'{msg}\nComputation stopped.')
-exit()
+raise ServiceError(msg)

 if pathreq.destination not in transponders:
-msg = f'\x1b[1;31;40m'+f'Request: {pathreq.request_id}: could not find transponder destination : {pathreq.destination}.'+'\x1b[0m'
-logger.critical(msg)
+msg = f'\x1b[1;31;40m'+f'Request: {pathreq.request_id}: could not find' +\
+f' transponder destination: {pathreq.destination}.'+'\x1b[0m'
+LOGGER.critical(msg)
 print(f'{msg}\nComputation stopped.')
-exit()
+raise ServiceError(msg)

-# TODO remove endpoints from this list in case they were added by the user in the xls or json files
+# TODO remove endpoints from this list in case they were added by the user
+# in the xls or json files
 return pathreqlist

 def correct_disjn(disjn):
+""" clean disjunctions to remove possible repetition
+"""
 local_disjn = disjn.copy()
-for el in local_disjn:
-for d in local_disjn:
-if set(el.disjunctions_req) == set(d.disjunctions_req) and\
-el.disjunction_id != d.disjunction_id:
-local_disjn.remove(d)
+for elem in local_disjn:
+for dis_elem in local_disjn:
+if set(elem.disjunctions_req) == set(dis_elem.disjunctions_req) and\
+elem.disjunction_id != dis_elem.disjunction_id:
+local_disjn.remove(dis_elem)
 return local_disjn


 def path_result_json(pathresult):
+""" create the response dictionnary
+"""
 data = {
 'response': [n.json for n in pathresult]
 }
 return data

-if __name__ == '__main__':
-args = parser.parse_args()
-basicConfig(level={2: DEBUG, 1: INFO, 0: CRITICAL}.get(args.verbose, DEBUG))
-logger.info(f'Computing path requests {args.service_filename} into JSON format')
-print('\x1b[1;34;40m'+f'Computing path requests {args.service_filename} into JSON format'+ '\x1b[0m')
+def main(args):
+""" main function that calls all functions
+"""
+LOGGER.info(f'Computing path requests {args.service_filename} into JSON format')
+print('\x1b[1;34;40m' +\
+f'Computing path requests {args.service_filename} into JSON format'+ '\x1b[0m')
 # for debug
 # print( args.eqpt_filename)

 try:
-data = load_requests(args.service_filename,args.eqpt_filename)
+data = load_requests(args.service_filename, args.eqpt_filename, args.bidir)
 equipment = load_equipment(args.eqpt_filename)
-network = load_network(args.network_filename,equipment)
+network = load_network(args.network_filename, equipment)
-except EquipmentConfigError as e:
-print(f'{ansi_escapes.red}Configuration error in the equipment library:{ansi_escapes.reset} {e}')
+except EquipmentConfigError as this_e:
+print(f'{ansi_escapes.red}Configuration error in the equipment library:{ansi_escapes.reset} {this_e}')
 exit(1)
-except NetworkTopologyError as e:
-print(f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {e}')
+except NetworkTopologyError as this_e:
+print(f'{ansi_escapes.red}Invalid network definition:{ansi_escapes.reset} {this_e}')
 exit(1)
-except ConfigurationError as e:
-print(f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {e}')
+except ConfigurationError as this_e:
+print(f'{ansi_escapes.red}Configuration error:{ansi_escapes.reset} {this_e}')
+exit(1)
+except ServiceError as this_e:
+print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
 exit(1)

 # Build the network once using the default power defined in SI in eqpt config
-# TODO power density : db2linp(ower_dbm": 0)/power_dbm": 0 * nb channels as defined by
+# TODO power density: db2linp(ower_dbm": 0)/power_dbm": 0 * nb channels as defined by
 # spacing, f_min and f_max
 p_db = equipment['SI']['default'].power_dbm

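Note: the rewritten loop above no longer empties total_path on every failure; it sets an optional blocking_reason attribute on the request and inspects it later with try/except AttributeError. A purely illustrative equivalent of that check using getattr (names mirror the diff; the tuple is a placeholder for the imported BLOCKING_NOPATH constant, not its real content):

# Illustrative only: same effect as the try/except AttributeError pattern above.
class Req:
    pass

pathreq = Req()
reason = getattr(pathreq, 'blocking_reason', None)
if reason is None:
    print('request not blocked')     # attribute absent: the normal case
elif reason in ('NO_PATH',):         # placeholder for BLOCKING_NOPATH
    total_path = []                  # mirrors the BLOCKING_NOPATH branch
else:
    print(f'blocked: {reason}')      # e.g. 'MODE_NOT_FEASIBLE'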
@@ -306,77 +396,119 @@ if __name__ == '__main__':
 build_network(network, equipment, p_db, p_total_db)
 save_network(args.network_filename, network)

-rqs = requests_from_json(data, equipment)
+oms_list = build_oms_list(network, equipment)
+
+try:
+rqs = requests_from_json(data, equipment)
+except ServiceError as this_e:
+print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
+exit(1)
 # check that request ids are unique. Non unique ids, may
-# mess the computation : better to stop the computation
+# mess the computation: better to stop the computation
 all_ids = [r.request_id for r in rqs]
 if len(all_ids) != len(set(all_ids)):
-for a in list(set(all_ids)):
-all_ids.remove(a)
+for item in list(set(all_ids)):
+all_ids.remove(item)
 msg = f'Requests id {all_ids} are not unique'
-logger.critical(msg)
+LOGGER.critical(msg)
 exit()
+try:
 rqs = correct_route_list(network, rqs)
+except ServiceError as this_e:
+print(f'{ansi_escapes.red}Service error:{ansi_escapes.reset} {this_e}')
+exit(1)
 # pths = compute_path(network, equipment, rqs)
 dsjn = disjunctions_from_json(data)

-print('\x1b[1;34;40m'+f'List of disjunctions'+ '\x1b[0m')
+print('\x1b[1;34;40m' + f'List of disjunctions' + '\x1b[0m')
 print(dsjn)
 # need to warn or correct in case of wrong disjunction form
 # disjunction must not be repeated with same or different ids
 dsjn = correct_disjn(dsjn)

 # Aggregate demands with same exact constraints
-print('\x1b[1;34;40m'+f'Aggregating similar requests'+ '\x1b[0m')
+print('\x1b[1;34;40m' + f'Aggregating similar requests' + '\x1b[0m')

-rqs,dsjn = requests_aggregation(rqs,dsjn)
+rqs, dsjn = requests_aggregation(rqs, dsjn)
 # TODO export novel set of aggregated demands in a json file

-print('\x1b[1;34;40m'+'The following services have been requested:'+ '\x1b[0m')
+print('\x1b[1;34;40m' + 'The following services have been requested:' + '\x1b[0m')
 print(rqs)

-print('\x1b[1;34;40m'+f'Computing all paths with constraints'+ '\x1b[0m')
+print('\x1b[1;34;40m' + f'Computing all paths with constraints' + '\x1b[0m')
+try:
 pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
+except DisjunctionError as this_e:
+print(f'{ansi_escapes.red}Disjunction error:{ansi_escapes.reset} {this_e}')
+exit(1)

-print('\x1b[1;34;40m'+f'Propagating on selected path'+ '\x1b[0m')
+print('\x1b[1;34;40m' + f'Propagating on selected path' + '\x1b[0m')
-propagatedpths = compute_path_with_disjunction(network, equipment, rqs, pths)
+propagatedpths, reversed_pths, reversed_propagatedpths = \
+compute_path_with_disjunction(network, equipment, rqs, pths)
+# Note that deepcopy used in compute_path_with_disjunction returns
+# a list of nodes which are not belonging to network (they are copies of the node objects).
+# so there can not be propagation on these nodes.
+
+pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)

 print('\x1b[1;34;40m'+f'Result summary'+ '\x1b[0m')
-header = ['req id', ' demand',' snr@bandwidth',' snr@0.1nm',' Receiver minOSNR', ' mode', ' Gbit/s' , ' nb of tsp pairs']
+header = ['req id', ' demand', ' snr@bandwidth A-Z (Z-A)', ' snr@0.1nm A-Z (Z-A)',\
+' Receiver minOSNR', ' mode', ' Gbit/s', ' nb of tsp pairs',\
+'N,M or blocking reason']
 data = []
 data.append(header)
-for i, p in enumerate(propagatedpths):
+for i, this_p in enumerate(propagatedpths):
-if p:
-line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', f'{round(mean(p[-1].snr),2)}',\
-f'{round(mean(p[-1].snr+lin2db(rqs[i].baud_rate/(12.5e9))),2)}',\
-f'{rqs[i].OSNR}', f'{rqs[i].tsp_mode}' , f'{round(rqs[i].path_bandwidth * 1e-9,2)}' , f'{ceil(rqs[i].path_bandwidth / rqs[i].bit_rate) }']
+rev_pth = reversed_propagatedpths[i]
+if rev_pth and this_p:
+psnrb = f'{round(mean(this_p[-1].snr),2)} ({round(mean(rev_pth[-1].snr),2)})'
+psnr = f'{round(mean(this_p[-1].snr_01nm), 2)}' +\
+f' ({round(mean(rev_pth[-1].snr_01nm),2)})'
+elif this_p:
+psnrb = f'{round(mean(this_p[-1].snr),2)}'
+psnr = f'{round(mean(this_p[-1].snr_01nm),2)}'
+
+try :
+if rqs[i].blocking_reason in BLOCKING_NOPATH:
+line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} :',\
+f'-', f'-', f'-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',\
+f'-', f'{rqs[i].blocking_reason}']
 else:
-line = [f'{rqs[i].request_id}',f' {rqs[i].source} to {rqs[i].destination} : not feasible ']
+line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', psnrb,\
+psnr, f'-', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9, 2)}',\
+f'-', f'{rqs[i].blocking_reason}']
+except AttributeError:
+line = [f'{rqs[i].request_id}', f' {rqs[i].source} to {rqs[i].destination} : ', psnrb,\
+psnr, f'{rqs[i].OSNR}', f'{rqs[i].tsp_mode}', f'{round(rqs[i].path_bandwidth * 1e-9,2)}',\
+f'{ceil(rqs[i].path_bandwidth / rqs[i].bit_rate) }', f'({rqs[i].N},{rqs[i].M})']
 data.append(line)

 col_width = max(len(word) for row in data for word in row[2:]) # padding
-firstcol_width = max(len(row[0]) for row in data ) # padding
-secondcol_width = max(len(row[1]) for row in data ) # padding
+firstcol_width = max(len(row[0]) for row in data) # padding
+secondcol_width = max(len(row[1]) for row in data) # padding
 for row in data:
 firstcol = ''.join(row[0].ljust(firstcol_width))
 secondcol = ''.join(row[1].ljust(secondcol_width))
-remainingcols = ''.join(word.center(col_width,' ') for word in row[2:])
+remainingcols = ''.join(word.center(col_width, ' ') for word in row[2:])
 print(f'{firstcol} {secondcol} {remainingcols}')
+print('\x1b[1;33;40m'+f'Result summary shows mean SNR and OSNR (average over all channels)' +\
+'\x1b[0m')

-if args.output :
+if args.output:
 result = []
 # assumes that list of rqs and list of propgatedpths have same order
-for i,p in enumerate(propagatedpths):
-result.append(Result_element(rqs[i],p))
+for i, pth in enumerate(propagatedpths):
+result.append(Result_element(rqs[i], pth, reversed_propagatedpths[i]))
 temp = path_result_json(result)
 fnamecsv = f'{str(args.output)[0:len(str(args.output))-len(str(args.output.suffix))]}.csv'
 fnamejson = f'{str(args.output)[0:len(str(args.output))-len(str(args.output.suffix))]}.json'
-with open(fnamejson, 'w', encoding='utf-8') as f:
-f.write(dumps(path_result_json(result), indent=2, ensure_ascii=False))
-with open(fnamecsv,"w", encoding='utf-8') as fcsv :
-jsontocsv(temp,equipment,fcsv)
+with open(fnamejson, 'w', encoding='utf-8') as fjson:
+fjson.write(dumps(path_result_json(result), indent=2, ensure_ascii=False))
+with open(fnamecsv, "w", encoding='utf-8') as fcsv:
+jsontocsv(temp, equipment, fcsv)
 print('\x1b[1;34;40m'+f'saving in {args.output} and {fnamecsv}'+ '\x1b[0m')


+if __name__ == '__main__':
+ARGS = PARSER.parse_args()
+basicConfig(level={2: DEBUG, 1: INFO, 0: CRITICAL}.get(ARGS.verbose, DEBUG))
+main(ARGS)
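Note: the snr@0.1nm columns in the summary above rescale the SNR from the signal bandwidth to a 0.1 nm (12.5 GHz) reference, which is exactly what adding lin2db(baud_rate/12.5e9) does. A self-contained sketch of that conversion with made-up numbers:

# SNR conversion to the 0.1 nm (12.5 GHz) reference bandwidth (illustrative values only).
from math import log10

def snr_in_0p1nm(snr_db, baud_rate):
    """Add 10*log10(baud_rate / 12.5 GHz) to an SNR measured in the signal bandwidth."""
    return snr_db + 10 * log10(baud_rate / 12.5e9)

print(round(snr_in_0p1nm(20.0, 32e9), 2))   # 20 dB at 32 GBaud -> 24.08 dB in 0.1 nm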
@@ -282,6 +282,7 @@ if __name__ == '__main__':
 params['trx_mode'] = ''
 params['source'] = source.uid
 params['destination'] = destination.uid
+params['bidir'] = False
 params['nodes_list'] = [destination.uid]
 params['loose_list'] = ['strict']
 params['format'] = ''
@@ -32,6 +32,7 @@ from json import dumps
 from pathlib import Path
 from difflib import get_close_matches
 from gnpy.core.utils import silent_remove
+from gnpy.core.exceptions import NetworkTopologyError
 import time

 all_rows = lambda sh, start=0: (sh.row(x) for x in range(start, sh.nrows))
@@ -509,9 +510,12 @@ def parse_excel(input_filename):
 all_cities = Counter(n.city for n in nodes)
 if len(all_cities) != len(nodes):
 raise ValueError(f'Duplicate city: {all_cities}')
-if any(ln.from_city not in all_cities or
-ln.to_city not in all_cities for ln in links):
-raise ValueError(f'Bad link.')
+bad_links = []
+for lnk in links:
+if lnk.from_city not in all_cities or lnk.to_city not in all_cities:
+bad_links.append([lnk.from_city, lnk.to_city])
+if bad_links:
+raise NetworkTopologyError(f'Bad link(s): {bad_links}.')
+
 return nodes, links, eqpts

@@ -17,3 +17,13 @@ class EquipmentConfigError(ConfigurationError):

 class NetworkTopologyError(ConfigurationError):
 '''Topology of user-provided network is wrong'''
+
+class ServiceError(Exception):
+'''Service of user-provided request is wrong'''
+
+class DisjunctionError(ServiceError):
+'''Disjunction of user-provided request can not be satisfied'''
+
+class SpectrumError(Exception):
+'''Spectrum errors of the program'''
+
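Note: with these classes, validation code can raise instead of calling exit() and let the top-level caller decide how to report the failure. A minimal usage sketch (the classes are re-declared so the snippet is self-contained, and check_spacing is a made-up helper, not project code):

# Minimal sketch of using the new exception hierarchy (illustrative only).
class ServiceError(Exception):
    '''Service of user-provided request is wrong'''

class DisjunctionError(ServiceError):
    '''Disjunction of user-provided request can not be satisfied'''

def check_spacing(request_id, spacing):
    if spacing is None:
        raise ServiceError(f'Request {request_id} missing spacing: spacing is mandatory')
    return spacing

try:
    check_spacing('route 1', None)
except ServiceError as err:
    print(f'Service error: {err}')   # caller chooses the reporting, instead of exit()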
(Diff of one file suppressed because it is too large.)
@@ -22,6 +22,7 @@ from json import dumps
 from pathlib import Path
 from gnpy.core.equipment import load_equipment
 from gnpy.core.utils import db2lin, lin2db
+from gnpy.core.exceptions import ServiceError

 SERVICES_COLUMN = 12
 #EQPT_LIBRARY_FILENAME = Path(__file__).parent / 'eqpt_config.json'
@@ -43,7 +44,7 @@ class Element:
 return hash((type(self), self.uid))

 class Request_element(Element):
-def __init__(self,Request,eqpt_filename):
+def __init__(self, Request, eqpt_filename, bidir):
 # request_id is str
 # excel has automatic number formatting that adds .0 on integer values
 # the next lines recover the pure int value, assuming this .0 is unwanted
@@ -54,6 +55,7 @@ class Request_element(Element):
 # be a string starting with 'trx' : this is manually added here.
 self.srctpid = f'trx {Request.source}'
 self.dsttpid = f'trx {Request.destination}'
+self.bidir = bidir
 # test that trx_type belongs to eqpt_config.json
 # if not replace it with a default
 equipment = load_equipment(eqpt_filename)
@@ -76,14 +78,14 @@ class Request_element(Element):
 msg = f'Request Id: {self.request_id} - could not find tsp : \'{Request.trx_type}\' with mode: \'{Request.mode}\' in eqpt library \nComputation stopped.'
 #print(msg)
 logger.critical(msg)
-exit()
+raise ServiceError(msg)
 # excel input are in GHz and dBm
 if Request.spacing is not None:
 self.spacing = Request.spacing * 1e9
 else:
 msg = f'Request {self.request_id} missing spacing: spacing is mandatory.\ncomputation stopped'
 logger.critical(msg)
-exit()
+raise ServiceError(msg)
 if Request.power is not None:
 self.power = db2lin(Request.power) * 1e-3
 else:
@@ -132,13 +134,14 @@ class Request_element(Element):
 uid = property(lambda self: repr(self))
 @property
 def pathrequest(self):
+# Default assumption for bidir is False
 req_dictionnary = {
 'request-id':self.request_id,
 'source': self.source,
 'destination': self.destination,
 'src-tp-id': self.srctpid,
 'dst-tp-id': self.dsttpid,
+'bidirectional': self.bidir,
 'path-constraints':{
 'te-bandwidth': {
 'technology': 'flexi-grid',
@@ -187,9 +190,13 @@ class Request_element(Element):
 def json(self):
 return self.pathrequest , self.pathsync

-def convert_service_sheet(input_filename, eqpt_filename, output_filename='', filter_region=[]):
+def convert_service_sheet(input_filename, eqpt_filename, output_filename='', bidir=False, filter_region=None):
+""" converts a service sheet into a json structure
+"""
+if filter_region is None:
+filter_region = []
 service = parse_excel(input_filename)
-req = [Request_element(n,eqpt_filename) for n in service]
+req = [Request_element(n, eqpt_filename, bidir) for n in service]
 # dumps the output into a json file with name
 # split_filename = [input_filename[0:len(input_filename)-len(suffix_filename)] , suffix_filename[1:]]
 if output_filename=='':
@@ -233,21 +240,25 @@ def parse_excel(input_filename):
 return services

 def parse_service_sheet(service_sheet):
+""" reads each column according to authorized fieldnames. order is not important.
+"""
 logger.info(f'Validating headers on {service_sheet.name!r}')
 # add a test on field to enable the '' field case that arises when columns on the
 # right hand side are used as comments or drawing in the excel sheet
-header = [x.value.strip() for x in service_sheet.row(4)[0:SERVICES_COLUMN] if len(x.value.strip())>0]
+header = [x.value.strip() for x in service_sheet.row(4)[0:SERVICES_COLUMN]
+if len(x.value.strip()) > 0]
+
 # create a service_fieldname independant from the excel column order
 # to be compatible with any version of the sheet
 # the following dictionnary records the excel field names and the corresponding parameter's name

-authorized_fieldnames = {'route id':'request_id', 'Source':'source', 'Destination':'destination', \
+authorized_fieldnames = {
+'route id':'request_id', 'Source':'source', 'Destination':'destination', \
 'TRX type':'trx_type', 'Mode' : 'mode', 'System: spacing':'spacing', \
 'System: input power (dBm)':'power', 'System: nb of channels':'nb_channel',\
 'routing: disjoint from': 'disjoint_from', 'routing: path':'nodes_list',\
 'routing: is loose?':'is_loose', 'path bandwidth':'path_bandwidth'}
-try :
+try:
 service_fieldnames = [authorized_fieldnames[e] for e in header]
 except KeyError:
 msg = f'Malformed header on Service sheet: {header} field not in {authorized_fieldnames}'
gnpy/core/spectrum_assignment.py (new file, 386 lines)
@@ -0,0 +1,386 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
"""
|
||||||
|
gnpy.core.spectrum_assignment
|
||||||
|
=============================
|
||||||
|
|
||||||
|
This module contains the Oms and Bitmap classes and the different method to
|
||||||
|
select and assign spectrum. Spectrum_selection function identifies the free
|
||||||
|
slots and select_candidate selects the candidate spectrum according to
|
||||||
|
strategy: for example first fit
|
||||||
|
oms records its elements, and elements are updated with an oms to have
|
||||||
|
element/oms correspondace
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections import namedtuple
|
||||||
|
from logging import getLogger
|
||||||
|
from math import ceil
|
||||||
|
from gnpy.core.elements import Roadm, Transceiver
|
||||||
|
from gnpy.core.exceptions import SpectrumError
|
||||||
|
|
||||||
|
LOGGER = getLogger(__name__)
|
||||||
|
|
||||||
|
class Bitmap:
    """ records the spectrum occupation
    """
    def __init__(self, f_min, f_max, grid, guardband=0.15e12, bitmap=None):
        # n_min is the min index including the guardband. The guardband is required to make
        # sure that a channel can be assigned with center frequency f_min (meaning that its
        # slot occupation goes below freq_index_min)
        n_min = frequency_to_n(f_min - guardband, grid)
        n_max = frequency_to_n(f_max + guardband, grid) - 1
        self.n_min = n_min
        self.n_max = n_max
        self.freq_index_min = frequency_to_n(f_min)
        self.freq_index_max = frequency_to_n(f_max)
        self.freq_index = list(range(n_min, n_max + 1))
        if bitmap is None:
            self.bitmap = [1] * (n_max - n_min + 1)
        elif len(bitmap) == len(self.freq_index):
            self.bitmap = bitmap
        else:
            raise SpectrumError(f'bitmap is not consistent with f_min {f_min} - n: {n_min} '
                                f'and f_max {f_max} - n: {n_max}')

    def getn(self, i):
        """ converts a local index into the n value (ITU-T grid)
        """
        return self.freq_index[i]

    def geti(self, nvalue):
        """ converts an n value (ITU-T grid) into its local index
        """
        return self.freq_index.index(nvalue)

    def insert_left(self, newbitmap):
        """ insert bitmap on the left to align oms bitmaps if their start frequencies are different
        """
        self.bitmap = newbitmap + self.bitmap
        temp = list(range(self.n_min - len(newbitmap), self.n_min))
        self.freq_index = temp + self.freq_index
        self.n_min = self.freq_index[0]

    def insert_right(self, newbitmap):
        """ insert bitmap on the right to align oms bitmaps if their stop frequencies are different
        """
        self.bitmap = self.bitmap + newbitmap
        self.freq_index = self.freq_index + list(range(self.n_max, self.n_max + len(newbitmap)))
        self.n_max = self.freq_index[-1]


# +'grid available_slots f_min f_max services_list')
OMSParams = namedtuple('OMSParams', 'oms_id el_id_list el_list')


class OMS:
    """ OMS class is the logical container that represents a link between two adjacent ROADMs
        and records the crossed elements and the occupied spectrum
    """
    def __init__(self, *args, **params):
        params = OMSParams(**params)
        self.oms_id = params.oms_id
        self.el_id_list = params.el_id_list
        self.el_list = params.el_list
        self.spectrum_bitmap = []
        self.nb_channels = 0
        self.service_list = []
    # TODO

    def __str__(self):
        return '\n\t'.join([f'{type(self).__name__} {self.oms_id}',
                            f'{self.el_id_list[0]} - {self.el_id_list[-1]}'])

    def __repr__(self):
        return '\n\t'.join([f'{type(self).__name__} {self.oms_id}',
                            f'{self.el_id_list[0]} - {self.el_id_list[-1]}', '\n'])

    def add_element(self, elem):
        """ records oms elements
        """
        self.el_id_list.append(elem.uid)
        self.el_list.append(elem)

    def update_spectrum(self, f_min, f_max, guardband=0.15e12, existing_spectrum=None,
                        grid=0.00625e12):
        """ frequencies expressed in Hz
        """
        if existing_spectrum is None:
            # add some 150 GHz margin to enable a center channel on f_min
            # use ITU-T G.694.1
            # Flexible DWDM grid definition
            # For the flexible DWDM grid, the allowed frequency slots have a nominal
            # central frequency (in THz) defined by:
            # 193.1 + n × 0.00625, where n is a positive or negative integer including 0
            # and 0.00625 is the nominal central frequency granularity in THz,
            # and a slot width defined by:
            # 12.5 × m, where m is a positive integer and 12.5 is the slot width granularity
            # in GHz.
            # Any combination of frequency slots is allowed as long as no two frequency
            # slots overlap.

            # TODO: add an explanation on that / parametrize ...
            self.spectrum_bitmap = Bitmap(f_min, f_max, grid, guardband)
            # print(len(self.spectrum_bitmap.bitmap))

    def assign_spectrum(self, nvalue, mvalue):
        """ change oms spectrum to mark spectrum assigned
        """
        if (nvalue is None or mvalue is None or isinstance(nvalue, float)
                or isinstance(mvalue, float) or mvalue == 0):
            raise SpectrumError('could not assign None values')
        startn, stopn = mvalue_to_slots(nvalue, mvalue)
        # print(f'startn stop n {startn} , {stopn}')
        # assumes that guardbands are sufficient to ensure that assigning a center channel
        # at fmin or fmax is OK if startn > self.spectrum_bitmap.n_min
        if (nvalue <= self.spectrum_bitmap.freq_index_max and
                nvalue >= self.spectrum_bitmap.freq_index_min and
                stopn <= self.spectrum_bitmap.n_max and
                startn > self.spectrum_bitmap.n_min):
            # both slices have the same length (stopn - startn + 1) by construction
            self.spectrum_bitmap.bitmap[self.spectrum_bitmap.geti(startn):self.spectrum_bitmap.geti(stopn) + 1] = [0] * (stopn - startn + 1)
            return True
        else:
            msg = f'Could not assign n {nvalue}, m {mvalue} values: ' \
                  'one or several slots are not available'
            LOGGER.info(msg)
            return False

    def add_service(self, service_id, nb_wl):
        """ record service and mark spectrum as occupied
        """
        self.service_list.append(service_id)
        self.nb_channels += nb_wl

def frequency_to_n(freq, grid=0.00625e12):
    """ converts a frequency into the n value (ITU-T grid)
    """
    return int((freq - 193.1e12) / grid)


def nvalue_to_frequency(nvalue, grid=0.00625e12):
    """ converts an n value into a frequency
    """
    return 193.1e12 + nvalue * grid


def mvalue_to_slots(nvalue, mvalue):
    """ converts center n and m into start and stop n
    """
    startn = nvalue - mvalue
    stopn = nvalue + mvalue - 1
    return startn, stopn


def slots_to_m(startn, stopn):
    """ converts the start and stop n values into the center n and m value
    """
    nvalue = int((startn + stopn + 1) / 2)
    mvalue = int((stopn - startn + 1) / 2)
    return nvalue, mvalue


def m_to_freq(nvalue, mvalue, grid=0.00625e12):
    """ converts (n, m) into a frequency range
    """
    startn, stopn = mvalue_to_slots(nvalue, mvalue)
    fstart = nvalue_to_frequency(startn, grid)
    fstop = nvalue_to_frequency(stopn + 1, grid)
    return fstart, fstop


def align_grids(oms_list):
    """ used to apply the same grid to all oms: same starting n, stop n and slot size
        out-of-grid slots are set to 0
    """
    n_min = min([o.spectrum_bitmap.n_min for o in oms_list])
    n_max = max([o.spectrum_bitmap.n_max for o in oms_list])
    for this_o in oms_list:
        if (this_o.spectrum_bitmap.n_min - n_min) > 0:
            this_o.spectrum_bitmap.insert_left([0] * (this_o.spectrum_bitmap.n_min - n_min))
        if (n_max - this_o.spectrum_bitmap.n_max) > 0:
            this_o.spectrum_bitmap.insert_right([0] * (n_max - this_o.spectrum_bitmap.n_max))
    return oms_list


def build_oms_list(network, equipment):
    """ initialization of the OMS list in the network
        an oms is built by reading all intermediate nodes between two adjacent ROADMs;
        each element within the list is given an oms and oms_id to record the
        oms it belongs to.
        the function supports different spectrum widths and supposes that the whole network
        works with the min range among OMSs
    """
    oms_id = 0
    oms_list = []
    for node in [n for n in network.nodes() if isinstance(n, Roadm)]:
        for edge in network.edges([node]):
            if not isinstance(edge[1], Transceiver):
                nd_in = edge[0]  # nd_in is a Roadm
                try:
                    nd_in.oms_list.append(oms_id)
                except AttributeError:
                    nd_in.oms_list = []
                    nd_in.oms_list.append(oms_id)
                nd_out = edge[1]

                params = {}
                params['oms_id'] = oms_id
                params['el_id_list'] = []
                params['el_list'] = []
                oms = OMS(**params)
                oms.add_element(nd_in)
                while not isinstance(nd_out, Roadm):
                    oms.add_element(nd_out)
                    # add an oms_id in the element
                    nd_out.oms_id = oms_id
                    nd_out.oms = oms
                    n_temp = nd_out
                    nd_out = next(n[1] for n in network.edges([n_temp]) if n[1].uid != nd_in.uid)
                    nd_in = n_temp

                oms.add_element(nd_out)
                # nd_out is a Roadm
                try:
                    nd_out.oms_list.append(oms_id)
                except AttributeError:
                    nd_out.oms_list = []
                    nd_out.oms_list.append(oms_id)

                oms.update_spectrum(equipment['SI']['default'].f_min,
                                    equipment['SI']['default'].f_max, grid=0.00625e12)
                # oms.assign_spectrum(13, 7) gives back (193137500000000.0, 193225000000000.0)
                # as in the example in the standard
                # oms.assign_spectrum(13, 7)

                oms_list.append(oms)
                oms_id += 1
    oms_list = align_grids(oms_list)
    reversed_oms(oms_list)
    return oms_list


def reversed_oms(oms_list):
    """ identifies the reversed OMS
        only applicable for non-parallel OMS
    """
    for oms in oms_list:
        has_reversed = False
        for this_o in oms_list:
            if (oms.el_id_list[0] == this_o.el_id_list[-1] and
                    oms.el_id_list[-1] == this_o.el_id_list[0]):
                oms.reversed_oms = this_o
                has_reversed = True
                break
        if not has_reversed:
            oms.reversed_oms = None


def bitmap_sum(band1, band2):
    """ marks a slot with 0 if it is occupied in band1 or in band2
    """
    res = []
    for i, elem in enumerate(band1):
        if band2[i] * elem == 0:
            res.append(0)
        else:
            res.append(1)
    return res


def spectrum_selection(pth, oms_list, requested_m, requested_n=None):
    """ collects spectrum availability and calls the select_candidate function

    # step 1: collect the spectrum availability over pth
    # step 2: if n is not None, try to assign this exact spectrum;
    #         if the spectrum is not available, send back an "error" (None candidate);
    #         if n is None, select a candidate spectrum
    #         that fits the policy (first fit, random, ABP...)
    # step 3: return the selection
    """

    # use indexes instead of ITU-T n values
    path_oms = []
    for elem in pth:
        if not isinstance(elem, Roadm) and not isinstance(elem, Transceiver):
            # only edfa, fused and fiber elements have an oms_id attribute
            path_oms.append(elem.oms_id)
    # remove duplicate oms_id, order is not important
    path_oms = list(set(path_oms))
    # assuming all oms have the same freq index
    if not path_oms:
        candidate = (None, None, None)
        return candidate, path_oms
    freq_index = oms_list[path_oms[0]].spectrum_bitmap.freq_index
    freq_index_min = oms_list[path_oms[0]].spectrum_bitmap.freq_index_min
    freq_index_max = oms_list[path_oms[0]].spectrum_bitmap.freq_index_max

    freq_availability = oms_list[path_oms[0]].spectrum_bitmap.bitmap
    for oms in path_oms[1:]:
        freq_availability = bitmap_sum(oms_list[oms].spectrum_bitmap.bitmap, freq_availability)
    if requested_n is None:
        # avoid the slots reserved on the edges (0.15e12 Hz guardband on both sides -> 24 slots)
        candidates = [(freq_index[i] + requested_m, freq_index[i], freq_index[i] + 2 * requested_m - 1)
                      for i in range(len(freq_availability))
                      if freq_availability[i:i + 2 * requested_m] == [1] * (2 * requested_m)
                      and freq_index[i] >= freq_index_min
                      and freq_index[i + 2 * requested_m - 1] <= freq_index_max]

        candidate = select_candidate(candidates, policy='first_fit')
    else:
        i = oms_list[path_oms[0]].spectrum_bitmap.geti(requested_n)
        # print(f'N {requested_n} i {i}')
        # print(freq_availability[i-m:i+m])
        # print(freq_index[i-m:i+m])
        if (freq_availability[i - requested_m:i + requested_m] == [1] * (2 * requested_m) and
                freq_index[i - requested_m] >= freq_index_min
                and freq_index[i + requested_m - 1] <= freq_index_max):
            # candidate is the triplet center_n, startn and stopn
            candidate = (requested_n, requested_n - requested_m, requested_n + requested_m - 1)
        else:
            candidate = (None, None, None)
    # print(candidate)
    return candidate, path_oms


def select_candidate(candidates, policy):
    """ selects a candidate among all available spectrum
    """
    if policy == 'first_fit':
        if candidates:
            return candidates[0]
        else:
            return (None, None, None)
    else:
        raise ServiceError('Only first_fit spectrum assignment policy is implemented.')


def pth_assign_spectrum(pths, rqs, oms_list, rpths):
    """ basic first fit assignment
        if reversed paths are provided, it means that the occupation is bidirectional
    """
    for i, pth in enumerate(pths):
        # computes the number of channels required
        try:
            if rqs[i].blocking_reason:
                rqs[i].blocked = True
                rqs[i].N = 0
                rqs[i].M = 0
        except AttributeError:
            nb_wl = ceil(rqs[i].path_bandwidth / rqs[i].bit_rate)
            # computes the total nb of slots according to the requested spacing
            # TODO: express superchannels
            # assumes that all channels must be grouped
            # TODO: enable non-contiguous reservation in case of blocking
            requested_m = ceil(rqs[i].spacing / 0.0125e12) * nb_wl
            # concatenate path and reversed path elements to derive slot availability
            (center_n, startn, stopn), path_oms = spectrum_selection(pth + rpths[i], oms_list,
                                                                     requested_m, requested_n=None)
            # if not None, center_n and the start/stop frequencies apply to all oms of pth
            # checks that the spectrum is not None, else indicates the blocking reason
            if center_n is not None:
                # checks that requested_m fits between startn and stopn
                if 2 * requested_m > (stopn - startn + 1):
                    msg = f'candidate: {(center_n, startn, stopn)} is not consistent ' \
                          f'with {requested_m}'
                    LOGGER.critical(msg)
                    raise ValueError(msg)

                for oms_elem in path_oms:
                    oms_list[oms_elem].assign_spectrum(center_n, requested_m)
                    oms_list[oms_elem].add_service(rqs[i].request_id, nb_wl)
                rqs[i].blocked = False
                rqs[i].N = center_n
                rqs[i].M = requested_m
            else:
                rqs[i].blocked = True
                rqs[i].N = 0
                rqs[i].M = 0
                rqs[i].blocking_reason = 'NO_SPECTRUM'
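A quick usage sketch of the conversion helpers defined in this new module (not part of the diff; it assumes gnpy is installed so that gnpy.core.spectrum_assignment is importable, and it reuses the ITU-T G.694.1 example quoted in build_oms_list):

# Usage sketch for the helpers above (assumption: run in an environment with gnpy installed).
from gnpy.core.spectrum_assignment import (frequency_to_n, nvalue_to_frequency,
                                           mvalue_to_slots, slots_to_m, m_to_freq)

# center index n=13, half-width m=7 -> occupied slots 6..19 on the 6.25 GHz grid
startn, stopn = mvalue_to_slots(13, 7)
assert (startn, stopn) == (6, 19)
assert slots_to_m(startn, stopn) == (13, 7)    # round trip back to (n, m)

# the corresponding band is 193.1375 THz .. 193.225 THz, as noted in build_oms_list
fstart, fstop = m_to_freq(13, 7)
print(fstart, fstop)                           # 193137500000000.0 193225000000000.0

# n=0 is anchored at 193.1 THz with a 6.25 GHz granularity
assert frequency_to_n(193.1e12) == 0
assert nvalue_to_frequency(0) == 193.1e12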
@@ -77,6 +77,9 @@ def compare_networks(expected, actual):
|
|||||||
def compare_services(expected, actual):
|
def compare_services(expected, actual):
|
||||||
requests = compare(expected['path-request'], actual['path-request'],
|
requests = compare(expected['path-request'], actual['path-request'],
|
||||||
key=lambda el: el['request-id'])
|
key=lambda el: el['request-id'])
|
||||||
|
synchronizations = compare(expected['path-request'], expected['path-request'],
|
||||||
|
key=lambda el: el['request-id'])
|
||||||
|
if 'synchronization' in expected.keys():
|
||||||
synchronizations = compare(expected['synchronization'], actual['synchronization'],
|
synchronizations = compare(expected['synchronization'], actual['synchronization'],
|
||||||
key=lambda el: el['synchronization-id'])
|
key=lambda el: el['synchronization-id'])
|
||||||
return ServicesResults(requests, synchronizations)
|
return ServicesResults(requests, synchronizations)
|
||||||
|
|||||||
97
tests/data/expected_results_science_utils.csv
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
,signal,ase,nli
|
||||||
|
0,0.0002869472910750076,3.829243751386179e-08,2.157043502374111e-07
|
||||||
|
1,0.000284426444181902,3.8108068606265256e-08,2.1799950841472648e-07
|
||||||
|
2,0.0002819286625240274,3.7925434667811625e-08,2.2023841125044652e-07
|
||||||
|
3,0.0002794537215642205,3.774451238936698e-08,2.224218994135113e-07
|
||||||
|
4,0.0002756243295734432,3.739256063612741e-08,2.2343448272114653e-07
|
||||||
|
5,0.0002718482755003954,3.7044477620123535e-08,2.2437826192962217e-07
|
||||||
|
6,0.0002681247979313455,3.6700201831013766e-08,2.2525495466695055e-07
|
||||||
|
7,0.0002644507001383656,3.635953568122817e-08,2.2606415187870565e-07
|
||||||
|
8,0.0002608253488031495,3.602242321653821e-08,2.268074852150968e-07
|
||||||
|
9,0.00025690468888571607,3.564391587795796e-08,2.2718285844824803e-07
|
||||||
|
10,0.0002530414048173237,3.5269661038482016e-08,2.2749429758476786e-07
|
||||||
|
11,0.0002492279873568786,3.4899736994459975e-08,2.277374766526846e-07
|
||||||
|
12,0.0002454639458992114,3.4534068616323406e-08,2.2791414400784552e-07
|
||||||
|
13,0.00024174879168999762,3.417258192135115e-08,2.280260208417629e-07
|
||||||
|
14,0.00023798746912556782,3.3802278288721e-08,2.2798420759779948e-07
|
||||||
|
15,0.00023427697848575827,3.3436265380528345e-08,2.2788101592690985e-07
|
||||||
|
16,0.00023061678363205047,3.30744682841412e-08,2.2771816297652923e-07
|
||||||
|
17,0.00022700656967542085,3.271682680678683e-08,2.2749755602884014e-07
|
||||||
|
18,0.0002234457948096593,3.236326805537296e-08,2.236182244259085e-07
|
||||||
|
19,0.0002195336193536736,3.195819496314336e-08,2.193976173454328e-07
|
||||||
|
20,0.00021568313139087874,3.155821230359698e-08,2.1524945887103656e-07
|
||||||
|
21,0.00021189361260563733,3.116322489050993e-08,2.1117277567390236e-07
|
||||||
|
22,0.00020816423698459606,3.0773141693336075e-08,2.0716649124094935e-07
|
||||||
|
23,0.0002044941867087381,3.038787321635763e-08,2.032295417993187e-07
|
||||||
|
24,0.00020116081520673765,3.00440338127331e-08,1.9963693210324778e-07
|
||||||
|
25,0.00019787569461895006,2.9704199888387147e-08,1.9610141536963145e-07
|
||||||
|
26,0.00019463824873065924,2.9368302916351224e-08,1.9262221997372471e-07
|
||||||
|
27,0.0001914486066928752,2.903632427420397e-08,1.8919927457565086e-07
|
||||||
|
28,0.00018830616497930887,2.870819640079397e-08,1.858317840670677e-07
|
||||||
|
29,0.00018521032563368435,2.838385281897912e-08,1.8251896218718178e-07
|
||||||
|
30,0.00018216049720979434,2.8063228018898468e-08,1.7926003240909075e-07
|
||||||
|
31,0.0001791561867005718,2.7746255438682553e-08,1.76054318231933e-07
|
||||||
|
32,0.00017619680881744213,2.7432871709278503e-08,1.7290105534292413e-07
|
||||||
|
33,0.00017328178390236163,2.7123014438128492e-08,1.6979948820364567e-07
|
||||||
|
34,0.00017049664136784971,2.6828118382010868e-08,1.668331233176527e-07
|
||||||
|
35,0.0001677518922618999,2.6536524600591003e-08,1.639139770351797e-07
|
||||||
|
36,0.00016504703499520338,2.6248178236430935e-08,1.6104139135571758e-07
|
||||||
|
37,0.0001623826677977635,2.596311344676757e-08,1.579538179464147e-07
|
||||||
|
38,0.0001597582427278653,2.5681275450827438e-08,1.549209871570718e-07
|
||||||
|
39,0.0001571732182028194,2.5402610321183817e-08,1.5194201541886346e-07
|
||||||
|
40,0.00015462705891566638,2.512706495768609e-08,1.490160317195833e-07
|
||||||
|
41,0.00015212101646392648,2.4854546722771583e-08,1.4614388817377845e-07
|
||||||
|
42,0.00014965447757986727,2.4585006051161647e-08,1.4332463586636234e-07
|
||||||
|
43,0.00014722683809507942,2.4318394065447274e-08,1.4055734193947907e-07
|
||||||
|
44,0.0001447164668892396,2.4034548127308286e-08,1.3772590008270512e-07
|
||||||
|
45,0.00014224784112375704,2.3753926686114635e-08,1.3494914625939818e-07
|
||||||
|
46,0.00013982028367499942,2.3476475779461364e-08,1.3222606385780792e-07
|
||||||
|
47,0.00013743418748445304,2.3202244204140228e-08,1.2955665313419502e-07
|
||||||
|
48,0.00013508884015386575,2.2931178307200807e-08,1.269398709602497e-07
|
||||||
|
49,0.00013278354172499636,2.2663225269637508e-08,1.243746944213211e-07
|
||||||
|
50,0.0001305176041972383,2.2398333101097452e-08,1.2186012017916144e-07
|
||||||
|
51,0.00012829168984639723,2.2136419884279648e-08,1.1939640981690787e-07
|
||||||
|
52,0.00012610506317956035,2.1877436733290284e-08,1.169825203056231e-07
|
||||||
|
53,0.000123957002859191,2.1621335420785434e-08,1.1461743054419468e-07
|
||||||
|
54,0.00012180241033649304,2.1360152817604167e-08,1.1225922783038433e-07
|
||||||
|
55,0.00011968650905779935,2.1101906890578305e-08,1.0994951537259513e-07
|
||||||
|
56,0.000117608577762061,2.0846548870078847e-08,1.0757395097864581e-07
|
||||||
|
57,0.00011556891128259058,2.0594151467353748e-08,1.0524972555992308e-07
|
||||||
|
58,0.00011356676177301841,2.0344667169015006e-08,1.0297570549831857e-07
|
||||||
|
59,0.00011160139690545192,2.00980493433389e-08,1.0075078305548045e-07
|
||||||
|
60,0.00010967209909252646,1.985425227516509e-08,9.857387536569511e-08
|
||||||
|
61,0.00010777915187087522,1.9613208260272527e-08,9.644480679616336e-08
|
||||||
|
62,0.00010592181397175155,1.937487453011716e-08,9.436248424611683e-08
|
||||||
|
63,0.00010409936038610526,1.913920913597429e-08,9.23258408012148e-08
|
||||||
|
64,0.00010246447558375888,1.8936226281729442e-08,9.046927135291653e-08
|
||||||
|
65,0.00010085803630104006,1.87354387522902e-08,8.865067925960373e-08
|
||||||
|
66,9.927950010553608e-05,1.853681852284204e-08,8.686925127146881e-08
|
||||||
|
67,9.772837346090978e-05,1.834034443508121e-08,8.512422533827548e-08
|
||||||
|
68,9.620413430112097e-05,1.8145990199784238e-08,8.341482250639003e-08
|
||||||
|
69,9.470627135913274e-05,1.795373041706864e-08,8.174028142913882e-08
|
||||||
|
70,9.323428359797426e-05,1.776354066998682e-08,8.009985766376296e-08
|
||||||
|
71,9.178813743816942e-05,1.7575386852678668e-08,7.849321446941785e-08
|
||||||
|
72,9.03673300948529e-05,1.7389247191220127e-08,7.691961625609547e-08
|
||||||
|
73,8.897136946427622e-05,1.7205101122769978e-08,7.537834446342857e-08
|
||||||
|
74,8.760740745800998e-05,1.7025337039390582e-08,7.387513417420477e-08
|
||||||
|
75,8.626710469266086e-05,1.684760610568072e-08,7.274492099363918e-08
|
||||||
|
76,8.495000573672162e-05,1.6671894857242002e-08,7.163427447510873e-08
|
||||||
|
77,8.365569697520994e-05,1.649819993412593e-08,7.054284583689279e-08
|
||||||
|
78,8.238374036674246e-05,1.6326513144182658e-08,6.947026569965565e-08
|
||||||
|
79,8.113370706498376e-05,1.6156829499842502e-08,6.841617243780552e-08
|
||||||
|
80,7.990517700269747e-05,1.5989147949913657e-08,6.738021182874466e-08
|
||||||
|
81,7.86978423091888e-05,1.5823469853370494e-08,6.636212425984957e-08
|
||||||
|
82,7.751129541079691e-05,1.5659805288834794e-08,6.536156604375694e-08
|
||||||
|
83,7.634513730458643e-05,1.549817228640182e-08,6.4378200720386e-08
|
||||||
|
84,7.530262080974352e-05,1.5364274253504764e-08,6.349909645089537e-08
|
||||||
|
85,7.427675504203847e-05,1.523236211656126e-08,6.263403294276386e-08
|
||||||
|
86,7.326723873728748e-05,1.5102509684796054e-08,6.17827561543225e-08
|
||||||
|
87,7.227232864621635e-05,1.497407531211962e-08,6.094379608688325e-08
|
||||||
|
88,7.129179755315639e-05,1.4847053209180731e-08,6.011696114034632e-08
|
||||||
|
89,7.032542203609286e-05,1.4721438007057792e-08,5.930206291361871e-08
|
||||||
|
90,6.937298231674387e-05,1.4597224779058979e-08,5.8498916078193026e-08
|
||||||
|
91,6.843339696762452e-05,1.4474430063551042e-08,5.7706608718023995e-08
|
||||||
|
92,6.750649045006184e-05,1.435304906112738e-08,5.692499280974924e-08
|
||||||
|
93,6.659208967850971e-05,1.4233077472549144e-08,5.615392239861094e-08
|
||||||
|
94,6.554258932109723e-05,1.4075047005202515e-08,5.5268928972034715e-08
|
||||||
|
95,6.450957734109015e-05,1.3918652473373596e-08,5.439783940505763e-08
|
||||||
|
223
tests/data/raman_fiber_config.json
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
{
|
||||||
|
"uid": "Span1",
|
||||||
|
"params": {
|
||||||
|
"length": 80,
|
||||||
|
"loss_coef": 0.2,
|
||||||
|
"length_units": "km",
|
||||||
|
"att_in": 0,
|
||||||
|
"con_in": 0.5,
|
||||||
|
"con_out": 0.5,
|
||||||
|
"type_variety": "SSMF",
|
||||||
|
"dispersion": 0.0000167,
|
||||||
|
"gamma": 0.00127,
|
||||||
|
"raman_efficiency": {
|
||||||
|
"cr": [
|
||||||
|
0,
|
||||||
|
0.0000094,
|
||||||
|
0.0000292,
|
||||||
|
0.0000488,
|
||||||
|
0.0000682,
|
||||||
|
0.0000831,
|
||||||
|
0.000094,
|
||||||
|
0.0001014,
|
||||||
|
0.0001069,
|
||||||
|
0.0001119,
|
||||||
|
0.0001217,
|
||||||
|
0.0001268,
|
||||||
|
0.0001365,
|
||||||
|
0.000149,
|
||||||
|
0.000165,
|
||||||
|
0.000181,
|
||||||
|
0.0001977,
|
||||||
|
0.0002192,
|
||||||
|
0.0002469,
|
||||||
|
0.0002749,
|
||||||
|
0.0002999,
|
||||||
|
0.0003206,
|
||||||
|
0.0003405,
|
||||||
|
0.0003592,
|
||||||
|
0.000374,
|
||||||
|
0.0003826,
|
||||||
|
0.0003841,
|
||||||
|
0.0003826,
|
||||||
|
0.0003802,
|
||||||
|
0.0003756,
|
||||||
|
0.0003549,
|
||||||
|
0.0003795,
|
||||||
|
0.000344,
|
||||||
|
0.0002933,
|
||||||
|
0.0002024,
|
||||||
|
0.0001158,
|
||||||
|
0.0000846,
|
||||||
|
0.0000714,
|
||||||
|
0.0000686,
|
||||||
|
0.000085,
|
||||||
|
0.0000893,
|
||||||
|
0.0000901,
|
||||||
|
0.0000815,
|
||||||
|
0.0000667,
|
||||||
|
0.0000437,
|
||||||
|
0.0000328,
|
||||||
|
0.0000296,
|
||||||
|
0.0000265,
|
||||||
|
0.0000257,
|
||||||
|
0.0000281,
|
||||||
|
0.0000308,
|
||||||
|
0.0000367,
|
||||||
|
0.0000585,
|
||||||
|
0.0000663,
|
||||||
|
0.0000636,
|
||||||
|
0.000055,
|
||||||
|
0.0000406,
|
||||||
|
0.0000277,
|
||||||
|
0.0000242,
|
||||||
|
0.0000187,
|
||||||
|
0.000016,
|
||||||
|
0.000014,
|
||||||
|
0.0000113,
|
||||||
|
0.0000105,
|
||||||
|
0.0000098,
|
||||||
|
0.0000098,
|
||||||
|
0.0000113,
|
||||||
|
0.0000164,
|
||||||
|
0.0000195,
|
||||||
|
0.0000238,
|
||||||
|
0.0000226,
|
||||||
|
0.0000203,
|
||||||
|
0.0000148,
|
||||||
|
0.0000109,
|
||||||
|
0.0000098,
|
||||||
|
0.0000105,
|
||||||
|
0.0000117,
|
||||||
|
0.0000125,
|
||||||
|
0.0000121,
|
||||||
|
0.0000109,
|
||||||
|
0.0000098,
|
||||||
|
0.0000082,
|
||||||
|
0.0000066,
|
||||||
|
0.0000047,
|
||||||
|
0.0000027,
|
||||||
|
0.0000019,
|
||||||
|
0.0000012,
|
||||||
|
4e-7,
|
||||||
|
2e-7,
|
||||||
|
1e-7
|
||||||
|
],
|
||||||
|
"frequency_offset": [
|
||||||
|
0,
|
||||||
|
500000000000,
|
||||||
|
1000000000000,
|
||||||
|
1500000000000,
|
||||||
|
2000000000000,
|
||||||
|
2500000000000,
|
||||||
|
3000000000000,
|
||||||
|
3500000000000,
|
||||||
|
4000000000000,
|
||||||
|
4500000000000,
|
||||||
|
5000000000000,
|
||||||
|
5500000000000,
|
||||||
|
6000000000000,
|
||||||
|
6500000000000,
|
||||||
|
7000000000000,
|
||||||
|
7500000000000,
|
||||||
|
8000000000000,
|
||||||
|
8500000000000,
|
||||||
|
9000000000000,
|
||||||
|
9500000000000,
|
||||||
|
10000000000000,
|
||||||
|
10500000000000,
|
||||||
|
11000000000000,
|
||||||
|
11500000000000,
|
||||||
|
12000000000000,
|
||||||
|
12500000000000,
|
||||||
|
12750000000000,
|
||||||
|
13000000000000,
|
||||||
|
13250000000000,
|
||||||
|
13500000000000,
|
||||||
|
14000000000000,
|
||||||
|
14500000000000,
|
||||||
|
14750000000000,
|
||||||
|
15000000000000,
|
||||||
|
15500000000000,
|
||||||
|
16000000000000,
|
||||||
|
16500000000000,
|
||||||
|
17000000000000,
|
||||||
|
17500000000000,
|
||||||
|
18000000000000,
|
||||||
|
18250000000000,
|
||||||
|
18500000000000,
|
||||||
|
18750000000000,
|
||||||
|
19000000000000,
|
||||||
|
19500000000000,
|
||||||
|
20000000000000,
|
||||||
|
20500000000000,
|
||||||
|
21000000000000,
|
||||||
|
21500000000000,
|
||||||
|
22000000000000,
|
||||||
|
22500000000000,
|
||||||
|
23000000000000,
|
||||||
|
23500000000000,
|
||||||
|
24000000000000,
|
||||||
|
24500000000000,
|
||||||
|
25000000000000,
|
||||||
|
25500000000000,
|
||||||
|
26000000000000,
|
||||||
|
26500000000000,
|
||||||
|
27000000000000,
|
||||||
|
27500000000000,
|
||||||
|
28000000000000,
|
||||||
|
28500000000000,
|
||||||
|
29000000000000,
|
||||||
|
29500000000000,
|
||||||
|
30000000000000,
|
||||||
|
30500000000000,
|
||||||
|
31000000000000,
|
||||||
|
31500000000000,
|
||||||
|
32000000000000,
|
||||||
|
32500000000000,
|
||||||
|
33000000000000,
|
||||||
|
33500000000000,
|
||||||
|
34000000000000,
|
||||||
|
34500000000000,
|
||||||
|
35000000000000,
|
||||||
|
35500000000000,
|
||||||
|
36000000000000,
|
||||||
|
36500000000000,
|
||||||
|
37000000000000,
|
||||||
|
37500000000000,
|
||||||
|
38000000000000,
|
||||||
|
38500000000000,
|
||||||
|
39000000000000,
|
||||||
|
39500000000000,
|
||||||
|
40000000000000,
|
||||||
|
40500000000000,
|
||||||
|
41000000000000,
|
||||||
|
41500000000000,
|
||||||
|
42000000000000
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"operational": {
|
||||||
|
"temperature": 283,
|
||||||
|
"raman_pumps": [
|
||||||
|
{
|
||||||
|
"power": 0.2,
|
||||||
|
"frequency": 205000000000000,
|
||||||
|
"propagation_direction": "counterprop"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"power": 0.206,
|
||||||
|
"frequency": 201000000000000,
|
||||||
|
"propagation_direction": "counterprop"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"metadata": {
|
||||||
|
"location": {
|
||||||
|
"latitude": 1,
|
||||||
|
"longitude": 0,
|
||||||
|
"city": null,
|
||||||
|
"region": ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
14
tests/data/sim_params.json
Normal file
@@ -0,0 +1,14 @@
{
  "raman_computed_channels": [1, 18, 37, 56, 75],
  "raman_parameters": {
    "flag_raman": true,
    "space_resolution": 10e3,
    "tolerance": 1e-8
  },
  "nli_parameters": {
    "nli_method_name": "ggn_spectrally_separated",
    "wdm_grid_size": 50e9,
    "dispersion_tolerance": 1,
    "phase_shift_tollerance": 0.1
  }
}
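Since this fixture is plain JSON, it can be sanity-checked directly; a minimal sketch (the relative path assumes the repository root as working directory, and how gnpy consumes each key is not shown in this diff):

import json
from pathlib import Path

# quick look at the fixture; key names mirror the file above
params = json.loads(Path('tests/data/sim_params.json').read_text())
print(params['raman_computed_channels'])            # [1, 18, 37, 56, 75]
print(params['raman_parameters']['flag_raman'])     # True
print(params['nli_parameters']['nli_method_name'])  # ggn_spectrally_separated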
BIN
tests/data/testService.xls
Normal file
Binary file not shown.
79
tests/data/testService_services_expected.json
Normal file
@@ -0,0 +1,79 @@
{
  "path-request": [
    {
      "request-id": "0",
      "source": "trx Lorient_KMA",
      "destination": "trx Vannes_KBE",
      "src-tp-id": "trx Lorient_KMA",
      "dst-tp-id": "trx Vannes_KBE",
      "bidirectional": false,
      "path-constraints": {
        "te-bandwidth": {
          "technology": "flexi-grid",
          "trx_type": "Voyager",
          "trx_mode": "mode 1",
          "effective-freq-slot": [
            {
              "N": "null",
              "M": "null"
            }
          ],
          "spacing": 50000000000.0,
          "max-nb-of-channel": 80,
          "output-power": null,
          "path_bandwidth": 100000000000.0
        }
      }
    },
    {
      "request-id": "1",
      "source": "trx Brest_KLA",
      "destination": "trx Vannes_KBE",
      "src-tp-id": "trx Brest_KLA",
      "dst-tp-id": "trx Vannes_KBE",
      "bidirectional": false,
      "path-constraints": {
        "te-bandwidth": {
          "technology": "flexi-grid",
          "trx_type": "Voyager",
          "trx_mode": "mode 1",
          "effective-freq-slot": [
            {
              "N": "null",
              "M": "null"
            }
          ],
          "spacing": 50000000000.0,
          "max-nb-of-channel": null,
          "output-power": 0.0012589254117941673,
          "path_bandwidth": 10000000000.0
        }
      }
    },
    {
      "request-id": "3",
      "source": "trx Lannion_CAS",
      "destination": "trx Rennes_STA",
      "src-tp-id": "trx Lannion_CAS",
      "dst-tp-id": "trx Rennes_STA",
      "bidirectional": false,
      "path-constraints": {
        "te-bandwidth": {
          "technology": "flexi-grid",
          "trx_type": "vendorA_trx-type1",
          "trx_mode": "mode 1",
          "effective-freq-slot": [
            {
              "N": "null",
              "M": "null"
            }
          ],
          "spacing": 50000000000.0,
          "max-nb-of-channel": 80,
          "output-power": 0.0012589254117941673,
          "path_bandwidth": 60000000000.0
        }
      }
    }
  ]
}
Binary file not shown.
File diff suppressed because it is too large
@@ -1,7 +1,8 @@
|
|||||||
response-id,source,destination,path_bandwidth,Pass?,nb of tsp pairs,total cost,transponder-type,transponder-mode,OSNR-0.1nm,SNR-0.1nm,SNR-bandwidth,baud rate (Gbaud),input power (dBm),path
|
response-id,source,destination,path_bandwidth,Pass?,nb of tsp pairs,total cost,transponder-type,transponder-mode,OSNR-0.1nm,SNR-0.1nm,SNR-bandwidth,baud rate (Gbaud),input power (dBm),path,"spectrum (N,M)",reversed path OSNR-0.1nm,reversed path SNR-0.1nm,reversed path SNR-bandwidth
|
||||||
0,trx Lorient_KMA,trx Vannes_KBE,100.0,True,1,1,Voyager,mode 1,30.84,30.84,26.75,32.0,0.0,trx Lorient_KMA | roadm Lorient_KMA | Edfa1_roadm Lorient_KMA | fiber (Lorient_KMA → Vannes_KBE)-F055 | Edfa0_fiber (Lorient_KMA → Vannes_KBE)-F055 | roadm Vannes_KBE | trx Vannes_KBE
|
0,trx Lorient_KMA,trx Vannes_KBE,100.0,True,1,1,Voyager,mode 1,30.84,30.84,26.75,32.0,0.0,trx Lorient_KMA | roadm Lorient_KMA | Edfa1_roadm Lorient_KMA | fiber (Lorient_KMA → Vannes_KBE)-F055 | Edfa0_fiber (Lorient_KMA → Vannes_KBE)-F055 | roadm Vannes_KBE | trx Vannes_KBE,"-284, 4"
|
||||||
1,trx Brest_KLA,trx Vannes_KBE,0.0,True,0,0,Voyager,mode 1,22.65,22.11,18.03,32.0,1.0,trx Brest_KLA | roadm Brest_KLA | Edfa0_roadm Brest_KLA | fiber (Brest_KLA → Morlaix)-F060 | east fused spans in Morlaix | fiber (Morlaix → Lannion_CAS)-F059 | west edfa in Lannion_CAS to Morlaix | roadm Lannion_CAS | east edfa in Lannion_CAS to Corlay | fiber (Lannion_CAS → Corlay)-F061 | west fused spans in Corlay | fiber (Corlay → Loudeac)-F010 | west fused spans in Loudeac | fiber (Loudeac → Lorient_KMA)-F054 | Edfa0_fiber (Loudeac → Lorient_KMA)-F054 | roadm Lorient_KMA | Edfa1_roadm Lorient_KMA | fiber (Lorient_KMA → Vannes_KBE)-F055 | Edfa0_fiber (Lorient_KMA → Vannes_KBE)-F055 | roadm Vannes_KBE | trx Vannes_KBE
|
1,trx Brest_KLA,trx Vannes_KBE,10.0,True,1,1,Voyager,mode 1,22.65,22.11,18.03,32.0,1.0,trx Brest_KLA | roadm Brest_KLA | Edfa0_roadm Brest_KLA | fiber (Brest_KLA → Morlaix)-F060 | east fused spans in Morlaix | fiber (Morlaix → Lannion_CAS)-F059 | west edfa in Lannion_CAS to Morlaix | roadm Lannion_CAS | east edfa in Lannion_CAS to Corlay | fiber (Lannion_CAS → Corlay)-F061 | west fused spans in Corlay | fiber (Corlay → Loudeac)-F010 | west fused spans in Loudeac | fiber (Loudeac → Lorient_KMA)-F054 | Edfa0_fiber (Loudeac → Lorient_KMA)-F054 | roadm Lorient_KMA | Edfa1_roadm Lorient_KMA | fiber (Lorient_KMA → Vannes_KBE)-F055 | Edfa0_fiber (Lorient_KMA → Vannes_KBE)-F055 | roadm Vannes_KBE | trx Vannes_KBE,"-276, 4"
|
||||||
3,trx Lannion_CAS,trx Rennes_STA,60.0,True,1,1,vendorA_trx-type1,mode 1,28.29,25.85,21.77,32.0,1.0,trx Lannion_CAS | roadm Lannion_CAS | east edfa in Lannion_CAS to Stbrieuc | fiber (Lannion_CAS → Stbrieuc)-F056 | east edfa in Stbrieuc to Rennes_STA | fiber (Stbrieuc → Rennes_STA)-F057 | Edfa0_fiber (Stbrieuc → Rennes_STA)-F057 | roadm Rennes_STA | trx Rennes_STA
|
3,trx Lannion_CAS,trx Rennes_STA,60.0,True,1,1,vendorA_trx-type1,mode 1,28.29,25.85,21.77,32.0,1.0,trx Lannion_CAS | roadm Lannion_CAS | east edfa in Lannion_CAS to Stbrieuc | fiber (Lannion_CAS → Stbrieuc)-F056 | east edfa in Stbrieuc to Rennes_STA | fiber (Stbrieuc → Rennes_STA)-F057 | Edfa0_fiber (Stbrieuc → Rennes_STA)-F057 | roadm Rennes_STA | trx Rennes_STA,"-284, 4"
|
||||||
4,trx Rennes_STA,trx Lannion_CAS,150.0,True,1,1,vendorA_trx-type1,mode 2,22.27,22.15,15.05,64.0,0.0,trx Rennes_STA | roadm Rennes_STA | Edfa1_roadm Rennes_STA | fiber (Rennes_STA → Ploermel)- | east edfa in Ploermel to Vannes_KBE | fiber (Ploermel → Vannes_KBE)- | Edfa0_fiber (Ploermel → Vannes_KBE)- | roadm Vannes_KBE | Edfa0_roadm Vannes_KBE | fiber (Vannes_KBE → Lorient_KMA)-F055 | Edfa0_fiber (Vannes_KBE → Lorient_KMA)-F055 | roadm Lorient_KMA | Edfa0_roadm Lorient_KMA | fiber (Lorient_KMA → Loudeac)-F054 | east fused spans in Loudeac | fiber (Loudeac → Corlay)-F010 | east fused spans in Corlay | fiber (Corlay → Lannion_CAS)-F061 | west edfa in Lannion_CAS to Corlay | roadm Lannion_CAS | trx Lannion_CAS
|
4,trx Rennes_STA,trx Lannion_CAS,150.0,True,1,1,vendorA_trx-type1,mode 2,22.27,22.15,15.05,64.0,0.0,trx Rennes_STA | roadm Rennes_STA | Edfa1_roadm Rennes_STA | fiber (Rennes_STA → Ploermel)- | east edfa in Ploermel to Vannes_KBE | fiber (Ploermel → Vannes_KBE)- | Edfa0_fiber (Ploermel → Vannes_KBE)- | roadm Vannes_KBE | Edfa0_roadm Vannes_KBE | fiber (Vannes_KBE → Lorient_KMA)-F055 | Edfa0_fiber (Vannes_KBE → Lorient_KMA)-F055 | roadm Lorient_KMA | Edfa0_roadm Lorient_KMA | fiber (Lorient_KMA → Loudeac)-F054 | east fused spans in Loudeac | fiber (Loudeac → Corlay)-F010 | east fused spans in Corlay | fiber (Corlay → Lannion_CAS)-F061 | west edfa in Lannion_CAS to Corlay | roadm Lannion_CAS | trx Lannion_CAS,"-266, 6"
|
||||||
5,trx Rennes_STA,trx Lannion_CAS,20.0,True,1,1,vendorA_trx-type1,mode 2,30.79,28.77,21.68,64.0,3.0,trx Rennes_STA | roadm Rennes_STA | Edfa0_roadm Rennes_STA | fiber (Rennes_STA → Stbrieuc)-F057 | Edfa0_fiber (Rennes_STA → Stbrieuc)-F057 | fiber (Stbrieuc → Lannion_CAS)-F056 | Edfa0_fiber (Stbrieuc → Lannion_CAS)-F056 | roadm Lannion_CAS | trx Lannion_CAS
|
5,trx Rennes_STA,trx Lannion_CAS,20.0,True,1,1,vendorA_trx-type1,mode 2,30.79,28.77,21.68,64.0,3.0,trx Rennes_STA | roadm Rennes_STA | Edfa0_roadm Rennes_STA | fiber (Rennes_STA → Stbrieuc)-F057 | Edfa0_fiber (Rennes_STA → Stbrieuc)-F057 | fiber (Stbrieuc → Lannion_CAS)-F056 | Edfa0_fiber (Stbrieuc → Lannion_CAS)-F056 | roadm Lannion_CAS | trx Lannion_CAS,"-274, 6"
|
||||||
6,,,,False,0,,,,,,,,,
|
6,,,,NO_PATH,,,,,,,,,,,
|
||||||
|
|
||||||
|
|||||||
|
|
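The new "spectrum (N,M)" column in this CSV can be decoded with the helpers from gnpy/core/spectrum_assignment.py above; a small sketch for the value "-284, 4" reported on the first row (the frequencies follow from the 193.1 THz anchor and the 6.25 GHz granularity):

from gnpy.core.spectrum_assignment import mvalue_to_slots, m_to_freq

center_n, m = -284, 4                 # value reported for request 0
print(mvalue_to_slots(center_n, m))   # (-288, -281): 8 slots of 6.25 GHz
print(m_to_freq(center_n, m))         # about 191.30 THz .. 191.35 THz, a 50 GHz slice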
@@ -6,6 +6,7 @@
|
|||||||
"destination": "trx Vannes_KBE",
|
"destination": "trx Vannes_KBE",
|
||||||
"src-tp-id": "trx Lorient_KMA",
|
"src-tp-id": "trx Lorient_KMA",
|
||||||
"dst-tp-id": "trx Vannes_KBE",
|
"dst-tp-id": "trx Vannes_KBE",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -30,6 +31,7 @@
|
|||||||
"destination": "trx Vannes_KBE",
|
"destination": "trx Vannes_KBE",
|
||||||
"src-tp-id": "trx Brest_KLA",
|
"src-tp-id": "trx Brest_KLA",
|
||||||
"dst-tp-id": "trx Vannes_KBE",
|
"dst-tp-id": "trx Vannes_KBE",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -44,7 +46,7 @@
|
|||||||
"spacing": 50000000000.0,
|
"spacing": 50000000000.0,
|
||||||
"max-nb-of-channel": null,
|
"max-nb-of-channel": null,
|
||||||
"output-power": 0.0012589254117941673,
|
"output-power": 0.0012589254117941673,
|
||||||
"path_bandwidth": 0
|
"path_bandwidth": 10000000000.0
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"explicit-route-objects": {
|
"explicit-route-objects": {
|
||||||
@@ -94,6 +96,7 @@
|
|||||||
"destination": "trx Rennes_STA",
|
"destination": "trx Rennes_STA",
|
||||||
"src-tp-id": "trx Lannion_CAS",
|
"src-tp-id": "trx Lannion_CAS",
|
||||||
"dst-tp-id": "trx Rennes_STA",
|
"dst-tp-id": "trx Rennes_STA",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -118,6 +121,7 @@
|
|||||||
"destination": "trx Lannion_CAS",
|
"destination": "trx Lannion_CAS",
|
||||||
"src-tp-id": "trx Rennes_STA",
|
"src-tp-id": "trx Rennes_STA",
|
||||||
"dst-tp-id": "trx Lannion_CAS",
|
"dst-tp-id": "trx Lannion_CAS",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -142,6 +146,7 @@
|
|||||||
"destination": "trx Lannion_CAS",
|
"destination": "trx Lannion_CAS",
|
||||||
"src-tp-id": "trx Rennes_STA",
|
"src-tp-id": "trx Rennes_STA",
|
||||||
"dst-tp-id": "trx Lannion_CAS",
|
"dst-tp-id": "trx Lannion_CAS",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -166,6 +171,7 @@
|
|||||||
"destination": "trx a",
|
"destination": "trx a",
|
||||||
"src-tp-id": "trx Lannion_CAS",
|
"src-tp-id": "trx Lannion_CAS",
|
||||||
"dst-tp-id": "trx a",
|
"dst-tp-id": "trx a",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
|
|||||||
@@ -6,6 +6,7 @@
|
|||||||
"destination": "trx g",
|
"destination": "trx g",
|
||||||
"src-tp-id": "trx a",
|
"src-tp-id": "trx a",
|
||||||
"dst-tp-id": "trx g",
|
"dst-tp-id": "trx g",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -30,6 +31,7 @@
|
|||||||
"destination": "trx h",
|
"destination": "trx h",
|
||||||
"src-tp-id": "trx a",
|
"src-tp-id": "trx a",
|
||||||
"dst-tp-id": "trx h",
|
"dst-tp-id": "trx h",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -54,6 +56,7 @@
|
|||||||
"destination": "trx b",
|
"destination": "trx b",
|
||||||
"src-tp-id": "trx f",
|
"src-tp-id": "trx f",
|
||||||
"dst-tp-id": "trx b",
|
"dst-tp-id": "trx b",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -78,6 +81,7 @@
|
|||||||
"destination": "trx f",
|
"destination": "trx f",
|
||||||
"src-tp-id": "trx c",
|
"src-tp-id": "trx c",
|
||||||
"dst-tp-id": "trx f",
|
"dst-tp-id": "trx f",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -123,6 +127,7 @@
|
|||||||
"destination": "trx f",
|
"destination": "trx f",
|
||||||
"src-tp-id": "trx c",
|
"src-tp-id": "trx c",
|
||||||
"dst-tp-id": "trx f",
|
"dst-tp-id": "trx f",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -147,6 +152,7 @@
|
|||||||
"destination": "trx g",
|
"destination": "trx g",
|
||||||
"src-tp-id": "trx a",
|
"src-tp-id": "trx a",
|
||||||
"dst-tp-id": "trx g",
|
"dst-tp-id": "trx g",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -171,6 +177,7 @@
|
|||||||
"destination": "trx h",
|
"destination": "trx h",
|
||||||
"src-tp-id": "trx a",
|
"src-tp-id": "trx a",
|
||||||
"dst-tp-id": "trx h",
|
"dst-tp-id": "trx h",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -208,6 +215,7 @@
|
|||||||
"destination": "trx b",
|
"destination": "trx b",
|
||||||
"src-tp-id": "trx f",
|
"src-tp-id": "trx f",
|
||||||
"dst-tp-id": "trx b",
|
"dst-tp-id": "trx b",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -245,6 +253,7 @@
|
|||||||
"destination": "trx f",
|
"destination": "trx f",
|
||||||
"src-tp-id": "trx c",
|
"src-tp-id": "trx c",
|
||||||
"dst-tp-id": "trx f",
|
"dst-tp-id": "trx f",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -269,6 +278,7 @@
|
|||||||
"destination": "trx f",
|
"destination": "trx f",
|
||||||
"src-tp-id": "trx c",
|
"src-tp-id": "trx c",
|
||||||
"dst-tp-id": "trx f",
|
"dst-tp-id": "trx f",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -314,6 +324,7 @@
|
|||||||
"destination": "trx g",
|
"destination": "trx g",
|
||||||
"src-tp-id": "trx a",
|
"src-tp-id": "trx a",
|
||||||
"dst-tp-id": "trx g",
|
"dst-tp-id": "trx g",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -338,6 +349,7 @@
|
|||||||
"destination": "trx h",
|
"destination": "trx h",
|
||||||
"src-tp-id": "trx a",
|
"src-tp-id": "trx a",
|
||||||
"dst-tp-id": "trx h",
|
"dst-tp-id": "trx h",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -362,6 +374,7 @@
|
|||||||
"destination": "trx b",
|
"destination": "trx b",
|
||||||
"src-tp-id": "trx f",
|
"src-tp-id": "trx f",
|
||||||
"dst-tp-id": "trx b",
|
"dst-tp-id": "trx b",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -386,6 +399,7 @@
|
|||||||
"destination": "trx f",
|
"destination": "trx f",
|
||||||
"src-tp-id": "trx c",
|
"src-tp-id": "trx c",
|
||||||
"dst-tp-id": "trx f",
|
"dst-tp-id": "trx f",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -431,6 +445,7 @@
|
|||||||
"destination": "trx f",
|
"destination": "trx f",
|
||||||
"src-tp-id": "trx c",
|
"src-tp-id": "trx c",
|
||||||
"dst-tp-id": "trx f",
|
"dst-tp-id": "trx f",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -455,6 +470,7 @@
|
|||||||
"destination": "trx g",
|
"destination": "trx g",
|
||||||
"src-tp-id": "trx a",
|
"src-tp-id": "trx a",
|
||||||
"dst-tp-id": "trx g",
|
"dst-tp-id": "trx g",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -479,6 +495,7 @@
|
|||||||
"destination": "trx h",
|
"destination": "trx h",
|
||||||
"src-tp-id": "trx a",
|
"src-tp-id": "trx a",
|
||||||
"dst-tp-id": "trx h",
|
"dst-tp-id": "trx h",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -503,6 +520,7 @@
|
|||||||
"destination": "trx b",
|
"destination": "trx b",
|
||||||
"src-tp-id": "trx f",
|
"src-tp-id": "trx f",
|
||||||
"dst-tp-id": "trx b",
|
"dst-tp-id": "trx b",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
@@ -527,6 +545,7 @@
|
|||||||
"destination": "trx b",
|
"destination": "trx b",
|
||||||
"src-tp-id": "trx f",
|
"src-tp-id": "trx f",
|
||||||
"dst-tp-id": "trx b",
|
"dst-tp-id": "trx b",
|
||||||
|
"bidirectional": false,
|
||||||
"path-constraints": {
|
"path-constraints": {
|
||||||
"te-bandwidth": {
|
"te-bandwidth": {
|
||||||
"technology": "flexi-grid",
|
"technology": "flexi-grid",
|
||||||
|
|||||||
@@ -18,10 +18,8 @@ from pathlib import Path
|
|||||||
import pytest
|
import pytest
|
||||||
from gnpy.core.equipment import load_equipment, trx_mode_params, automatic_nch
|
from gnpy.core.equipment import load_equipment, trx_mode_params, automatic_nch
|
||||||
from gnpy.core.network import load_network, build_network
|
from gnpy.core.network import load_network, build_network
|
||||||
from examples.path_requests_run import (requests_from_json , correct_route_list ,
|
from examples.path_requests_run import requests_from_json, correct_route_list, load_requests
|
||||||
load_requests , disjunctions_from_json)
|
from gnpy.core.request import compute_path_dsjctn, propagate, propagate_and_optimize_mode
|
||||||
from gnpy.core.request import (compute_path_dsjctn, isdisjoint , find_reversed_path,
|
|
||||||
propagate,propagate_and_optimize_mode)
|
|
||||||
from gnpy.core.utils import db2lin, lin2db
|
from gnpy.core.utils import db2lin, lin2db
|
||||||
from gnpy.core.elements import Roadm
|
from gnpy.core.elements import Roadm
|
||||||
|
|
||||||
@@ -35,7 +33,7 @@ eqpt_library_name = Path(__file__).parent.parent / 'tests/data/eqpt_config.json'
|
|||||||
@pytest.mark.parametrize("serv",[service_file_name])
|
@pytest.mark.parametrize("serv",[service_file_name])
|
||||||
@pytest.mark.parametrize("expected_mode",[['16QAM', 'PS_SP64_1', 'PS_SP64_1', 'PS_SP64_1', 'mode 2 - fake', 'mode 2', 'PS_SP64_1', 'mode 3', 'PS_SP64_1', 'PS_SP64_1', '16QAM', 'mode 1', 'PS_SP64_1', 'PS_SP64_1', 'mode 1', 'mode 2', 'mode 1', 'mode 2', 'nok']])
|
@pytest.mark.parametrize("expected_mode",[['16QAM', 'PS_SP64_1', 'PS_SP64_1', 'PS_SP64_1', 'mode 2 - fake', 'mode 2', 'PS_SP64_1', 'mode 3', 'PS_SP64_1', 'PS_SP64_1', '16QAM', 'mode 1', 'PS_SP64_1', 'PS_SP64_1', 'mode 1', 'mode 2', 'mode 1', 'mode 2', 'nok']])
|
||||||
def test_automaticmodefeature(net,eqpt,serv,expected_mode):
|
def test_automaticmodefeature(net,eqpt,serv,expected_mode):
|
||||||
data = load_requests(serv,eqpt)
|
data = load_requests(serv, eqpt, bidir=False)
|
||||||
equipment = load_equipment(eqpt)
|
equipment = load_equipment(eqpt)
|
||||||
network = load_network(net,equipment)
|
network = load_network(net,equipment)
|
||||||
|
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ from examples.path_requests_run import (requests_from_json , correct_route_list
|
|||||||
from gnpy.core.request import compute_path_dsjctn, isdisjoint , find_reversed_path
|
from gnpy.core.request import compute_path_dsjctn, isdisjoint , find_reversed_path
|
||||||
from gnpy.core.utils import db2lin, lin2db
|
from gnpy.core.utils import db2lin, lin2db
|
||||||
from gnpy.core.elements import Roadm
|
from gnpy.core.elements import Roadm
|
||||||
|
from gnpy.core.spectrum_assignment import build_oms_list
|
||||||
|
|
||||||
network_file_name = Path(__file__).parent.parent / 'tests/data/testTopology_expected.json'
|
network_file_name = Path(__file__).parent.parent / 'tests/data/testTopology_expected.json'
|
||||||
service_file_name = Path(__file__).parent.parent / 'tests/data/testTopology_testservices.json'
|
service_file_name = Path(__file__).parent.parent / 'tests/data/testTopology_testservices.json'
|
||||||
@@ -29,10 +30,9 @@ eqpt_library_name = Path(__file__).parent.parent / 'tests/data/eqpt_config.json'
|
|||||||
@pytest.mark.parametrize("eqpt", [eqpt_library_name])
|
@pytest.mark.parametrize("eqpt", [eqpt_library_name])
|
||||||
@pytest.mark.parametrize("serv",[service_file_name])
|
@pytest.mark.parametrize("serv",[service_file_name])
|
||||||
def test_disjunction(net,eqpt,serv):
|
def test_disjunction(net,eqpt,serv):
|
||||||
data = load_requests(serv,eqpt)
|
data = load_requests(serv, eqpt, bidir=False)
|
||||||
equipment = load_equipment(eqpt)
|
equipment = load_equipment(eqpt)
|
||||||
network = load_network(net,equipment)
|
network = load_network(net,equipment)
|
||||||
|
|
||||||
# Build the network once using the default power defined in SI in eqpt config
|
# Build the network once using the default power defined in SI in eqpt config
|
||||||
# power density : db2linp(ower_dbm": 0)/power_dbm": 0 * nb channels as defined by
|
# power density : db2linp(ower_dbm": 0)/power_dbm": 0 * nb channels as defined by
|
||||||
# spacing, f_min and f_max
|
# spacing, f_min and f_max
|
||||||
@@ -41,6 +41,7 @@ def test_disjunction(net,eqpt,serv):
|
|||||||
p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
|
p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
|
||||||
equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
|
equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
|
||||||
build_network(network, equipment, p_db, p_total_db)
|
build_network(network, equipment, p_db, p_total_db)
|
||||||
|
build_oms_list(network, equipment)
|
||||||
|
|
||||||
rqs = requests_from_json(data, equipment)
|
rqs = requests_from_json(data, equipment)
|
||||||
rqs = correct_route_list(network, rqs)
|
rqs = correct_route_list(network, rqs)
|
||||||
@@ -56,7 +57,7 @@ def test_disjunction(net,eqpt,serv):
|
|||||||
rqs_id_list = [r.request_id for r in rqs]
|
rqs_id_list = [r.request_id for r in rqs]
|
||||||
p1 = pths[rqs_id_list.index(e[0])][1:-1]
|
p1 = pths[rqs_id_list.index(e[0])][1:-1]
|
||||||
p2 = pths[rqs_id_list.index(e[1])][1:-1]
|
p2 = pths[rqs_id_list.index(e[1])][1:-1]
|
||||||
if isdisjoint(p1,p2) + isdisjoint(p1,find_reversed_path(p2, network)) > 0:
|
if isdisjoint(p1, p2) + isdisjoint(p1, find_reversed_path(p2)) > 0:
|
||||||
test = False
|
test = False
|
||||||
print(f'Computed path (roadms):{[e.uid for e in p1 if isinstance(e, Roadm)]}\n')
|
print(f'Computed path (roadms):{[e.uid for e in p1 if isinstance(e, Roadm)]}\n')
|
||||||
print(f'Computed path (roadms):{[e.uid for e in p2 if isinstance(e, Roadm)]}\n')
|
print(f'Computed path (roadms):{[e.uid for e in p2 if isinstance(e, Roadm)]}\n')
|
||||||
@@ -68,7 +69,7 @@ def test_disjunction(net,eqpt,serv):
|
|||||||
@pytest.mark.parametrize("eqpt", [eqpt_library_name])
|
@pytest.mark.parametrize("eqpt", [eqpt_library_name])
|
||||||
@pytest.mark.parametrize("serv",[service_file_name])
|
@pytest.mark.parametrize("serv",[service_file_name])
|
||||||
def test_does_not_loop_back(net,eqpt,serv):
|
def test_does_not_loop_back(net,eqpt,serv):
|
||||||
data = load_requests(serv,eqpt)
|
data = load_requests(serv, eqpt, bidir=False)
|
||||||
equipment = load_equipment(eqpt)
|
equipment = load_equipment(eqpt)
|
||||||
network = load_network(net,equipment)
|
network = load_network(net,equipment)
|
||||||
|
|
||||||
@@ -80,6 +81,7 @@ def test_does_not_loop_back(net,eqpt,serv):
|
|||||||
p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
|
p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
|
||||||
equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
|
equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
|
||||||
build_network(network, equipment, p_db, p_total_db)
|
build_network(network, equipment, p_db, p_total_db)
|
||||||
|
build_oms_list(network, equipment)
|
||||||
|
|
||||||
rqs = requests_from_json(data, equipment)
|
rqs = requests_from_json(data, equipment)
|
||||||
rqs = correct_route_list(network, rqs)
|
rqs = correct_route_list(network, rqs)
|
||||||
|
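The setup sequence these two tests now share (build the network, then build the OMS list before turning requests into paths) is sketched below as a minimal, hypothetical standalone version; the file paths come from this test module, bidir=False mirrors the updated load_requests calls, and the power_dbm attribute is assumed from the surrounding test code:

# Sketch of the shared test setup (assumptions: gnpy installed, run from the repo root,
# and equipment['SI']['default'].power_dbm exists as used in the tests above).
from pathlib import Path
from gnpy.core.equipment import load_equipment, automatic_nch
from gnpy.core.network import load_network, build_network
from gnpy.core.utils import lin2db
from gnpy.core.spectrum_assignment import build_oms_list
from examples.path_requests_run import requests_from_json, correct_route_list, load_requests

eqpt_file = Path('tests/data/eqpt_config.json')
net_file = Path('tests/data/testTopology_expected.json')
serv_file = Path('tests/data/testTopology_testservices.json')

equipment = load_equipment(eqpt_file)
network = load_network(net_file, equipment)
p_db = equipment['SI']['default'].power_dbm
p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,
                                         equipment['SI']['default'].f_max,
                                         equipment['SI']['default'].spacing))
build_network(network, equipment, p_db, p_total_db)
build_oms_list(network, equipment)   # one OMS per ROADM-to-ROADM link

data = load_requests(serv_file, eqpt_file, bidir=False)
rqs = correct_route_list(network, requests_from_json(data, equipment))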
|||||||
@@ -21,6 +21,7 @@ from os import unlink
|
|||||||
from pandas import read_csv
|
from pandas import read_csv
|
||||||
import pytest
|
import pytest
|
||||||
from tests.compare import compare_networks, compare_services
|
from tests.compare import compare_networks, compare_services
|
||||||
|
from copy import deepcopy
|
||||||
from gnpy.core.utils import lin2db
|
from gnpy.core.utils import lin2db
|
||||||
from gnpy.core.network import save_network, build_network
|
from gnpy.core.network import save_network, build_network
|
||||||
from gnpy.core.convert import convert_file
|
from gnpy.core.convert import convert_file
|
||||||
@@ -29,6 +30,8 @@ from gnpy.core.equipment import load_equipment, automatic_nch
|
|||||||
from gnpy.core.network import load_network
|
from gnpy.core.network import load_network
|
||||||
from gnpy.core.request import (jsontocsv, requests_aggregation,
|
from gnpy.core.request import (jsontocsv, requests_aggregation,
|
||||||
compute_path_dsjctn, Result_element)
|
compute_path_dsjctn, Result_element)
|
||||||
|
from gnpy.core.spectrum_assignment import build_oms_list, pth_assign_spectrum
|
||||||
|
from gnpy.core.exceptions import ServiceError
|
||||||
from examples.path_requests_run import (requests_from_json, disjunctions_from_json,
|
from examples.path_requests_run import (requests_from_json, disjunctions_from_json,
|
||||||
correct_route_list, correct_disjn,
|
correct_route_list, correct_disjn,
|
||||||
compute_path_with_disjunction)
|
compute_path_with_disjunction)
|
||||||
@@ -147,9 +150,9 @@ def test_auto_design_generation_fromjson(json_input, expected_json_output):
     assert not results.connections.different

 # test services creation

 @pytest.mark.parametrize('xls_input,expected_json_output', {
     DATA_DIR / 'testTopology.xls': DATA_DIR / 'testTopology_services_expected.json',
+    DATA_DIR / 'testService.xls': DATA_DIR / 'testService_services_expected.json'
     }.items())
 def test_excel_service_json_generation(xls_input, expected_json_output):
     """ test services creation
@@ -172,6 +175,8 @@ def test_excel_service_json_generation(xls_input, expected_json_output):
     assert not results.synchronizations.extra
     assert not results.synchronizations.different

+# TODO verify that requested bandwidth is not zero !
+
 # test xls answers creation
 @pytest.mark.parametrize('json_input, csv_output', {
     DATA_DIR / 'testTopology_response.json': DATA_DIR / 'testTopology_response',
@@ -206,12 +211,18 @@ def test_csv_response_generation(json_input, csv_output):
     # 'SNR-bandwidth',
     # 'baud rate (Gbaud)',
     # 'input power (dBm)',
-    # 'path'
+    # 'path',
+    # 'spectrum (N,M)',
+    # 'reversed path OSNR-0.1nm',
+    # 'reversed path SNR-0.1nm',
+    # 'reversed path SNR-bandwidth'
     # ]

     resp = read_csv(csv_filename)
+    print(resp)
     unlink(csv_filename)
     expected_resp = read_csv(expected_csv_filename)
+    print(expected_resp)
     resp_header = list(resp.head(0))
     expected_resp_header = list(expected_resp.head(0))
     # check that headers are the same
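The header check that follows read_csv relies on the fact that iterating a pandas DataFrame (here via list(frame.head(0))) yields its column names only. A small sketch of that comparison with in-memory frames and made-up column values, rather than the CSV files this test writes and unlinks:

from pandas import DataFrame

resp = DataFrame({'response-id': [1], 'OSNR-0.1nm': [30.3]})
expected_resp = DataFrame({'response-id': [2], 'OSNR-0.1nm': [28.1]})

# list(frame.head(0)) returns the column labels, so this compares headers, not values.
assert list(resp.head(0)) == list(expected_resp.head(0))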
@@ -240,23 +251,24 @@ def compare_response(exp_resp, act_resp):
     print(act_resp)
     test = True
     for key in act_resp.keys():
-        print(key)
         if not key in exp_resp.keys():
-            print(key)
+            print(f'{key} is not expected')
             return False
         if isinstance(act_resp[key], dict):
             test = compare_response(exp_resp[key], act_resp[key])
     if test:
         for key in exp_resp.keys():
             if not key in act_resp.keys():
-                print(key)
+                print(f'{key} is expected')
                 return False
             if isinstance(exp_resp[key], dict):
                 test = compare_response(exp_resp[key], act_resp[key])

     # at this point exp_resp and act_resp have the same keys. Check if their values are the same
     for key in act_resp.keys():
         if not isinstance(act_resp[key], dict):
             if exp_resp[key] != act_resp[key]:
+                print(f'expected value :{exp_resp[key]}\n actual value: {act_resp[key]}')
                 return False
     return test
+
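compare_response, as updated above, walks both dictionaries key by key, recursing into nested dicts and reporting the first unexpected key or mismatching value. A standalone sketch of the same recursive pattern with invented sample data (not the gnpy response structure):

def dicts_match(expected, actual):
    """Return True when both dicts have the same keys and equal (possibly nested) values."""
    if expected.keys() != actual.keys():
        return False
    for key, exp_value in expected.items():
        act_value = actual[key]
        if isinstance(exp_value, dict) and isinstance(act_value, dict):
            # recurse into nested dictionaries
            if not dicts_match(exp_value, act_value):
                return False
        elif exp_value != act_value:
            return False
    return True

assert dicts_match({'a': {'b': 1}}, {'a': {'b': 1}})
assert not dicts_match({'a': {'b': 1}}, {'a': {'b': 2}})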
@@ -269,6 +281,9 @@ def test_json_response_generation(xls_input, expected_response_file):
     """ tests if json response is correctly generated for all combinations of requests
     """
     data = convert_service_sheet(xls_input, eqpt_filename)
+    # change one of the request with bidir option to cover bidir case as well
+    data['path-request'][2]['bidirectional'] = True
+
     equipment = load_equipment(eqpt_filename)
     network = load_network(xls_input, equipment)
     p_db = equipment['SI']['default'].power_dbm
@@ -276,23 +291,56 @@ def test_json_response_generation(xls_input, expected_response_file):
     p_total_db = p_db + lin2db(automatic_nch(equipment['SI']['default'].f_min,\
                  equipment['SI']['default'].f_max, equipment['SI']['default'].spacing))
     build_network(network, equipment, p_db, p_total_db)
+    oms_list = build_oms_list(network, equipment)
     rqs = requests_from_json(data, equipment)
     rqs = correct_route_list(network, rqs)
     dsjn = disjunctions_from_json(data)
     dsjn = correct_disjn(dsjn)
     rqs, dsjn = requests_aggregation(rqs, dsjn)
     pths = compute_path_dsjctn(network, equipment, rqs, dsjn)
-    propagatedpths = compute_path_with_disjunction(network, equipment, rqs, pths)
+    propagatedpths, reversed_pths, reversed_propagatedpths = \
+        compute_path_with_disjunction(network, equipment, rqs, pths)
+    pth_assign_spectrum(pths, rqs, oms_list, reversed_pths)

     result = []
     for i, pth in enumerate(propagatedpths):
-        result.append(Result_element(rqs[i], pth))
+        # test ServiceError handling : when M is zero at this point, the
+        # json result should not be created if there is no blocking reason
+        if i == 1:
+            my_rq = deepcopy(rqs[i])
+            my_rq.M = 0
+            with pytest.raises(ServiceError):
+                Result_element(my_rq, pth, reversed_propagatedpths[i]).json
+
+            my_rq.blocking_reason = 'NO_SPECTRUM'
+            Result_element(my_rq, pth, reversed_propagatedpths[i]).json
+
+        result.append(Result_element(rqs[i], pth, reversed_propagatedpths[i]))

     temp = {
         'response': [n.json for n in result]
     }
-    # load expected result and compare keys
-    # (not values at this stage)
+    # load expected result and compare keys and values
     with open(expected_response_file) as jsonfile:
         expected = load(jsonfile)
+    # since we changes bidir attribute of request#2, need to add the corresponding
+    # metric in response
+
     for i, response in enumerate(temp['response']):
+        if i == 2:
+            # compare response must be False because z-a metric is missing
+            # (request with bidir option to cover bidir case)
+            assert not compare_response(expected['response'][i], response)
+            print(f'response {response["response-id"]} should not match')
+            expected['response'][2]['path-properties']['z-a-path-metric'] = [
+                {'metric-type': 'SNR-bandwidth', 'accumulative-value': 22.809999999999999},
+                {'metric-type': 'SNR-0.1nm', 'accumulative-value': 26.890000000000001},
+                {'metric-type': 'OSNR-bandwidth', 'accumulative-value': 26.239999999999998},
+                {'metric-type': 'OSNR-0.1nm', 'accumulative-value': 30.32},
+                {'metric-type': 'reference_power', 'accumulative-value': 0.0012589254117941673},
+                {'metric-type': 'path_bandwidth', 'accumulative-value': 60000000000.0}]
+            # test should be OK now
+        else:
-        assert compare_response(expected['response'][i], response)
+            assert compare_response(expected['response'][i], response)
+            print(f'response {response["response-id"]} is not correct')
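The ServiceError block added above follows the usual pytest.raises pattern: accessing the .json property must raise while M is zero and no blocking reason is set, and must succeed once a blocking reason is present. A generic, self-contained illustration of that pattern, with a dummy class and ValueError standing in for gnpy's Result_element and ServiceError (all names below are invented):

import pytest

class DummyResult:
    """Toy stand-in for a result element whose .json property can refuse to serialise."""
    def __init__(self, m, blocking_reason=None):
        self.m = m
        self.blocking_reason = blocking_reason

    @property
    def json(self):
        # refuse to build a result when M is zero and nothing explains the blocking
        if self.m == 0 and self.blocking_reason is None:
            raise ValueError('M must not be zero without a blocking reason')
        return {'M': self.m, 'blocking-reason': self.blocking_reason}

def test_dummy_result_json():
    with pytest.raises(ValueError):
        DummyResult(m=0).json                                        # no blocking reason: must raise
    assert DummyResult(m=0, blocking_reason='NO_SPECTRUM').json      # tolerated once a reason is set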
 49  tests/test_science_utils.py  Normal file
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# @Author: Alessio Ferrari
+"""
+checks that RamanFiber propagates properly the spectral information. In this way, also the RamanSolver and the NliSolver
+are tested.
+"""
+
+import json
+from pandas import read_csv
+from numpy.testing import assert_allclose
+from gnpy.core.info import create_input_spectral_information
+from gnpy.core.elements import RamanFiber
+from gnpy.core.network import load_sim_params
+from pathlib import Path
+TEST_DIR = Path(__file__).parent
+
+def test_raman_fiber():
+    """ Test the accuracy of propagating the RamanFiber.
+    """
+    # spectral information generation
+    power = 1e-3
+    with open(TEST_DIR / 'data' / 'eqpt_config.json', 'r') as file:
+        eqpt_params = json.load(file)
+    spectral_info_params = eqpt_params['SI'][0]
+    spectral_info_params.pop('power_dbm')
+    spectral_info_params.pop('power_range_db')
+    spectral_info_params.pop('tx_osnr')
+    spectral_info_params.pop('sys_margins')
+    spectral_info_input = create_input_spectral_information(power=power, **spectral_info_params)
+
+    # RamanFiber
+    with open(TEST_DIR / 'data' / 'raman_fiber_config.json', 'r') as file:
+        raman_fiber_params = json.load(file)
+    sim_params = load_sim_params(TEST_DIR / 'data' / 'sim_params.json')
+    fiber = RamanFiber(**raman_fiber_params)
+    fiber.sim_params = sim_params
+
+    # propagation
+    spectral_info_out = fiber(spectral_info_input)
+
+    p_signal = [carrier.power.signal for carrier in spectral_info_out.carriers]
+    p_ase = [carrier.power.ase for carrier in spectral_info_out.carriers]
+    p_nli = [carrier.power.nli for carrier in spectral_info_out.carriers]
+
+    expected_results = read_csv(TEST_DIR / 'data' / 'expected_results_science_utils.csv')
+    assert_allclose(p_signal, expected_results['signal'], rtol=1e-3)
+    assert_allclose(p_ase, expected_results['ase'], rtol=1e-3)
+    assert_allclose(p_nli, expected_results['nli'], rtol=1e-3)
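The three assert_allclose calls in this new test compare the propagated signal, ASE and NLI powers against reference values with a relative tolerance of 1e-3. A quick, self-contained reminder of how that tolerance behaves, using synthetic numbers unrelated to expected_results_science_utils.csv:

from numpy.testing import assert_allclose

expected = [1.00e-3, 2.00e-3]
measured = [1.0005e-3, 1.9990e-3]                # each value within 0.1 % of its reference
assert_allclose(measured, expected, rtol=1e-3)   # passes: |measured - expected| <= rtol * |expected|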