Another round of formatting improvements (Codacy report)

remove unused imports
rename short local variable names to conform to [a-z_][a-z0-9_]{2,30}$ (see the example below)
rename module-level variable names to conform to [A-Z_][A-Z0-9_]{2,30}$
add or remove spaces
add docstrings
correct comments and the indentation of continuation lines
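
For illustration only, a minimal sketch of the kind of rename applied throughout the diff below (the toy data is hypothetical, not taken verbatim from the code):

    from logging import getLogger

    # before: lower-case module-level name and one-letter loop variable
    logger = getLogger(__name__)
    all_simp_pths = [['a', 'b'], ['a', 'c']]   # toy data for illustration
    candidate = []
    for p in all_simp_pths:
        candidate.append(p)

    # after: upper-case module-level constant and a loop variable of at least 3 characters
    LOGGER = getLogger(__name__)
    for pth in all_simp_pths:
        candidate.append(pth)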

Signed-off-by: EstherLerouzic <esther.lerouzic@orange.com>
EstherLerouzic
2019-09-13 18:57:11 +01:00
parent 83d3f32fe0
commit edb54b02ac
5 changed files with 206 additions and 223 deletions


@@ -28,10 +28,10 @@ from gnpy.core.utils import db2lin, lin2db
 from gnpy.core.request import (Path_request, Result_element,
                                propagate, jsontocsv, Disjunction, compute_path_dsjctn,
                                requests_aggregation, propagate_and_optimize_mode,
-                               BLOCKING_NOPATH, BLOCKING_NOMODE, BLOCKING_NOSPECTRUM,
+                               BLOCKING_NOPATH, BLOCKING_NOMODE,
                                find_reversed_path)
 from gnpy.core.exceptions import (ConfigurationError, EquipmentConfigError, NetworkTopologyError,
-                                  ServiceError)
+                                  ServiceError, DisjunctionError)
 import gnpy.core.ansi_escapes as ansi_escapes
 from gnpy.core.spectrum_assignment import (build_oms_list, pth_assign_spectrum)
 from copy import copy, deepcopy
@@ -40,7 +40,7 @@ from math import ceil
 #EQPT_LIBRARY_FILENAME = Path(__file__).parent / 'eqpt_config.json'
-logger = getLogger(__name__)
+LOGGER = getLogger(__name__)
 PARSER = ArgumentParser(description='A function that computes performances for a list of ' +
                         'services provided in a json file or an excel sheet.')
@@ -131,16 +131,16 @@ def consistency_check(params, f_max_from_si):
               f'{params["trx_type"]} {params["trx_mode"]} min spacing value ' +\
               f'{params["min_spacing"]*1e-9}GHz.\nComputation stopped'
         print(msg)
-        logger.critical(msg)
         exit()
+        LOGGER.critical(msg)
     if f_max > f_max_from_si:
         msg = dedent(f'''
 Requested channel number {params["nb_channel"]}, baud rate {params["baud_rate"]} GHz and requested spacing {params["spacing"]*1e-9}GHz
 is not consistent with frequency range {f_min*1e-12} THz, {f_max*1e-12} THz, min recommanded spacing {params["min_spacing"]*1e-9}GHz.
 max recommanded nb of channels is {max_recommanded_nb_channels}
 Computation stopped.''')
-        logger.critical(msg)
         exit()
+        LOGGER.critical(msg)
 def disjunctions_from_json(json_data):
@@ -169,8 +169,8 @@ def load_requests(filename, eqpt_filename, bidir):
     """ loads the requests from a json or an excel file into a data string
     """
     if filename.suffix.lower() == '.xls':
-        logger.info('Automatically converting requests from XLS to JSON')
         json_data = convert_service_sheet(filename, eqpt_filename, bidir=bidir)
+        LOGGER.info('Automatically converting requests from XLS to JSON')
     else:
         with open(filename, encoding='utf-8') as my_f:
             json_data = loads(my_f.read())
@@ -219,7 +219,7 @@ def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist):
                       f' {pathreq.tsp_mode}\n\tcomputedSNR in 0.1nm = {temp_snr01nm} ' +\
                       f'- required osnr {pathreq.OSNR}'
                 print(msg)
-                logger.warning(msg)
+                LOGGER.warning(msg)
                 pathreq.blocking_reason = 'MODE_NOT_FEASIBLE'
             else:
                 total_path, mode = propagate_and_optimize_mode(total_path, pathreq, equipment)
@@ -265,7 +265,7 @@ def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist):
                       f' {pathreq.tsp_mode}\n' +\
                       f'\tcomputedSNR in 0.1nm = {temp_snr01nm} - required osnr {pathreq.OSNR}'
                 print(msg)
-                logger.warning(msg)
+                LOGGER.warning(msg)
                 # TODO selection of mode should also be on reversed direction !!
                 pathreq.blocking_reason = 'MODE_NOT_FEASIBLE'
             else:
@@ -273,7 +273,7 @@ def compute_path_with_disjunction(network, equipment, pathreqlist, pathlist):
         else:
             msg = 'Total path is empty. No propagation'
             print(msg)
-            logger.info(msg)
+            LOGGER.info(msg)
             reversed_path = []
             propagated_reversed_path = []
@@ -316,19 +316,19 @@ def correct_route_list(network, pathreqlist):
                 else:
                     msg = f'\x1b[1;33;40m'+f'could not find node: {n_id} in network topology.' +\
                           f' Strict constraint can not be applied.' + '\x1b[0m'
-                    logger.critical(msg)
+                    LOGGER.critical(msg)
                     raise ValueError(msg)
         if pathreq.source not in transponders:
             msg = f'\x1b[1;31;40m' + f'Request: {pathreq.request_id}: could not find' +\
                   f' transponder source: {pathreq.source}.'+'\x1b[0m'
-            logger.critical(msg)
+            LOGGER.critical(msg)
             print(f'{msg}\nComputation stopped.')
             exit()
         if pathreq.destination not in transponders:
             msg = f'\x1b[1;31;40m'+f'Request: {pathreq.request_id}: could not find' +\
                   f' transponder destination: {pathreq.destination}.'+'\x1b[0m'
-            logger.critical(msg)
+            LOGGER.critical(msg)
             print(f'{msg}\nComputation stopped.')
             exit()


@@ -15,10 +15,9 @@ and feasibility
 See: draft-ietf-teas-yang-path-computation-01.txt
 """
-from sys import exit
 from collections import namedtuple, OrderedDict
 from logging import getLogger, basicConfig, CRITICAL, DEBUG, INFO
-from networkx import (dijkstra_path, NetworkXNoPath, all_simple_paths, shortest_path_length)
+from networkx import (dijkstra_path, NetworkXNoPath, all_simple_paths)
 from networkx.utils import pairwise
 from numpy import mean
 from gnpy.core.service_sheet import convert_service_sheet, Request_element, Element
@@ -30,7 +29,7 @@ from copy import copy, deepcopy
 from csv import writer
 from math import ceil
-logger = getLogger(__name__)
+LOGGER = getLogger(__name__)
 RequestParams = namedtuple('RequestParams', 'request_id source destination bidir trx_type' +
                            ' trx_mode nodes_list loose_list spacing power nb_channel f_min' +
@@ -40,6 +39,8 @@ DisjunctionParams = namedtuple('DisjunctionParams', 'disjunction_id relaxable li
                                '_diverse node_diverse disjunctions_req')
 class Path_request:
+    """ the class that contains all attributes related to a request
+    """
     def __init__(self, *args, **params):
         params = RequestParams(**params)
         self.request_id = params.request_id
@@ -92,6 +93,8 @@ class Path_request:
                           f'loose-list:\t{self.loose_list}'
                           '\n'])
 class Disjunction:
+    """ the class that contains all attributes related to disjunction constraints
+    """
     def __init__(self, *args, **params):
         params = DisjunctionParams(**params)
         self.disjunction_id = params.disjunction_id
@@ -130,6 +133,8 @@ class Result_element(Element):
     uid = property(lambda self: repr(self))
     @property
     def detailed_path_json(self):
+        """ a function that builds path object for normal and blocking cases
+        """
         index = 0
         pro_list = []
         for element in self.computed_path:
@@ -176,33 +181,33 @@ class Result_element(Element):
     def path_properties(self):
         """ a function that returns the path properties (metrics, crossed elements) into a dict
         """
-        def path_metric(p, r):
+        def path_metric(pth, req):
             """ creates the metrics dictionary
             """
             return [
                 {
                     'metric-type': 'SNR-bandwidth',
-                    'accumulative-value': round(mean(p[-1].snr), 2)
+                    'accumulative-value': round(mean(pth[-1].snr), 2)
                 },
                 {
                     'metric-type': 'SNR-0.1nm',
-                    'accumulative-value': round(mean(p[-1].snr+lin2db(r.baud_rate/12.5e9)), 2)
+                    'accumulative-value': round(mean(pth[-1].snr+lin2db(req.baud_rate/12.5e9)), 2)
                 },
                 {
                     'metric-type': 'OSNR-bandwidth',
-                    'accumulative-value': round(mean(p[-1].osnr_ase), 2)
+                    'accumulative-value': round(mean(pth[-1].osnr_ase), 2)
                 },
                 {
                     'metric-type': 'OSNR-0.1nm',
-                    'accumulative-value': round(mean(p[-1].osnr_ase_01nm), 2)
+                    'accumulative-value': round(mean(pth[-1].osnr_ase_01nm), 2)
                 },
                 {
                     'metric-type': 'reference_power',
-                    'accumulative-value': r.power
+                    'accumulative-value': req.power
                 },
                 {
                     'metric-type': 'path_bandwidth',
-                    'accumulative-value': r.path_bandwidth
+                    'accumulative-value': req.path_bandwidth
                 }
             ]
         if self.path_request.bidir:
@@ -271,13 +276,13 @@ def compute_constrained_path(network, req):
     if nodes_list is None:
         msg = f'Request {req.request_id} problem in the constitution of nodes_list: ' +\
               'should at least include destination'
-        logger.critical(msg)
         exit()
+        LOGGER.critical(msg)
     if req.nodes_list[-1] != req.destination:
         msg = f'Request {req.request_id} malformed list of nodes: last node should '+\
               'be destination trx'
-        logger.critical(msg)
         exit()
+        LOGGER.critical(msg)
     if len(nodes_list) == 1:
         try:
@@ -293,7 +298,7 @@ def compute_constrained_path(network, req):
         except NetworkXNoPath:
             msg = f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path from' +\
                   f' {source.uid} to node: {destination.uid} in network topology'+ '\x1b[0m'
-            logger.critical(msg)
+            LOGGER.critical(msg)
             print(msg)
             req.blocking_reason = 'NO_PATH'
             total_path = []
@@ -301,10 +306,10 @@ def compute_constrained_path(network, req):
         all_simp_pths = list(all_simple_paths(network, source=source,\
                              target=destination, cutoff=120))
         candidate = []
-        for p in all_simp_pths:
-            if ispart(nodes_list, p):
+        for pth in all_simp_pths:
+            if ispart(nodes_list, pth):
                 # print(f'selection{[el.uid for el in p if el in roadm]}')
-                candidate.append(p)
+                candidate.append(pth)
         # select the shortest path (in nb of hops) -> changed to shortest path in km length
         if len(candidate) > 0:
             # candidate.sort(key=lambda x: len(x))
@@ -326,18 +331,19 @@ def compute_constrained_path(network, req):
             if 'STRICT' not in req.loose_list[:-len('STRICT')]:
                 msg = f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path with user_' +\
                       f'include node constraints' + '\x1b[0m'
-                logger.info(msg)
+                LOGGER.info(msg)
                 print(f'constraint ignored')
                 total_path = dijkstra_path(network, source, destination, weight='weight')
             else:
                 msg = f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path with user ' +\
                       f'include node constraints.\nNo path computed'+ '\x1b[0m'
-                logger.critical(msg)
+                LOGGER.critical(msg)
                 print(msg)
                 req.blocking_reason = 'NO_PATH_WITH_CONSTRAINT'
                 total_path = []
-    # obsolete method: this does not guaranty to avoid loops or correct results
+    # the following method was initially used but abandonned: compute per segment:
+    # this does not guaranty to avoid loops or correct results
     # Here is the demonstration:
     # 1 1
     # eg a----b-----c
@@ -362,42 +368,6 @@ def compute_constrained_path(network, req):
     # e----f h g
     # 1
     # then there is no more path to g destination
-    #
-    #
-    # total_path = [source]
-    # for n in req.nodes_list:
-    #     try:
-    #         node = next(el for el in trx if el.uid == n)
-    #     except StopIteration:
-    #         try:
-    #             node = next(el for el in anytypenode if el.uid == n)
-    #         except StopIteration:
-    #             try:
-    #                 # TODO this test is not giving good results: full name of the
-    #                 # amp is required to avoid ambiguity on the direction
-    #                 node = next(el for el in anytypenode
-    #                             if n in el.uid)
-    #             except StopIteration:
-    #                 msg = f'could not find node: {n} in network topology: \
-    #                       not a trx, roadm, edfa, fiber or fused element'
-    #                 logger.critical(msg)
-    #                 raise ValueError(msg)
-    #     # extend path list without repeating source -> skip first element in the list
-    #     try:
-    #         # to avoid looping back: use an alternate graph were current path edges and vertex are suppressed
-    #         total_path.extend(dijkstra_path(network, source, node)[1:])
-    #         source = node
-    #     except NetworkXNoPath:
-    #         if req.loose_list[req.nodes_list.index(n)] == 'loose':
-    #             print(f'could not find a path from {source.uid} to loose node: {n} in network topology')
-    #             print(f'node {n} is skipped')
-    #         else:
-    #             msg = f'could not find a path from {source.uid} to node: {n} in network topology'
-    #             logger.critical(msg)
-    #             print(msg)
-    #             total_path = []
     return total_path
@@ -425,15 +395,17 @@ def propagate2(path, req, equipment):
 def propagate_and_optimize_mode(path, req, equipment):
     # if mode is unknown : loops on the modes starting from the highest baudrate fiting in the
     # step 1: create an ordered list of modes based on baudrate
-    baudrate_to_explore = list(set([m['baud_rate'] for m in equipment['Transceiver'][req.tsp].mode
-                                    if float(m['min_spacing']) <= req.spacing]))
+    baudrate_to_explore = list(set([this_mode['baud_rate']
+                                    for this_mode in equipment['Transceiver'][req.tsp].mode
+                                    if float(this_mode['min_spacing']) <= req.spacing]))
     # TODO be carefull on limits cases if spacing very close to req spacing eg 50.001 50.000
     baudrate_to_explore = sorted(baudrate_to_explore, reverse=True)
     if baudrate_to_explore:
         # at least 1 baudrate can be tested wrt spacing
-        for b in baudrate_to_explore:
-            modes_to_explore = [m for m in equipment['Transceiver'][req.tsp].mode
-                                if m['baud_rate'] == b and float(m['min_spacing']) <= req.spacing]
+        for this_br in baudrate_to_explore:
+            modes_to_explore = [this_mode for this_mode in equipment['Transceiver'][req.tsp].mode
+                                if this_mode['baud_rate'] == this_br and
+                                float(this_mode['min_spacing']) <= req.spacing]
             modes_to_explore = sorted(modes_to_explore,
                                       key=lambda x: x['bit_rate'], reverse=True)
             # print(modes_to_explore)
@@ -441,19 +413,19 @@ def propagate_and_optimize_mode(path, req, equipment):
             found_a_feasible_mode = False
             # TODO: the case of roll of is not included: for now use SI one
             # TODO: if the loop in mode optimization does not have a feasible path, then bugs
-            si = create_input_spectral_information(req.f_min, req.f_max,
+            spc_info = create_input_spectral_information(req.f_min, req.f_max,
                                                    equipment['SI']['default'].roll_off,
-                                                   b, req.power, req.spacing)
+                                                   this_br, req.power, req.spacing)
             for el in path:
-                si = el(si)
-            for m in modes_to_explore :
+                spc_info = el(spc_info)
+            for this_mode in modes_to_explore:
                 if path[-1].snr is not None:
-                    path[-1].update_snr(m['tx_osnr'], equipment['Roadm']['default'].add_drop_osnr)
-                    if round(min(path[-1].snr+lin2db(b/(12.5e9))), 2) > m['OSNR']:
+                    path[-1].update_snr(this_mode['tx_osnr'], equipment['Roadm']['default'].add_drop_osnr)
+                    if round(min(path[-1].snr+lin2db(this_br/(12.5e9))), 2) > this_mode['OSNR']:
                         found_a_feasible_mode = True
-                        return path, m
+                        return path, this_mode
                     else:
-                        last_explored_mode = m
+                        last_explored_mode = this_mode
                 else:
                     req.blocking_reason = 'NO_COMPUTED_SNR'
                     return path, None
@@ -463,14 +435,14 @@ def propagate_and_optimize_mode(path, req, equipment):
         # returns the last propagated path and mode
         msg = f'\tWarning! Request {req.request_id}: no mode satisfies path SNR requirement.\n'
         print(msg)
-        logger.info(msg)
+        LOGGER.info(msg)
         req.blocking_reason = 'NO_FEASIBLE_MODE'
         return path, last_explored_mode
     else:
         # no baudrate satisfying spacing
         msg = f'\tWarning! Request {req.request_id}: no baudrate satisfies spacing requirement.\n'
         print(msg)
-        logger.info(msg)
+        LOGGER.info(msg)
         req.blocking_reason = 'NO_FEASIBLE_BAUDRATE_WITH_SPACING'
         return [], None
@@ -613,6 +585,9 @@ def jsontocsv(json_data, equipment, fileout):
         tsp = temp_tsp['transponder-type']
         mode = temp_tsp['transponder-mode']
+        # find the min acceptable OSNR, baud rate from the eqpt library based
+        # on tsp (type) and mode (format).
+        # loading equipment already tests the existence of tsp type and mode:
         pth, minosnr, baud_rate, bit_rate, cost, output_snr, output_snrbandwidth, \
             output_osnr, power, path_bandwidth, sptrm = \
             jsontoparams(pth_el, tsp, mode, equipment)
@@ -731,22 +706,22 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
             all_simp_pths_reversed.append(find_reversed_path(pth))
         rqs[pathreq.request_id] = all_simp_pths
         temp = []
-        for p in all_simp_pths:
+        for pth in all_simp_pths:
             # build a short list representing each roadm+direction with the first item
             # start enumeration at 1 to avoid Trx in the list
-            s = [e.uid for i, e in enumerate(p[1:-1]) \
-                 if isinstance(e, Roadm) | (isinstance(p[i], Roadm))]
-            temp.append(s)
-            # id(s) is unique even if path is the same: two objects with same
+            short_list = [e.uid for i, e in enumerate(pth[1:-1]) \
+                          if isinstance(e, Roadm) | (isinstance(pth[i], Roadm))]
+            temp.append(short_list)
+            # id(short_list) is unique even if path is the same: two objects with same
             # path have two different ids
-            allpaths[id(s)] = Pth(pathreq, p, s)
+            allpaths[id(short_list)] = Pth(pathreq, pth, short_list)
         simple_rqs[pathreq.request_id] = temp
         temp = []
-        for p in all_simp_pths_reversed:
+        for pth in all_simp_pths_reversed:
             # build a short list representing each roadm+direction with the first item
             # start enumeration at 1 to avoid Trx in the list
-            temp.append([e.uid for i, e in enumerate(p[1:-1]) \
-                         if isinstance(e, Roadm) | (isinstance(p[i], Roadm))])
+            temp.append([e.uid for i, e in enumerate(pth[1:-1]) \
+                         if isinstance(e, Roadm) | (isinstance(pth[i], Roadm))])
         simple_rqs_reversed[pathreq.request_id] = temp
         # step 2
         # for each set of requests that need to be disjoint
@@ -757,8 +732,8 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
         dlist = d.disjunctions_req.copy()
         # each line of dpath is one combination of path that satisfies disjunction
         dpath = []
-        for i, p in enumerate(simple_rqs[dlist[0]]):
-            dpath.append([p])
+        for i, pth in enumerate(simple_rqs[dlist[0]]):
+            dpath.append([pth])
             # allpaths[id(p)].d_id = d.disjunction_id
         # in each loop, dpath is updated with a path for rq that satisfies
         # disjunction with each path in dpath
@@ -785,25 +760,24 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
         # p3 and p4 are not disjoint
         # p3 and p7 are not disjoint
-        for e1 in dlist[1:]:
+        for elem1 in dlist[1:]:
             temp = []
-            for j, p1 in enumerate(simple_rqs[e1]):
+            for j, pth1 in enumerate(simple_rqs[elem1]):
-                # allpaths[id(p1)].d_id = d.disjunction_id
                 # can use index j in simple_rqs_reversed because index
                 # of direct and reversed paths have been kept identical
-                p1_reversed = simple_rqs_reversed[e1][j]
-                # print(p1_reversed)
+                pth1_reversed = simple_rqs_reversed[elem1][j]
+                # print(pth1_reversed)
                 # print('\n\n')
-                for k, c in enumerate(dpath):
+                for cndt in dpath:
                     # print(f' c: \t{c}')
-                    temp2 = c.copy()
+                    temp2 = cndt.copy()
                     all_disjoint = 0
-                    for p in c:
-                        all_disjoint += isdisjoint(p1, p) + isdisjoint(p1_reversed, p)
+                    for pth in cndt:
+                        all_disjoint += isdisjoint(pth1, pth) + isdisjoint(pth1_reversed, pth)
                     if all_disjoint == 0:
-                        temp2.append(p1)
+                        temp2.append(pth1)
                         temp.append(temp2)
-            # print(f' coucou {e1}: \t{temp}')
+            # print(f' coucou {elem1}: \t{temp}')
             dpath = temp
         # print(dpath)
         candidates[d.disjunction_id] = dpath
@@ -829,23 +803,23 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
             candidate_paths = simple_rqs[pathreq.request_id]
             # print('coucou')
             # print(pathreq.request_id)
-            for p in candidate_paths:
+            for pth in candidate_paths:
                 iscandidate = 0
                 for sol in concerned_d_id:
                     test = 1
-                    # for each solution test if p is part of the solution
-                    # if yes, then p can remain a candidate
-                    for i, m in enumerate(candidates[sol]):
-                        if p in m:
-                            if allpaths[id(m[m.index(p)])].req.request_id == pathreq.request_id:
+                    # for each solution test if pth is part of the solution
+                    # if yes, then pth can remain a candidate
+                    for cndt in candidates[sol]:
+                        if pth in cndt:
+                            if allpaths[id(cndt[cndt.index(pth)])].req.request_id == pathreq.request_id:
                                 test = 0
                                 break
                     iscandidate += test
                 if iscandidate != 0:
-                    for l in concerned_d_id:
-                        for m in candidates[l]:
-                            if p in m:
-                                candidates[l].remove(m)
+                    for this_id in concerned_d_id:
+                        for cndt in candidates[this_id]:
+                            if pth in cndt:
+                                candidates[this_id].remove(cndt)
         # for i in disjunctions_list:
         #     print(i.disjunction_id)
@@ -855,53 +829,53 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
     # the constraint only in the case of disjounction: the simple path is processed in
     # request.compute_constrained_path
     # TODO: keep a version without the loose constraint
-    for d in disjunctions_list:
+    for this_d in disjunctions_list:
         temp = []
-        for j, sol in enumerate(candidates[d.disjunction_id]):
+        for j, sol in enumerate(candidates[this_d.disjunction_id]):
             testispartok = True
-            for i, p in enumerate(sol):
-                # print(f'test {allpaths[id(p)].req.request_id}')
-                # print(f'length of route {len(allpaths[id(p)].req.nodes_list)}')
-                if allpaths[id(p)].req.nodes_list:
-                    # if p does not containt the ordered list node, remove sol from the candidate
+            for pth in sol:
+                # print(f'test {allpaths[id(pth)].req.request_id}')
+                # print(f'length of route {len(allpaths[id(pth)].req.nodes_list)}')
+                if allpaths[id(pth)].req.nodes_list:
+                    # if pth does not containt the ordered list node, remove sol from the candidate
                     # except if this was the last solution: then check if the constraint is loose
                     # or not
-                    if not ispart(allpaths[id(p)].req.nodes_list, p):
+                    if not ispart(allpaths[id(pth)].req.nodes_list, pth):
                         # print(f'nb of solutions {len(temp)}')
-                        if j < len(candidates[d.disjunction_id])-1:
+                        if j < len(candidates[this_d.disjunction_id])-1:
                             msg = f'removing {sol}'
-                            logger.info(msg)
+                            LOGGER.info(msg)
                             testispartok = False
                             #break
                         else:
-                            if 'LOOSE' in allpaths[id(p)].req.loose_list:
-                                logger.info(f'Could not apply route constraint'+
-                                            f'{allpaths[id(p)].req.nodes_list} on request' +\
-                                            f' {allpaths[id(p)].req.request_id}')
-                            else :
-                                logger.info(f'removing last solution from candidate paths\n{sol}')
+                            if 'LOOSE' in allpaths[id(pth)].req.loose_list:
+                                LOGGER.info(f'Could not apply route constraint'+
+                                            f'{allpaths[id(pth)].req.nodes_list} on request' +\
+                                            f' {allpaths[id(pth)].req.request_id}')
+                            else:
+                                LOGGER.info(f'removing last solution from candidate paths\n{sol}')
                                 testispartok = False
             if testispartok:
                 temp.append(sol)
-        candidates[d.disjunction_id] = temp
+        candidates[this_d.disjunction_id] = temp
     # step 5 select the first combination that works
     pathreslist_disjoint = {}
-    for d in disjunctions_list:
+    for dis in disjunctions_list:
         test_sol = True
         while test_sol:
            # print('coucou')
-            if candidates[d.disjunction_id]:
-                for p in candidates[d.disjunction_id][0]:
-                    if allpaths[id(p)].req in pathreqlist_disjt:
-                        # print(f'selected path:{p} for req {allpaths[id(p)].req.request_id}')
-                        pathreslist_disjoint[allpaths[id(p)].req] = allpaths[id(p)].pth
-                        pathreqlist_disjt.remove(allpaths[id(p)].req)
-                        candidates = remove_candidate(candidates, allpaths, allpaths[id(p)].req, p)
+            if candidates[dis.disjunction_id]:
+                for pth in candidates[dis.disjunction_id][0]:
+                    if allpaths[id(pth)].req in pathreqlist_disjt:
+                        # print(f'selected path:{pth} for req {allpaths[id(pth)].req.request_id}')
+                        pathreslist_disjoint[allpaths[id(pth)].req] = allpaths[id(pth)].pth
+                        pathreqlist_disjt.remove(allpaths[id(pth)].req)
+                        candidates = remove_candidate(candidates, allpaths, allpaths[id(pth)].req, pth)
                         test_sol = False
             else:
                 msg = f'No disjoint path found with added constraint'
-                logger.critical(msg)
+                LOGGER.critical(msg)
                 print(f'{msg}\nComputation stopped.')
                 # TODO in this case: replay step 5 with the candidate without constraints
                 exit()
@@ -921,12 +895,13 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list):
         path_res_list.append(pathreslist_disjoint[req])
     return path_res_list
-def isdisjoint(p1, p2):
-    # returns 0 if disjoint
-    edge1 = list(pairwise(p1))
-    edge2 = list(pairwise(p2))
-    for e in edge1:
-        if e in edge2:
+def isdisjoint(pth1, pth2):
+    """ returns 0 if disjoint
+    """
+    edge1 = list(pairwise(pth1))
+    edge2 = list(pairwise(pth2))
+    for edge in edge1:
+        if edge in edge2:
             return 1
     return 0
@@ -958,51 +933,56 @@ def find_reversed_path(pth):
     else:
         msg = f'Error while handling reversed path {pth[-1].uid} to {pth[0].uid}:' +\
               ' can not handle unidir topology. TO DO.'
-        logger.critical(msg)
+        LOGGER.critical(msg)
         raise ValueError(msg)
     reversed_path.append(pth[0])
     return reversed_path
-def ispart(a, b):
-    # the functions takes two paths a and b and retrns True
-    # if all a elements are part of b and in the same order
+def ispart(ptha, pthb):
+    """ the functions takes two paths a and b and retrns True
+    if all a elements are part of b and in the same order
+    """
     j = 0
-    for i, el in enumerate(a):
-        if el in b:
-            if b.index(el) >= j:
-                j = b.index(el)
+    for elem in ptha:
+        if elem in pthb:
+            if pthb.index(elem) >= j:
+                j = pthb.index(elem)
             else:
                 return False
         else:
             return False
     return True
-def remove_candidate(candidates, allpaths, rq, pth):
-    # print(f'coucou {rq.request_id}')
+def remove_candidate(candidates, allpaths, rqst, pth):
+    """ filter duplicate candidates
+    """
+    # print(f'coucou {rqst.request_id}')
     for key, candidate in candidates.items():
         temp = candidate.copy()
-        for i, sol in enumerate(candidate):
-            for p in sol:
-                if allpaths[id(p)].req.request_id == rq.request_id:
-                    if id(p) != id(pth):
+        for sol in candidate:
+            for this_p in sol:
+                if allpaths[id(this_p)].req.request_id == rqst.request_id:
+                    if id(this_p) != id(pth):
                         temp.remove(sol)
                         break
         candidates[key] = temp
     return candidates
 def compare_reqs(req1, req2, disjlist):
+    """ compare two requests: returns True or False
+    """
     dis1 = [d for d in disjlist if req1.request_id in d.disjunctions_req]
     dis2 = [d for d in disjlist if req2.request_id in d.disjunctions_req]
     same_disj = False
     if dis1 and dis2:
         temp1 = []
-        for d in dis1:
-            temp1.extend(d.disjunctions_req)
+        for this_d in dis1:
+            temp1.extend(this_d.disjunctions_req)
         temp1.remove(req1.request_id)
         temp2 = []
-        for d in dis2:
-            temp2.extend(d.disjunctions_req)
+        for this_d in dis2:
+            temp2.extend(this_d.disjunctions_req)
         temp2.remove(req2.request_id)
         if set(temp1) == set(temp2):
             same_disj = True
@@ -1030,28 +1010,29 @@ def compare_reqs(req1, req2, disjlist):
     return False
 def requests_aggregation(pathreqlist, disjlist):
-    # this function aggregates requests so that if several requests
-    # exist between same source and destination and with same transponder type
+    """ this function aggregates requests so that if several requests
+    exist between same source and destination and with same transponder type
+    """
     # todo maybe add conditions on mode ??, spacing ...
     # currently if undefined takes the default values
     local_list = pathreqlist.copy()
     for req in pathreqlist:
-        for r in local_list:
-            if req.request_id != r.request_id and compare_reqs(req, r, disjlist):
+        for this_r in local_list:
+            if req.request_id != this_r.request_id and compare_reqs(req, this_r, disjlist):
                 # aggregate
-                r.path_bandwidth += req.path_bandwidth
-                temp_r_id = r.request_id
-                r.request_id = ' | '.join((r.request_id, req.request_id))
+                this_r.path_bandwidth += req.path_bandwidth
+                temp_r_id = this_r.request_id
+                this_r.request_id = ' | '.join((this_r.request_id, req.request_id))
                 # remove request from list
                 local_list.remove(req)
                 # todo change also disjunction req with new demand
-                for d in disjlist:
-                    if req.request_id in d.disjunctions_req:
-                        d.disjunctions_req.remove(req.request_id)
-                        d.disjunctions_req.append(r.request_id)
-                for d in disjlist:
-                    if temp_r_id in d.disjunctions_req:
-                        disjlist.remove(d)
+                for this_d in disjlist:
+                    if req.request_id in this_d.disjunctions_req:
+                        this_d.disjunctions_req.remove(req.request_id)
+                        this_d.disjunctions_req.append(this_r.request_id)
+                for this_d in disjlist:
+                    if temp_r_id in this_d.disjunctions_req:
+                        disjlist.remove(this_d)
                 break
     return local_list, disjlist


@@ -239,21 +239,25 @@ def parse_excel(input_filename):
     return services
 def parse_service_sheet(service_sheet):
+    """ reads each column according to authorized fieldnames. order is not important.
+    """
     logger.info(f'Validating headers on {service_sheet.name!r}')
     # add a test on field to enable the '' field case that arises when columns on the
     # right hand side are used as comments or drawing in the excel sheet
-    header = [x.value.strip() for x in service_sheet.row(4)[0:SERVICES_COLUMN] if len(x.value.strip())>0]
+    header = [x.value.strip() for x in service_sheet.row(4)[0:SERVICES_COLUMN]
+              if len(x.value.strip()) > 0]
     # create a service_fieldname independant from the excel column order
     # to be compatible with any version of the sheet
     # the following dictionnary records the excel field names and the corresponding parameter's name
-    authorized_fieldnames = {'route id':'request_id', 'Source':'source', 'Destination':'destination', \
+    authorized_fieldnames = {
+        'route id':'request_id', 'Source':'source', 'Destination':'destination', \
         'TRX type':'trx_type', 'Mode' : 'mode', 'System: spacing':'spacing', \
         'System: input power (dBm)':'power', 'System: nb of channels':'nb_channel',\
         'routing: disjoint from': 'disjoint_from', 'routing: path':'nodes_list',\
        'routing: is loose?':'is_loose', 'path bandwidth':'path_bandwidth'}
-    try :
+    try:
         service_fieldnames = [authorized_fieldnames[e] for e in header]
     except KeyError:
         msg = f'Malformed header on Service sheet: {header} field not in {authorized_fieldnames}'


@@ -18,10 +18,8 @@ from pathlib import Path
 import pytest
 from gnpy.core.equipment import load_equipment, trx_mode_params, automatic_nch
 from gnpy.core.network import load_network, build_network
-from examples.path_requests_run import (requests_from_json , correct_route_list ,
-                                        load_requests , disjunctions_from_json)
-from gnpy.core.request import (compute_path_dsjctn, isdisjoint , find_reversed_path,
-                               propagate,propagate_and_optimize_mode)
+from examples.path_requests_run import requests_from_json, correct_route_list, load_requests
+from gnpy.core.request import compute_path_dsjctn, propagate, propagate_and_optimize_mode
 from gnpy.core.utils import db2lin, lin2db
 from gnpy.core.elements import Roadm