Files
oopt-gnpy/examples/path_requests_run.py
EstherLerouzic 3ade885e41 improving path_request_run robustness to wrong excel input
- adding a test to check that transponder types and modes are part of the eqpt library
- adding formatting of numerical value input for path request ids
- automatically printing the results into a csv file in addition to the json file
  and with the same name + .csv
- printing results in a prettier way for demo

Signed-off-by: EstherLerouzic <esther.lerouzic@orange.com>
2018-09-11 11:09:28 +01:00

168 lines
7.1 KiB
Python

#!/usr/bin/env python3
# TelecomInfraProject/gnpy/examples
# Module name : path_requests_run.py
# Version :
# License : BSD 3-Clause Licence
# Copyright (c) 2018, Telecom Infra Project
"""
@author: esther.lerouzic
@author: jeanluc-auge
read json request file in accordance with:
Yang model for requesting Path Computation
draft-ietf-teas-yang-path-computation-01.txt.
and returns path results in terms of path and feasibility
"""
from sys import exit
from argparse import ArgumentParser
from pathlib import Path
from collections import namedtuple
from logging import getLogger, basicConfig, CRITICAL, DEBUG, INFO
from json import dumps, loads
from networkx import (draw_networkx_nodes, draw_networkx_edges,
                      draw_networkx_labels, dijkstra_path, NetworkXNoPath)
from numpy import mean
from examples.convert_service_sheet import convert_service_sheet, Request_element, Element
from gnpy.core.utils import load_json
from gnpy.core.network import load_network, build_network, set_roadm_loss
from gnpy.core.equipment import load_equipment, trx_mode_params
from gnpy.core.elements import Transceiver, Roadm, Edfa, Fused
from gnpy.core.utils import db2lin, lin2db
from gnpy.core.request import Path_request, Result_element, compute_constrained_path, propagate, jsontocsv
from copy import copy, deepcopy
#EQPT_LIBRARY_FILENAME = Path(__file__).parent / 'eqpt_config.json'
logger = getLogger(__name__)
parser = ArgumentParser(description = 'Computes performance for a list of services provided in a json file or an excel sheet.')
parser.add_argument('network_filename', nargs='?', type = Path, default= Path(__file__).parent / 'meshTopologyExampleV2.xls')
parser.add_argument('service_filename', nargs='?', type = Path, default= Path(__file__).parent / 'meshTopologyExampleV2.xls')
parser.add_argument('eqpt_filename', nargs='?', type = Path, default=Path(__file__).parent / 'eqpt_config.json')
parser.add_argument('-v', '--verbose', action='count')
parser.add_argument('-o', '--output', default=None)
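# Typical invocation (an assumption: the package is installed or the script is
# launched from the repository root with 'python -m'; file names are the
# defaults declared above and -o/--output is optional):
#   python -m examples.path_requests_run examples/meshTopologyExampleV2.xls \
#       examples/meshTopologyExampleV2.xls examples/eqpt_config.json -o results.json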
def requests_from_json(json_data,equipment):
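    """Build the list of Path_request objects from the service JSON.

    For each entry in json_data['path-request'], the transceiver parameters
    (e.g. baud rate and minimum OSNR, used further below) are looked up in the
    equipment library through trx_mode_params before building the Path_request.
    """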
    requests_list = []
    for req in json_data['path-request']:
        #print(f'{req}')
        params = {}
        params['request_id'] = req['request-id']
        params['source'] = req['src-tp-id']
        params['destination'] = req['dst-tp-id']
        params['trx_type'] = req['path-constraints']['te-bandwidth']['trx_type']
        params['trx_mode'] = req['path-constraints']['te-bandwidth']['trx_mode']
        params['format'] = params['trx_mode']
        nd_list = req['optimizations']['explicit-route-include-objects']
        params['nodes_list'] = [n['unnumbered-hop']['node-id'] for n in nd_list]
        params['loose_list'] = [n['unnumbered-hop']['hop-type'] for n in nd_list]
        params['spacing'] = req['path-constraints']['te-bandwidth']['spacing']
        trx_params = trx_mode_params(equipment,params['trx_type'],params['trx_mode'],True)
        params.update(trx_params)
        params['power'] = req['path-constraints']['te-bandwidth']['output-power']
        params['nb_channel'] = req['path-constraints']['te-bandwidth']['max-nb-of-channel']
        requests_list.append(Path_request(**params))
    return requests_list

def load_requests(filename,eqpt_filename):
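    """Return the service requests as JSON data, converting from XLS when needed."""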
    if filename.suffix.lower() == '.xls':
        logger.info('Automatically converting requests from XLS to JSON')
        json_data = convert_service_sheet(filename,eqpt_filename)
    else:
        with open(filename) as f:
            json_data = loads(f.read())
    return json_data

def compute_path(network, equipment, pathreqlist):
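    """Compute and propagate one path per request.

    The network is rebuilt for every request because the autodesign depends on
    the per-request total power; an empty list is appended when no path is found.
    """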
    path_res_list = []
    for pathreq in pathreqlist:
        # need to rebuild the network for each path because the total power
        # can be different and the choice of amplifiers in autodesign is power dependent
        # but the design is the same if the total power is the same
        # TODO parametrize the total spectrum power so the same design can be shared
        p_db = lin2db(pathreq.power*1e3)
        p_total_db = p_db + lin2db(pathreq.nb_channel)
        build_network(network, equipment, p_db, p_total_db)
        pathreq.nodes_list.append(pathreq.destination)
        # we assume that the destination is a strict constraint
        pathreq.loose_list.append('strict')
        print(f'Computing path from {pathreq.source} to {pathreq.destination}')
        # adding the first node to be clearer on the output
        print(f'with path constraint: {[pathreq.source]+pathreq.nodes_list}')
        total_path = compute_constrained_path(network, pathreq)
        print(f'Computed path (roadms):{[e.uid for e in total_path if isinstance(e, Roadm)]}\n')
        # for debug
        # print(f'{pathreq.baud_rate} {pathreq.power} {pathreq.spacing} {pathreq.nb_channel}')
        if total_path:
            total_path = propagate(total_path,pathreq,equipment, show=False)
        else:
            total_path = []
        # we record the last transceiver object in order to have the whole
        # information about spectrum. Important note: since transceivers
        # attached to roadms are actually logical elements to simulate
        # performance, several demands having the same destination may use
        # the same transponder for the performance simulation. This is why
        # we use deepcopy: to ensure each propagation is recorded and not
        # overwritten
        path_res_list.append(deepcopy(total_path))
    return path_res_list

def path_result_json(pathresult):
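    """Wrap the propagated path elements into a json-serializable result dict."""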
    data = {
        'path': [n.json for n in pathresult]
    }
    return data

if __name__ == '__main__':
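    # Overall flow: load the service requests and the equipment/network
    # descriptions, compute and propagate each requested path, print a summary
    # table, and optionally write the results to JSON (plus a CSV with the same
    # base name) when -o/--output is given.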
    args = parser.parse_args()
    basicConfig(level={2: DEBUG, 1: INFO, 0: CRITICAL}.get(args.verbose, CRITICAL))
    logger.info(f'Computing path requests {args.service_filename} into JSON format')
    # for debug
    # print( args.eqpt_filename)

    data = load_requests(args.service_filename,args.eqpt_filename)
    equipment = load_equipment(args.eqpt_filename)
    network = load_network(args.network_filename,equipment)
    pths = requests_from_json(data, equipment)
    print(pths)
    test = compute_path(network, equipment, pths)
    # TODO write results

    header = ['demand','snr@bandwidth','snr@0.1nm','Receiver minOSNR']
    data = []
    data.append(header)
    for i, p in enumerate(test):
        if p:
            line = [f'{pths[i].source} to {pths[i].destination} : ', f'{round(mean(p[-1].snr),2)}',
                    f'{round(mean(p[-1].snr+lin2db(pths[i].baud_rate/(12.5e9))),2)}',
                    f'{pths[i].OSNR}']
        else:
            line = [f'no path from {pths[i].source} to {pths[i].destination} ']
        data.append(line)

    col_width = max(len(word) for row in data for word in row)  # padding
    for row in data:
        print(''.join(word.ljust(col_width) for word in row))

    if args.output:
        result = []
        for p in test:
            result.append(Result_element(pths[test.index(p)],p))
        with open(args.output, 'w') as f:
            f.write(dumps(path_result_json(result), indent=2))
        fnamecsv = next(s for s in args.output.split('.')) + '.csv'
        with open(fnamecsv,"w") as fcsv:
            jsontocsv(path_result_json(result),equipment,fcsv)