mirror of
https://github.com/Telecominfraproject/oopt-gnpy.git
synced 2025-11-01 18:47:48 +00:00
small fixes
This commit is contained in:
@@ -448,7 +448,7 @@ power/channel definition.
|
||||
+----------------------+-----------+-------------------------------------------+
|
||||
|
||||
The `transmission_main_example.py <examples/transmission_main_example.py>`_
|
||||
script propagates a spectrum of 96 channels at 32 Gbaud, 50 GHz spacing and 0
|
||||
script propagates a spectrum of channels at 32 Gbaud, 50 GHz spacing and 0
|
||||
dBm/channel. These are not yet parametrized but can be modified directly in the
|
||||
script (via the SpectralInformation structure) to accommodate any baud rate,
|
||||
spacing, power or channel count demand.
|
||||
|
||||
@@ -11,216 +11,15 @@ Yang model for requesting path computation.
|
||||
See: draft-ietf-teas-yang-path-computation-01.txt
|
||||
"""
|
||||
|
||||
from sys import exit
|
||||
try:
|
||||
from xlrd import open_workbook, XL_CELL_EMPTY
|
||||
except ModuleNotFoundError:
|
||||
exit('Required: `pip install xlrd`')
|
||||
from argparse import ArgumentParser
|
||||
from collections import namedtuple
|
||||
from logging import getLogger, basicConfig, CRITICAL, DEBUG, INFO
|
||||
from json import dumps
|
||||
from pathlib import Path
|
||||
from gnpy.core.equipment import load_equipment
|
||||
from gnpy.core.utils import db2lin, lin2db
|
||||
|
||||
SERVICES_COLUMN = 11
|
||||
#EQPT_LIBRARY_FILENAME = Path(__file__).parent / 'eqpt_config.json'
|
||||
from gnpy.core.service_sheet import Request, Element, Request_element
|
||||
from gnpy.core.service_sheet import parse_row, parse_excel, convert_service_sheet
|
||||
|
||||
def all_rows(sheet, start=0):
    """Yield the rows of *sheet* from row index *start* to the last row.

    Replaces the previous ``all_rows = lambda ...`` assignment (PEP 8 E731:
    a named lambda should be a ``def``); behavior is unchanged.
    """
    return (sheet.row(x) for x in range(start, sheet.nrows))


logger = getLogger(__name__)

# command-line interface: excel workbook to convert, verbosity, output path
parser = ArgumentParser()
parser.add_argument('workbook', nargs='?', type=Path, default='meshTopologyExampleV2.xls')
parser.add_argument('-v', '--verbose', action='count')
parser.add_argument('-o', '--output', default=None)
|
||||
|
||||
# Type for input data
|
||||
class Request(namedtuple('Request',
                         'request_id source destination trx_type mode '
                         'spacing power nb_channel disjoint_from '
                         'nodes_list is_loose')):
    """Input data for one service row; optional columns get defaults."""

    def __new__(cls, request_id, source, destination, trx_type, mode,
                spacing, power, nb_channel, disjoint_from='',
                nodes_list=None, is_loose=''):
        return super().__new__(cls, request_id, source, destination,
                               trx_type, mode, spacing, power, nb_channel,
                               disjoint_from, nodes_list, is_loose)
|
||||
|
||||
# Type for output data: // from dutc
|
||||
# Type for output data: // from dutc
class Element:
    """Base output element: identity is the pair (concrete type, uid)."""

    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        return self.uid == other.uid

    def __hash__(self):
        return hash((type(self), self.uid))
|
||||
|
||||
class Request_element(Element):
    """One path request built from a row of the excel 'Service' sheet.

    Validates the transceiver type/mode against the equipment library,
    converts the excel units (GHz, dBm) into SI units (Hz, W) and exposes
    the request as the json structures expected by the path-computation
    yang model (draft-ietf-teas-yang-path-computation).
    """

    def __init__(self, Request, eqpt_filename):
        # request_id is a str; excel automatic number formatting turns
        # integer cells into floats (e.g. 2 -> 2.0), so recover the pure
        # int representation when the cell was not read as text
        if not isinstance(Request.request_id, str):
            self.request_id = str(int(Request.request_id))
        else:
            self.request_id = Request.request_id
        self.source = Request.source
        self.destination = Request.destination
        self.srctpid = f'trx {Request.source}'
        self.dsttpid = f'trx {Request.destination}'
        # check that trx_type belongs to eqpt_config.json and stop the
        # whole computation if it does not
        equipment = load_equipment(eqpt_filename)
        try:
            if equipment['Transceiver'][Request.trx_type]:
                self.trx_type = Request.trx_type
                # NOTE(review): self.mode stays unset when the transceiver
                # declares no mode at all; pathrequest would then raise
                # AttributeError -- confirm a valid library always has modes
                if [mode for mode in equipment['Transceiver'][Request.trx_type].mode]:
                    self.mode = Request.mode
        except KeyError:
            msg = f'could not find tsp : {Request.trx_type} with mode: {Request.mode} in eqpt library \nComputation stopped.'
            logger.critical(msg)
            exit()
        # excel inputs are in GHz and dBm: convert to Hz and W
        self.spacing = Request.spacing * 1e9
        self.power = db2lin(Request.power) * 1e-3
        self.nb_channel = int(Request.nb_channel)
        # same excel float-formatting recovery as request_id above
        # (str(int(...)) can never end with '.0', so the old trailing-'.0'
        # strip was dead code and is removed)
        if not isinstance(Request.disjoint_from, str):
            value = str(int(Request.disjoint_from))
        else:
            value = Request.disjoint_from
        self.disjoint_from = [n for n in value.split()]
        self.nodes_list = []
        if Request.nodes_list:
            self.nodes_list = Request.nodes_list.split(' | ')
        # source and destination are implicit endpoints: drop them from the
        # explicit path node-list if the user repeated them there
        for endpoint in (self.source, self.destination):
            try:
                self.nodes_list.remove(endpoint)
                logger.info(f'{endpoint} removed from explicit path node-list')
            except ValueError:
                logger.info(f'{endpoint} already removed from explicit path node-list')

        # 'routing: is loose?' column: anything but 'no' means loose routing
        self.loose = 'loose'
        if Request.is_loose == 'no':
            self.loose = 'strict'

    # NOTE(review): default repr includes the object address, so uid is
    # unique per instance -- confirm that is the intended identity
    uid = property(lambda self: repr(self))

    @property
    def pathrequest(self):
        """json 'path-request' entry for this request."""
        return {
            'request-id': self.request_id,
            'source': self.source,
            'destination': self.destination,
            'src-tp-id': self.srctpid,
            'dst-tp-id': self.dsttpid,
            'path-constraints': {
                'te-bandwidth': {
                    'technology': 'flexi-grid',
                    'trx_type': self.trx_type,
                    'trx_mode': self.mode,
                    'effective-freq-slot': [{'n': 'null', 'm': 'null'}],
                    'spacing': self.spacing,
                    'max-nb-of-channel': self.nb_channel,
                    'output-power': self.power
                }
            },
            'optimizations': {
                'explicit-route-include-objects': [
                    {
                        # enumerate avoids the O(n^2) list.index() lookup
                        # and is correct even with duplicated node names
                        'index': index,
                        'unnumbered-hop': {
                            'node-id': f'{node}',
                            'link-tp-id': 'link-tp-id is not used',
                            # BUG FIX: was hardcoded 'loose'; honour the
                            # per-request 'routing: is loose?' column
                            'hop-type': self.loose,
                            'direction': 'direction is not used'
                        },
                        'label-hop': {
                            'te-label': {
                                'generic': 'generic is not used',
                                'direction': 'direction is not used'
                            }
                        }
                    }
                    for index, node in enumerate(self.nodes_list)
                ]
            }
        }

    @property
    def pathsync(self):
        """json synchronisation (disjointness) entry, or None when the
        request carries no disjointness constraint."""
        if self.disjoint_from:
            # NOTE(review): the 'synchonization-id' typo is kept on purpose:
            # it is a runtime json key that consumers already expect
            return {
                'synchonization-id': self.request_id,
                'svec': {
                    'relaxable': 'False',
                    'link-diverse': 'True',
                    'node-diverse': 'True',
                    'request-id-number': [self.request_id] + [n for n in self.disjoint_from]
                }
            }
        # TO-DO: avoid multiple entries with same synchronisation vectors
        return None

    @property
    def json(self):
        """(path-request, synchronisation) pair; second item may be None."""
        return self.pathrequest, self.pathsync
|
||||
|
||||
def convert_service_sheet(input_filename, eqpt_filename, output_filename='', filter_region=[]):
    """Convert the excel service sheet into a json path-request file.

    Writes the json to *output_filename* (default: the input file name with
    its extension replaced by '_services.json') and returns the json data.
    *filter_region* is currently unused.
    """
    service = parse_excel(input_filename)
    req = [Request_element(n, eqpt_filename) for n in service]
    if output_filename == '':
        # BUG FIX: the old slicing on len(str(input_filename.suffixes[0]))
        # truncated the wrong characters for dotted names such as 'a.b.xls';
        # with_suffix('') strips exactly the final extension
        output_filename = f'{Path(input_filename).with_suffix("")}_services.json'
    data = {
        'path-request': [n.json[0] for n in req],
        'synchronisation': [n.json[1] for n in req
                            if n.json[1] is not None]
    }
    with open(output_filename, 'w') as f:
        f.write(dumps(data, indent=2))
    return data
|
||||
|
||||
# to be used from dutc
|
||||
def parse_row(row, fieldnames):
    """Map the first SERVICES_COLUMN cells of *row* onto *fieldnames*,
    skipping cells that are empty in the workbook."""
    parsed = {}
    for name, cell in zip(fieldnames, row[:SERVICES_COLUMN]):
        if cell.ctype != XL_CELL_EMPTY:
            parsed[name] = cell.value
    return parsed
|
||||
#
|
||||
|
||||
def parse_excel(input_filename):
    """Open the workbook and return the list of Request rows found on its
    'Service' sheet."""
    with open_workbook(input_filename) as wb:
        sheet = wb.sheet_by_name('Service')
        return list(parse_service_sheet(sheet))
|
||||
|
||||
def parse_service_sheet(service_sheet):
    """Yield one Request per data row of the 'Service' sheet.

    Raises ValueError when the header row (sheet row 5) does not match the
    expected column layout.
    """
    logger.info(f'Validating headers on {service_sheet.name!r}')
    header = [cell.value.strip() for cell in service_sheet.row(4)[:SERVICES_COLUMN]]
    expected = ['route id', 'Source', 'Destination', 'TRX type',
                'Mode', 'System: spacing', 'System: input power (dBm)',
                'System: nb of channels',
                'routing: disjoint from', 'routing: path', 'routing: is loose?']
    if header != expected:
        msg = f'Malformed header on Service sheet: {header} != {expected}'
        logger.critical(msg)
        raise ValueError(msg)

    service_fieldnames = 'request_id source destination trx_type mode spacing power nb_channel disjoint_from nodes_list is_loose'.split()
    # Important Note: every row is read in full up to SERVICES_COLUMN, so
    # annotations cannot be written in the sheet beyond that column
    # ... TO BE IMPROVED
    # request_id should be unique for disjunction constraints (not used yet)
    for row in all_rows(service_sheet, start=5):
        yield Request(**parse_row(row[:SERVICES_COLUMN], service_fieldnames))
|
||||
|
||||
if __name__ == '__main__':
    args = parser.parse_args()
    # -v/-vv raise the log level; default (no -v) is CRITICAL only
    verbosity_to_level = {2: DEBUG, 1: INFO, 0: CRITICAL}
    basicConfig(level=verbosity_to_level.get(args.verbose, CRITICAL))
|
||||
|
||||
@@ -684,7 +684,6 @@ class Edfa(Node):
|
||||
|
||||
def propagate(self, pref, *carriers):
|
||||
"""add ase noise to the propagating carriers of SpectralInformation"""
|
||||
i = 0
|
||||
pin = array([c.power.signal+c.power.nli+c.power.ase for c in carriers]) # pin in W
|
||||
freq = array([c.frequency for c in carriers])
|
||||
brate = array([c.baud_rate for c in carriers])
|
||||
@@ -697,7 +696,6 @@ class Edfa(Node):
|
||||
|
||||
for gain, carrier_ase, carrier in zip(gains, carrier_ases, carriers):
|
||||
pwr = carrier.power
|
||||
bw = carrier.baud_rate
|
||||
pwr = pwr._replace(signal=pwr.signal*gain/att,
|
||||
nonlinear_interference=pwr.nli*gain/att,
|
||||
amplified_spontaneous_emission=(pwr.ase+carrier_ase)*gain/att)
|
||||
|
||||
@@ -16,7 +16,6 @@ try:
|
||||
from xlrd import open_workbook, XL_CELL_EMPTY
|
||||
except ModuleNotFoundError:
|
||||
exit('Required: `pip install xlrd`')
|
||||
from argparse import ArgumentParser
|
||||
from collections import namedtuple
|
||||
from logging import getLogger, basicConfig, CRITICAL, DEBUG, INFO
|
||||
from json import dumps
|
||||
|
||||
@@ -1,36 +1,40 @@
|
||||
alabaster==0.7.10
|
||||
attrs==17.4.0
|
||||
Babel==2.5.3
|
||||
certifi==2017.11.5
|
||||
alabaster==0.7.12
|
||||
atomicwrites==1.2.1
|
||||
attrs==18.2.0
|
||||
Babel==2.6.0
|
||||
certifi==2018.10.15
|
||||
chardet==3.0.4
|
||||
cycler==0.10.0
|
||||
decorator==4.1.2
|
||||
decorator==4.3.0
|
||||
docutils==0.14
|
||||
idna==2.6
|
||||
imagesize==0.7.1
|
||||
idna==2.7
|
||||
imagesize==1.1.0
|
||||
Jinja2==2.10
|
||||
kiwisolver==1.0.1
|
||||
latexcodec==1.0.5
|
||||
MarkupSafe==1.0
|
||||
matplotlib==2.1.0
|
||||
networkx==2.0
|
||||
numpy==1.13.3
|
||||
matplotlib==3.0.0
|
||||
more-itertools==4.3.0
|
||||
networkx==2.2
|
||||
numpy==1.15.2
|
||||
oset==0.1.3
|
||||
pluggy==0.6.0
|
||||
py==1.5.2
|
||||
packaging==18.0
|
||||
pluggy==0.7.1
|
||||
py==1.7.0
|
||||
pybtex==0.21
|
||||
pybtex-docutils==0.2.1
|
||||
Pygments==2.2.0
|
||||
pyparsing==2.2.0
|
||||
pytest==3.3.2
|
||||
python-dateutil==2.6.1
|
||||
pytz==2017.3
|
||||
PyYAML==3.12
|
||||
requests==2.18.4
|
||||
scipy==1.0.0
|
||||
pyparsing==2.2.2
|
||||
pytest==3.8.2
|
||||
python-dateutil==2.7.3
|
||||
pytz==2018.5
|
||||
PyYAML==3.13
|
||||
requests==2.19.1
|
||||
scipy==1.1.0
|
||||
six==1.11.0
|
||||
snowballstemmer==1.2.1
|
||||
Sphinx==1.6.6
|
||||
sphinxcontrib-bibtex==0.3.6
|
||||
sphinxcontrib-websupport==1.0.1
|
||||
urllib3==1.22
|
||||
Sphinx==1.8.1
|
||||
sphinxcontrib-bibtex==0.4.0
|
||||
sphinxcontrib-websupport==1.1.0
|
||||
urllib3==1.23
|
||||
xlrd==1.1.0
|
||||
|
||||
Reference in New Issue
Block a user