#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
gnpy.core.convert
=================

This module contains utilities for converting between XLS and JSON.

The input XLS file must contain sheets named "Nodes" and "Links".
It may optionally contain a sheet named "Eqpt".

In the "Nodes" sheet, only the "City" column is mandatory. The column "Type"
can be determined automatically given the topology (e.g., if degree 2, ILA;
otherwise, ROADM). Incorrectly specified types (e.g., ILA for a node of
degree ≠ 2) will be automatically corrected.

In the "Links" sheet, only the first three columns ("Node A", "Node Z" and
"east Distance (km)") are mandatory. Missing "west" information is copied from
the "east" information so that it is possible to input undirected data.
"""
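
# A minimal usage sketch (not executed here) for converting a workbook from
# another script; the workbook name is an assumption based on the example file
# shipped with this repository:
#
#     from pathlib import Path
#     from gnpy.core.convert import convert_file
#
#     json_name = convert_file(Path('meshTopologyExampleV2.xls'))
#     # writes meshTopologyExampleV2.json next to the workbook and returns its name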

from sys import exit
try:
    from xlrd import open_workbook
except ModuleNotFoundError:
    exit('Required: `pip install xlrd`')
from argparse import ArgumentParser
from collections import namedtuple, Counter, defaultdict
from itertools import chain
from json import dumps
from pathlib import Path
from difflib import get_close_matches
from copy import copy
from gnpy.core.utils import silent_remove
from gnpy.core.exceptions import NetworkTopologyError
from gnpy.core.elements import Transceiver, Roadm, Edfa, Fused, Fiber
import time


all_rows = lambda sh, start=0: (sh.row(x) for x in range(start, sh.nrows))


class Node(object):
    def __init__(self, **kwargs):
        super(Node, self).__init__()
        self.update_attr(kwargs)

    def update_attr(self, kwargs):
        clean_kwargs = {k: v for k, v in kwargs.items() if v != ''}
        for k, v in self.default_values.items():
            v = clean_kwargs.get(k, v)
            setattr(self, k, v)

    default_values = {
        'city': '',
        'state': '',
        'country': '',
        'region': '',
        'latitude': 0,
        'longitude': 0,
        'node_type': 'ILA',
        'booster_restriction': '',
        'preamp_restriction': ''
    }
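
# Illustrative sketch (values made up) of how Node applies defaults: empty
# spreadsheet cells are filtered out by update_attr(), so they fall back to
# default_values.
#
#     n = Node(city='Lannion', node_type='', latitude='')
#     # n.city == 'Lannion'; n.node_type == 'ILA' and n.latitude == 0 (defaults)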


class Link(object):
    """attributes from the west parse_ept_headers dict
    + node_a, node_z, west_fiber_con_in, east_fiber_con_in
    """
    def __init__(self, **kwargs):
        super(Link, self).__init__()
        self.update_attr(kwargs)
        self.distance_units = 'km'

    def update_attr(self, kwargs):
        clean_kwargs = {k: v for k, v in kwargs.items() if v != ''}
        for k, v in self.default_values.items():
            v = clean_kwargs.get(k, v)
            setattr(self, k, v)
            k = 'west' + k.split('east')[-1]
            v = clean_kwargs.get(k, v)
            setattr(self, k, v)

    def __eq__(self, link):
        return (self.from_city == link.from_city and self.to_city == link.to_city) \
            or (self.from_city == link.to_city and self.to_city == link.from_city)

    default_values = {
        'from_city': '',
        'to_city': '',
        'east_distance': 80,
        'east_fiber': 'SSMF',
        'east_lineic': 0.2,
        'east_con_in': None,
        'east_con_out': None,
        'east_pmd': 0.1,
        'east_cable': ''
    }
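
# Illustrative sketch (values made up) of the east/west mirroring done by
# Link.update_attr(): a "west_*" value left empty in the sheet is copied from
# the corresponding "east_*" value (or its default).
#
#     lnk = Link(from_city='A', to_city='B', east_distance=100)
#     # lnk.east_distance == 100 and lnk.west_distance == 100 (mirrored)
#     # lnk.east_fiber == lnk.west_fiber == 'SSMF' (default)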


class Eqpt(object):
    def __init__(self, **kwargs):
        super(Eqpt, self).__init__()
        self.update_attr(kwargs)

    def update_attr(self, kwargs):
        clean_kwargs = {k: v for k, v in kwargs.items() if v != ''}
        for k, v in self.default_values.items():
            v_east = clean_kwargs.get(k, v)
            setattr(self, k, v_east)
            k = 'west' + k.split('east')[-1]
            v_west = clean_kwargs.get(k, v)
            setattr(self, k, v_west)

    default_values = {
        'from_city': '',
        'to_city': '',
        'east_amp_type': '',
        'east_att_in': 0,
        'east_amp_gain': None,
        'east_amp_dp': None,
        'east_tilt': 0,
        'east_att_out': None
    }


def read_header(my_sheet, line, slice_):
    """return the list of headers that are != ''
    header_i = [(header, header_column_index), ...]
    in a {line, slice_x, slice_y} range
    """
    Param_header = namedtuple('Param_header', 'header colindex')
    try:
        header = [x.value.strip() for x in my_sheet.row_slice(line, slice_[0], slice_[1])]
        header_i = [Param_header(header, i + slice_[0]) for i, header in enumerate(header) if header != '']
    except Exception:
        header_i = []
    if header_i != [] and header_i[-1].colindex != slice_[1]:
        header_i.append(Param_header('', slice_[1]))
    return header_i


def read_slice(my_sheet, line, slice_, header):
    """return the slice range of a given header
    in a defined range {line, slice_x, slice_y}"""
    header_i = read_header(my_sheet, line, slice_)
    slice_range = (-1, -1)
    if header_i != []:
        try:
            slice_range = next((h.colindex, header_i[i + 1].colindex)
                               for i, h in enumerate(header_i) if header in h.header)
        except Exception:
            pass
    return slice_range


def parse_headers(my_sheet, input_headers_dict, headers, start_line, slice_in):
    """return a dict of header_slice
    key = column index
    value = header name"""

    for h0 in input_headers_dict:
        slice_out = read_slice(my_sheet, start_line, slice_in, h0)
        iteration = 1
        while slice_out == (-1, -1) and iteration < 10:
            # try the next lines
            slice_out = read_slice(my_sheet, start_line + iteration, slice_in, h0)
            iteration += 1
        if slice_out == (-1, -1):
            if h0 in ('east', 'Node A', 'Node Z', 'City'):
                print('\x1b[1;31;40m' + f'CRITICAL: missing _{h0}_ header: EXECUTION ENDS' + '\x1b[0m')
                exit()
            else:
                print(f'missing header {h0}')
        elif not isinstance(input_headers_dict[h0], dict):
            headers[slice_out[0]] = input_headers_dict[h0]
        else:
            headers = parse_headers(my_sheet, input_headers_dict[h0], headers, start_line + 1, slice_out)
    if headers == {}:
        print('\x1b[1;31;40m' + 'CRITICAL ERROR: could not find any header to read _ ABORT' + '\x1b[0m')
        exit()
    return headers


def parse_row(row, headers):
    return {f: r.value for f, r in
            zip([label for label in headers.values()], [row[i] for i in headers])}


def parse_sheet(my_sheet, input_headers_dict, header_line, start_line, column):
    headers = parse_headers(my_sheet, input_headers_dict, {}, header_line, (0, column))
    for row in all_rows(my_sheet, start=start_line):
        yield parse_row(row[0: column], headers)
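
# Sketch of the data flow in parse_headers()/parse_row(): the nested header
# dictionaries defined in parse_excel() are flattened into a mapping of
# column index -> attribute name, which parse_row() then uses to build one
# dict per spreadsheet row (column indices and values below are made up):
#
#     headers = {0: 'from_city', 1: 'to_city', 2: 'east_distance'}
#     # parse_row(row, headers)
#     # -> {'from_city': 'A', 'to_city': 'B', 'east_distance': 80.0}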


def sanity_check(nodes, links, nodes_by_city, links_by_city, eqpts_by_city):

    duplicate_links = []
    for l1 in links:
        for l2 in links:
            if l1 is not l2 and l1 == l2 and l2 not in duplicate_links:
                print(f'\nWARNING\n \
                    link {l1.from_city}-{l1.to_city} is duplicate \
                    \nthe 1st duplicate link will be removed but you should check the Links sheet input')
                duplicate_links.append(l1)
    for l in duplicate_links:
        links.remove(l)

    try:
        test_nodes = [n for n in nodes_by_city if n not in links_by_city]
        test_links = [n for n in links_by_city if n not in nodes_by_city]
        test_eqpts = [n for n in eqpts_by_city if n not in nodes_by_city]
        assert (test_nodes == [] or test_nodes == ['']) \
            and (test_links == [] or test_links == ['']) \
            and (test_eqpts == [] or test_eqpts == [''])
    except AssertionError:
        print(f'CRITICAL error: \nNames in the Nodes and Links sheets do not match, check:\
            \n{test_nodes} in Nodes sheet\
            \n{test_links} in Links sheet\
            \n{test_eqpts} in Eqpt sheet')
        exit(1)

    for city, link in links_by_city.items():
        if nodes_by_city[city].node_type.lower() == 'ila' and len(link) != 2:
            # wrong input: ILA sites can only be of degree 2
            # => correct to make it a ROADM and remove entry in links_by_city
            # TODO: put in log rather than print
            print(f'invalid node type ({nodes_by_city[city].node_type})\
                specified in {city}, replaced by ROADM')
            nodes_by_city[city].node_type = 'ROADM'
            for n in nodes:
                if n.city == city:
                    n.node_type = 'ROADM'
    return nodes, links


def convert_file(input_filename, names_matching=False, filter_region=[]):
    nodes, links, eqpts = parse_excel(input_filename)
    if filter_region:
        nodes = [n for n in nodes if n.region.lower() in filter_region]
        cities = {n.city for n in nodes}
        links = [lnk for lnk in links if lnk.from_city in cities and
                 lnk.to_city in cities]
        cities = {lnk.from_city for lnk in links} | {lnk.to_city for lnk in links}
        nodes = [n for n in nodes if n.city in cities]

    global nodes_by_city
    nodes_by_city = {n.city: n for n in nodes}

    global links_by_city
    links_by_city = defaultdict(list)
    for link in links:
        links_by_city[link.from_city].append(link)
        links_by_city[link.to_city].append(link)

    global eqpts_by_city
    eqpts_by_city = defaultdict(list)
    for eqpt in eqpts:
        eqpts_by_city[eqpt.from_city].append(eqpt)

    nodes, links = sanity_check(nodes, links, nodes_by_city, links_by_city, eqpts_by_city)

    data = {
        'elements':
            [{'uid': f'trx {x.city}',
              'metadata': {'location': {'city': x.city,
                                        'region': x.region,
                                        'latitude': x.latitude,
                                        'longitude': x.longitude}},
              'type': 'Transceiver'}
             for x in nodes_by_city.values() if x.node_type.lower() == 'roadm'] +
            [{'uid': f'roadm {x.city}',
              'metadata': {'location': {'city': x.city,
                                        'region': x.region,
                                        'latitude': x.latitude,
                                        'longitude': x.longitude}},
              'type': 'Roadm'}
             for x in nodes_by_city.values() if x.node_type.lower() == 'roadm'
             and x.booster_restriction == '' and x.preamp_restriction == ''] +
            [{'uid': f'roadm {x.city}',
              'params': {
                  'restrictions': {
                      'preamp_variety_list': silent_remove(x.preamp_restriction.split(' | '), ''),
                      'booster_variety_list': silent_remove(x.booster_restriction.split(' | '), '')
                  }
              },
              'metadata': {'location': {'city': x.city,
                                        'region': x.region,
                                        'latitude': x.latitude,
                                        'longitude': x.longitude}},
              'type': 'Roadm'}
             for x in nodes_by_city.values() if x.node_type.lower() == 'roadm' and
             (x.booster_restriction != '' or x.preamp_restriction != '')] +
            [{'uid': f'west fused spans in {x.city}',
              'metadata': {'location': {'city': x.city,
                                        'region': x.region,
                                        'latitude': x.latitude,
                                        'longitude': x.longitude}},
              'type': 'Fused'}
             for x in nodes_by_city.values() if x.node_type.lower() == 'fused'] +
            [{'uid': f'east fused spans in {x.city}',
              'metadata': {'location': {'city': x.city,
                                        'region': x.region,
                                        'latitude': x.latitude,
                                        'longitude': x.longitude}},
              'type': 'Fused'}
             for x in nodes_by_city.values() if x.node_type.lower() == 'fused'] +
            [{'uid': f'fiber ({x.from_city} \u2192 {x.to_city})-{x.east_cable}',
              'metadata': {'location': midpoint(nodes_by_city[x.from_city],
                                                nodes_by_city[x.to_city])},
              'type': 'Fiber',
              'type_variety': x.east_fiber,
              'params': {'length': round(x.east_distance, 3),
                         'length_units': x.distance_units,
                         'loss_coef': x.east_lineic,
                         'con_in': x.east_con_in,
                         'con_out': x.east_con_out}
              }
             for x in links] +
            [{'uid': f'fiber ({x.to_city} \u2192 {x.from_city})-{x.west_cable}',
              'metadata': {'location': midpoint(nodes_by_city[x.from_city],
                                                nodes_by_city[x.to_city])},
              'type': 'Fiber',
              'type_variety': x.west_fiber,
              'params': {'length': round(x.west_distance, 3),
                         'length_units': x.distance_units,
                         'loss_coef': x.west_lineic,
                         'con_in': x.west_con_in,
                         'con_out': x.west_con_out}
              }  # missing ILA construction
             for x in links] +
            [{'uid': f'east edfa in {e.from_city} to {e.to_city}',
              'metadata': {'location': {'city': nodes_by_city[e.from_city].city,
                                        'region': nodes_by_city[e.from_city].region,
                                        'latitude': nodes_by_city[e.from_city].latitude,
                                        'longitude': nodes_by_city[e.from_city].longitude}},
              'type': 'Edfa',
              'type_variety': e.east_amp_type,
              'operational': {'gain_target': e.east_amp_gain,
                              'delta_p': e.east_amp_dp,
                              'tilt_target': e.east_tilt,
                              'out_voa': e.east_att_out}
              }
             for e in eqpts if (e.east_amp_type.lower() != '' and
                                e.east_amp_type.lower() != 'fused')] +
            [{'uid': f'west edfa in {e.from_city} to {e.to_city}',
              'metadata': {'location': {'city': nodes_by_city[e.from_city].city,
                                        'region': nodes_by_city[e.from_city].region,
                                        'latitude': nodes_by_city[e.from_city].latitude,
                                        'longitude': nodes_by_city[e.from_city].longitude}},
              'type': 'Edfa',
              'type_variety': e.west_amp_type,
              'operational': {'gain_target': e.west_amp_gain,
                              'delta_p': e.west_amp_dp,
                              'tilt_target': e.west_tilt,
                              'out_voa': e.west_att_out}
              }
             for e in eqpts if (e.west_amp_type.lower() != '' and
                                e.west_amp_type.lower() != 'fused')] +
            # the 'fused' amp type is a hack to indicate that there should be no
            # booster amplifier out of the ROADM.
            # If the user specifies ILA in the Nodes sheet and fused in the Eqpt sheet,
            # then assume that this is a fused node.
            [{'uid': f'east edfa in {e.from_city} to {e.to_city}',
              'metadata': {'location': {'city': nodes_by_city[e.from_city].city,
                                        'region': nodes_by_city[e.from_city].region,
                                        'latitude': nodes_by_city[e.from_city].latitude,
                                        'longitude': nodes_by_city[e.from_city].longitude}},
              'type': 'Fused',
              'params': {'loss': 0}
              }
             for e in eqpts if e.east_amp_type.lower() == 'fused'] +
            [{'uid': f'west edfa in {e.from_city} to {e.to_city}',
              'metadata': {'location': {'city': nodes_by_city[e.from_city].city,
                                        'region': nodes_by_city[e.from_city].region,
                                        'latitude': nodes_by_city[e.from_city].latitude,
                                        'longitude': nodes_by_city[e.from_city].longitude}},
              'type': 'Fused',
              'params': {'loss': 0}
              }
             for e in eqpts if e.west_amp_type.lower() == 'fused'],
        'connections':
            list(chain.from_iterable([eqpt_connection_by_city(n.city)
                                      for n in nodes]))
            +
            list(chain.from_iterable(zip(
                [{'from_node': f'trx {x.city}',
                  'to_node': f'roadm {x.city}'}
                 for x in nodes_by_city.values() if x.node_type.lower() == 'roadm'],
                [{'from_node': f'roadm {x.city}',
                  'to_node': f'trx {x.city}'}
                 for x in nodes_by_city.values() if x.node_type.lower() == 'roadm'])))
    }

    suffix_filename = str(input_filename.suffixes[0])
    full_input_filename = str(input_filename)
    split_filename = [full_input_filename[0:len(full_input_filename) - len(suffix_filename)], suffix_filename[1:]]
    output_json_file_name = split_filename[0] + '.json'
    with open(output_json_file_name, 'w', encoding='utf-8') as edfa_json_file:
        edfa_json_file.write(dumps(data, indent=2, ensure_ascii=False))
    return output_json_file_name
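
# For reference, a hand-written, abbreviated sketch of what convert_file()
# emits for one ROADM and one fiber; names and figures are illustrative only,
# and the real file also contains transceivers, amplifiers and the
# "connections" list:
#
#     {
#       "elements": [
#         {"uid": "roadm Lannion_CAS",
#          "metadata": {"location": {"city": "Lannion_CAS", "region": "",
#                                    "latitude": 0, "longitude": 0}},
#          "type": "Roadm"},
#         {"uid": "fiber (Lannion_CAS → Corlay)-F061",
#          "type": "Fiber", "type_variety": "SSMF",
#          "params": {"length": 20.0, "length_units": "km", "loss_coef": 0.2,
#                     "con_in": null, "con_out": null}}
#       ],
#       "connections": [ ... ]
#     }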


def corresp_names(input_filename, network):
    """ a function that builds the correspondence between the names given in the excel file
    and the names used in the JSON created by the autodesign.
    All names are listed.
    """
    nodes, links, eqpts = parse_excel(input_filename)
    fused = [n.uid for n in network.nodes() if isinstance(n, Fused)]
    ila = [n.uid for n in network.nodes() if isinstance(n, Edfa)]

    corresp_roadm = {x.city: [f'roadm {x.city}'] for x in nodes
                     if x.node_type.lower() == 'roadm'}
    corresp_fused = {x.city: [f'west fused spans in {x.city}', f'east fused spans in {x.city}']
                     for x in nodes if x.node_type.lower() == 'fused' and
                     f'west fused spans in {x.city}' in fused and
                     f'east fused spans in {x.city}' in fused}

    # add the special cases where an ILA is changed into a fused
    for my_e in eqpts:
        name = f'east edfa in {my_e.from_city} to {my_e.to_city}'
        if my_e.east_amp_type.lower() == 'fused' and name in fused:
            if my_e.from_city in corresp_fused.keys():
                corresp_fused[my_e.from_city].append(name)
            else:
                corresp_fused[my_e.from_city] = [name]
        name = f'west edfa in {my_e.from_city} to {my_e.to_city}'
        if my_e.west_amp_type.lower() == 'fused' and name in fused:
            if my_e.from_city in corresp_fused.keys():
                corresp_fused[my_e.from_city].append(name)
            else:
                corresp_fused[my_e.from_city] = [name]
    # build corresp_ila based on the Eqpt sheet
    # start with the east direction
    corresp_ila = {e.from_city: [f'east edfa in {e.from_city} to {e.to_city}']
                   for e in eqpts if e.east_amp_type.lower() != '' and
                   f'east edfa in {e.from_city} to {e.to_city}' in ila}
    # west direction: append the name or create a new item in the dict
    for my_e in eqpts:
        if my_e.west_amp_type.lower() != '':
            name = f'west edfa in {my_e.from_city} to {my_e.to_city}'
            if name in ila:
                if my_e.from_city in corresp_ila.keys():
                    corresp_ila[my_e.from_city].append(name)
                else:
                    corresp_ila[my_e.from_city] = [name]
    # complete with potential autodesign names: amplifiers
    for my_l in links:
        name = f'Edfa0_fiber ({my_l.to_city} \u2192 {my_l.from_city})-{my_l.west_cable}'
        if name in ila:
            if my_l.from_city in corresp_ila.keys():
                # "east edfa in Stbrieuc to Rennes_STA" is an equivalent name to
                # "Edfa0_fiber (Lannion_CAS → Stbrieuc)-F056"
                # "west edfa in Stbrieuc to Rennes_STA" is an equivalent name to
                # "Edfa0_fiber (Rennes_STA → Stbrieuc)-F057"
                # does not filter names: all types (except boosters) are created.
                # in case fibers are split, the name here is a prefix
                corresp_ila[my_l.from_city].append(name)
            else:
                corresp_ila[my_l.from_city] = [name]
        name = f'Edfa0_fiber ({my_l.from_city} \u2192 {my_l.to_city})-{my_l.east_cable}'
        if name in ila:
            if my_l.to_city in corresp_ila.keys():
                corresp_ila[my_l.to_city].append(name)
            else:
                corresp_ila[my_l.to_city] = [name]

    # merge fused with ila:
    for key, val in corresp_fused.items():
        if key in corresp_ila.keys():
            corresp_ila[key].extend(val)
        else:
            corresp_ila[key] = val
    # no need of roadm booster
    return corresp_roadm, corresp_fused, corresp_ila
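
# Sketch of the shape of the three dictionaries returned by corresp_names()
# (city and equipment names are illustrative only):
#
#     corresp_roadm == {'Lannion_CAS': ['roadm Lannion_CAS'], ...}
#     corresp_ila == {'Stbrieuc': ['east edfa in Stbrieuc to Rennes_STA',
#                                  'west edfa in Stbrieuc to Rennes_STA'], ...}
#     corresp_fused == {'Quimper': ['west fused spans in Quimper',
#                                   'east fused spans in Quimper'], ...}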


def parse_excel(input_filename):
    link_headers = {
        'Node A': 'from_city',
        'Node Z': 'to_city',
        'east': {
            'Distance (km)': 'east_distance',
            'Fiber type': 'east_fiber',
            'lineic att': 'east_lineic',
            'Con_in': 'east_con_in',
            'Con_out': 'east_con_out',
            'PMD': 'east_pmd',
            'Cable id': 'east_cable'
        },
        'west': {
            'Distance (km)': 'west_distance',
            'Fiber type': 'west_fiber',
            'lineic att': 'west_lineic',
            'Con_in': 'west_con_in',
            'Con_out': 'west_con_out',
            'PMD': 'west_pmd',
            'Cable id': 'west_cable'
        }
    }
    node_headers = {
        'City': 'city',
        'State': 'state',
        'Country': 'country',
        'Region': 'region',
        'Latitude': 'latitude',
        'Longitude': 'longitude',
        'Type': 'node_type',
        'Booster_restriction': 'booster_restriction',
        'Preamp_restriction': 'preamp_restriction'
    }
    eqpt_headers = {
        'Node A': 'from_city',
        'Node Z': 'to_city',
        'east': {
            'amp type': 'east_amp_type',
            'att_in': 'east_att_in',
            'amp gain': 'east_amp_gain',
            'delta p': 'east_amp_dp',
            'tilt': 'east_tilt',
            'att_out': 'east_att_out'
        },
        'west': {
            'amp type': 'west_amp_type',
            'att_in': 'west_att_in',
            'amp gain': 'west_amp_gain',
            'delta p': 'west_amp_dp',
            'tilt': 'west_tilt',
            'att_out': 'west_att_out'
        }
    }

    with open_workbook(input_filename) as wb:
        nodes_sheet = wb.sheet_by_name('Nodes')
        links_sheet = wb.sheet_by_name('Links')
        try:
            eqpt_sheet = wb.sheet_by_name('Eqpt')
        except Exception:
            # the Eqpt sheet is optional
            eqpt_sheet = None

        nodes = []
        for node in parse_sheet(nodes_sheet, node_headers, NODES_LINE, NODES_LINE + 1, NODES_COLUMN):
            nodes.append(Node(**node))
        expected_node_types = {'ROADM', 'ILA', 'FUSED'}
        for n in nodes:
            if n.node_type not in expected_node_types:
                n.node_type = 'ILA'

        links = []
        for link in parse_sheet(links_sheet, link_headers, LINKS_LINE, LINKS_LINE + 2, LINKS_COLUMN):
            links.append(Link(**link))

        eqpts = []
        if eqpt_sheet is not None:
            for eqpt in parse_sheet(eqpt_sheet, eqpt_headers, EQPTS_LINE, EQPTS_LINE + 2, EQPTS_COLUMN):
                eqpts.append(Eqpt(**eqpt))

    # sanity check
    all_cities = Counter(n.city for n in nodes)
    if len(all_cities) != len(nodes):
        raise ValueError(f'Duplicate city: {all_cities}')
    bad_links = []
    for lnk in links:
        if lnk.from_city not in all_cities or lnk.to_city not in all_cities:
            bad_links.append([lnk.from_city, lnk.to_city])
    if bad_links:
        raise NetworkTopologyError(f'Bad link(s): {bad_links}.')

    return nodes, links, eqpts


def eqpt_connection_by_city(city_name):
    other_cities = fiber_dest_from_source(city_name)
    subdata = []
    if nodes_by_city[city_name].node_type.lower() in {'ila', 'fused'}:
        # then len(other_cities) == 2
        direction = ['west', 'east']
        for i in range(2):
            from_ = fiber_link(other_cities[i], city_name)
            in_ = eqpt_in_city_to_city(city_name, other_cities[0], direction[i])
            to_ = fiber_link(city_name, other_cities[1 - i])
            subdata += connect_eqpt(from_, in_, to_)
    elif nodes_by_city[city_name].node_type.lower() == 'roadm':
        for other_city in other_cities:
            from_ = f'roadm {city_name}'
            in_ = eqpt_in_city_to_city(city_name, other_city)
            to_ = fiber_link(city_name, other_city)
            subdata += connect_eqpt(from_, in_, to_)

            from_ = fiber_link(other_city, city_name)
            in_ = eqpt_in_city_to_city(city_name, other_city, "west")
            to_ = f'roadm {city_name}'
            subdata += connect_eqpt(from_, in_, to_)
    return subdata


def connect_eqpt(from_, in_, to_):
    connections = []
    if in_ != '':
        connections = [{'from_node': from_, 'to_node': in_},
                       {'from_node': in_, 'to_node': to_}]
    else:
        connections = [{'from_node': from_, 'to_node': to_}]
    return connections
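
# Sketch of what connect_eqpt() produces (node names are illustrative):
#
#     connect_eqpt('roadm A', 'east edfa in A to B', 'fiber (A → B)-F001')
#     # -> [{'from_node': 'roadm A', 'to_node': 'east edfa in A to B'},
#     #     {'from_node': 'east edfa in A to B', 'to_node': 'fiber (A → B)-F001'}]
#
#     connect_eqpt('roadm A', '', 'fiber (A → B)-F001')
#     # -> [{'from_node': 'roadm A', 'to_node': 'fiber (A → B)-F001'}]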


def eqpt_in_city_to_city(in_city, to_city, direction='east'):
    rev_direction = 'west' if direction == 'east' else 'east'
    amp_direction = f'{direction}_amp_type'
    amp_rev_direction = f'{rev_direction}_amp_type'
    return_eqpt = ''
    if in_city in eqpts_by_city:
        for e in eqpts_by_city[in_city]:
            if nodes_by_city[in_city].node_type.lower() == 'roadm':
                if e.to_city == to_city and getattr(e, amp_direction) != '':
                    return_eqpt = f'{direction} edfa in {e.from_city} to {e.to_city}'
            elif nodes_by_city[in_city].node_type.lower() == 'ila':
                if e.to_city != to_city:
                    direction = rev_direction
                    amp_direction = amp_rev_direction
                if getattr(e, amp_direction) != '':
                    return_eqpt = f'{direction} edfa in {e.from_city} to {e.to_city}'
    if nodes_by_city[in_city].node_type.lower() == 'fused':
        return_eqpt = f'{direction} fused spans in {in_city}'
    return return_eqpt


def corresp_next_node(network, corresp_ila, corresp_roadm):
    """ for each name in the corresp dictionaries, find the next node in the network and the name
    the user gave it in the excel file. For meshTopology_exampleV2.xls:
    the user ILA name Stbrieuc covers both directions. convert.py creates 2 different ILAs
    with possible names (depending on the direction and on whether the eqpt was defined in the
    Eqpt sheet):
    - east edfa in Stbrieuc to Rennes_STA
    - west edfa in Stbrieuc to Rennes_STA
    - Edfa0_fiber (Lannion_CAS → Stbrieuc)-F056
    - Edfa0_fiber (Rennes_STA → Stbrieuc)-F057
    next_node finds the user-defined name of the next node, to be able to map the path constraints:
    - east edfa in Stbrieuc to Rennes_STA   next node = Rennes_STA
    - west edfa in Stbrieuc to Rennes_STA   next node = Lannion_CAS

    Edfa0_fiber (Lannion_CAS → Stbrieuc)-F056 and Edfa0_fiber (Rennes_STA → Stbrieuc)-F057
    do not exist.
    The function supports fiber splitting and fused nodes, and shall only be called if
    the excel format is used for both network and service.
    """
    next_node = {}
    # consolidate tables and create the next_node table
    for ila_key, ila_list in corresp_ila.items():
        temp = copy(ila_list)
        for ila_elem in ila_list:
            # find the node with the ila_elem string _in_ the node uid. 'in' is used instead of
            # '==' to find composed nodes due to fiber splitting in autodesign,
            # e.g. if ila_elem is 'Edfa0_fiber (Lannion_CAS → Stbrieuc)-F056',
            # node uid 'Edfa0_fiber (Lannion_CAS → Stbrieuc)-F056_(1/2)' is possible
            correct_ila_name = next(n.uid for n in network.nodes() if ila_elem in n.uid)
            temp.remove(ila_elem)
            temp.append(correct_ila_name)
            ila_nd = next(n for n in network.nodes() if ila_elem in n.uid)
            next_nd = next(network.successors(ila_nd))
            # search for the next ILA or ROADM
            while isinstance(next_nd, (Fiber, Fused)):
                next_nd = next(network.successors(next_nd))
            # if next_nd is a ROADM, add the first correspondence found
            for key, val in corresp_roadm.items():
                # val is a list of possible names associated with key
                if next_nd.uid in val:
                    next_node[correct_ila_name] = key
                    break
            # if next_nd was not already added to the dict by the previous loop,
            # add the first correspondence found among the ILA names
            if correct_ila_name not in next_node.keys():
                for key, val in corresp_ila.items():
                    # in case of split fibers the ILA name might not be an exact match
                    if [e for e in val if e in next_nd.uid]:
                        next_node[correct_ila_name] = key
                        break

        corresp_ila[ila_key] = temp
    return corresp_ila, next_node


def fiber_dest_from_source(city_name):
    destinations = []
    links_from_city = links_by_city[city_name]
    for l in links_from_city:
        if l.from_city == city_name:
            destinations.append(l.to_city)
        else:
            destinations.append(l.from_city)
    return destinations


def fiber_link(from_city, to_city):
    source_dest = (from_city, to_city)
    link = links_by_city[from_city]
    l = next(l for l in link if l.from_city in source_dest and l.to_city in source_dest)
    if l.from_city == from_city:
        fiber = f'fiber ({l.from_city} \u2192 {l.to_city})-{l.east_cable}'
    else:
        fiber = f'fiber ({l.to_city} \u2192 {l.from_city})-{l.west_cable}'
    return fiber


def midpoint(city_a, city_b):
    lats = city_a.latitude, city_b.latitude
    longs = city_a.longitude, city_b.longitude
    try:
        result = {
            'latitude': sum(lats) / 2,
            'longitude': sum(longs) / 2
        }
    except TypeError:
        # missing or non-numeric coordinates default to (0, 0)
        result = {
            'latitude': 0,
            'longitude': 0
        }
    return result
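
# Sketch of midpoint() on two nodes with numeric coordinates (values made up):
#
#     a = Node(city='A', latitude=48.0, longitude=-3.0)
#     b = Node(city='B', latitude=50.0, longitude=-1.0)
#     midpoint(a, b)  # -> {'latitude': 49.0, 'longitude': -2.0}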


# TODO: get the column sizes automatically from the header tuple sizes

NODES_COLUMN = 10
NODES_LINE = 4
LINKS_COLUMN = 16
LINKS_LINE = 3
EQPTS_LINE = 3
EQPTS_COLUMN = 14

parser = ArgumentParser()
parser.add_argument('workbook', nargs='?', type=Path, default='meshTopologyExampleV2.xls')
parser.add_argument('-f', '--filter-region', action='append', default=[])

if __name__ == '__main__':
    args = parser.parse_args()
    convert_file(args.workbook, filter_region=args.filter_region)
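
# Command-line usage sketch (assuming the package is installed from this
# repository); note that region filtering compares the lowercased "Region"
# column against the values given with -f/--filter-region:
#
#     $ python -m gnpy.core.convert meshTopologyExampleV2.xls -f region_a
#
# This writes meshTopologyExampleV2.json next to the input workbook.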