diff --git a/gnpy/core/elements.py b/gnpy/core/elements.py index 409de490..19c96cf6 100644 --- a/gnpy/core/elements.py +++ b/gnpy/core/elements.py @@ -42,7 +42,7 @@ class Transceiver(Node): def _calc_snr(self, spectral_info): with errstate(divide='ignore'): self.baud_rate = [c.baud_rate for c in spectral_info.carriers] - ratio_01nm = [lin2db(12.5e9/b_rate) for b_rate in self.baud_rate] + ratio_01nm = [lin2db(12.5e9 / b_rate) for b_rate in self.baud_rate] # set raw values to record original calculation, before update_snr() self.raw_osnr_ase = [lin2db(divide(c.power.signal, c.power.ase)) for c in spectral_info.carriers] @@ -50,7 +50,7 @@ class Transceiver(Node): in zip(self.raw_osnr_ase, ratio_01nm)] self.raw_osnr_nli = [lin2db(divide(c.power.signal, c.power.nli)) for c in spectral_info.carriers] - self.raw_snr = [lin2db(divide(c.power.signal, c.power.nli+c.power.ase)) + self.raw_snr = [lin2db(divide(c.power.signal, c.power.nli + c.power.ase)) for c in spectral_info.carriers] self.raw_snr_01nm = [snr - ratio for snr, ratio in zip(self.raw_snr, ratio_01nm)] @@ -163,15 +163,15 @@ class Roadm(Node): # in the case of add channels self.effective_pch_out_db = min(pref.p_spani, self.params.target_pch_out_db) self.effective_loss = pref.p_spani - self.effective_pch_out_db - carriers_power = array([c.power.signal + c.power.nli+c.power.ase for c in carriers]) - carriers_att = list(map(lambda x: lin2db(x*1e3)-self.params.target_pch_out_db, carriers_power)) + carriers_power = array([c.power.signal + c.power.nli + c.power.ase for c in carriers]) + carriers_att = list(map(lambda x: lin2db(x * 1e3) - self.params.target_pch_out_db, carriers_power)) exceeding_att = -min(list(filter(lambda x: x < 0, carriers_att)), default=0) - carriers_att = list(map(lambda x: db2lin(x+exceeding_att), carriers_att)) + carriers_att = list(map(lambda x: db2lin(x + exceeding_att), carriers_att)) for carrier_att, carrier in zip(carriers_att, carriers): pwr = carrier.power - pwr = pwr._replace(signal=pwr.signal/carrier_att, - nli=pwr.nli/carrier_att, - ase=pwr.ase/carrier_att) + pwr = pwr._replace(signal=pwr.signal / carrier_att, + nli=pwr.nli / carrier_att, + ase=pwr.ase / carrier_att) yield carrier._replace(power=pwr) def update_pref(self, pref): @@ -219,9 +219,9 @@ class Fused(Node): for carrier in carriers: pwr = carrier.power - pwr = pwr._replace(signal=pwr.signal/attenuation, - nli=pwr.nli/attenuation, - ase=pwr.ase/attenuation) + pwr = pwr._replace(signal=pwr.signal / attenuation, + nli=pwr.nli / attenuation, + ase=pwr.ase / attenuation) yield carrier._replace(power=pwr) def update_pref(self, pref): @@ -305,10 +305,10 @@ class Fiber(Node): if not (loc in ('in', 'out') and attr in ('nli', 'signal', 'total', 'ase')): yield None return - loc_attr = 'carriers_'+loc + loc_attr = 'carriers_' + loc for c in getattr(self, loc_attr): if attr == 'total': - yield c.power.ase+c.power.nli+c.power.signal + yield c.power.ase + c.power.nli + c.power.signal else: yield c.power._asdict().get(attr, None) @@ -347,8 +347,8 @@ class Fiber(Node): for interfering_carrier in carriers: psi = _psi(carrier, interfering_carrier, beta2=self.params.beta2, asymptotic_length=self.params.asymptotic_length) - g_nli += (interfering_carrier.power.signal/interfering_carrier.baud_rate)**2 \ - * (carrier.power.signal/carrier.baud_rate) * psi + g_nli += (interfering_carrier.power.signal / interfering_carrier.baud_rate)**2 \ + * (carrier.power.signal / carrier.baud_rate) * psi g_nli *= (16 / 27) * (self.params.gamma * self.params.effective_length)**2 \ / (2 * pi * 
abs(self.params.beta2) * self.params.asymptotic_length) @@ -364,9 +364,9 @@ class Fiber(Node): chan = [] for carrier in carriers: pwr = carrier.power - pwr = pwr._replace(signal=pwr.signal/attenuation, - nli=pwr.nli/attenuation, - ase=pwr.ase/attenuation) + pwr = pwr._replace(signal=pwr.signal / attenuation, + nli=pwr.nli / attenuation, + ase=pwr.ase / attenuation) carrier = carrier._replace(power=pwr) chan.append(carrier) @@ -377,9 +377,9 @@ class Fiber(Node): for carrier in carriers: pwr = carrier.power carrier_nli = self._gn_analytic(carrier, *carriers) - pwr = pwr._replace(signal=pwr.signal/self.params.lin_attenuation/attenuation, - nli=(pwr.nli+carrier_nli)/self.params.lin_attenuation/attenuation, - ase=pwr.ase/self.params.lin_attenuation/attenuation) + pwr = pwr._replace(signal=pwr.signal / self.params.lin_attenuation / attenuation, + nli=(pwr.nli + carrier_nli) / self.params.lin_attenuation / attenuation, + ase=pwr.ase / self.params.lin_attenuation / attenuation) yield carrier._replace(power=pwr) def update_pref(self, pref): @@ -445,10 +445,10 @@ class EdfaParams: class EdfaOperational: default_values = { - 'gain_target': None, - 'delta_p': None, - 'out_voa': None, - 'tilt_target': 0 + 'gain_target': None, + 'delta_p': None, + 'out_voa': None, + 'tilt_target': 0 } def __init__(self, **operational): @@ -553,10 +553,10 @@ class Edfa(Node): if not (loc in ('in', 'out') and attr in ('nli', 'signal', 'total', 'ase')): yield None return - loc_attr = 'carriers_'+loc + loc_attr = 'carriers_' + loc for c in getattr(self, loc_attr): if attr == 'total': - yield c.power.ase+c.power.nli+c.power.signal + yield c.power.ase + c.power.nli + c.power.signal else: yield c.power._asdict().get(attr, None) @@ -576,7 +576,7 @@ class Edfa(Node): self.interpol_nf_ripple = interp(self.channel_freq, amplifier_freq, self.params.nf_ripple) self.nch = frequencies.size - self.pin_db = lin2db(sum(pin*1e3)) + self.pin_db = lin2db(sum(pin * 1e3)) """in power mode: delta_p is defined and can be used to calculate the power target This power target is used calculate the amplifier gain""" @@ -597,8 +597,8 @@ class Edfa(Node): self.nf = self._calc_nf() self.gprofile = self._gain_profile(pin) - pout = (pin + self.noise_profile(baud_rates))*db2lin(self.gprofile) - self.pout_db = lin2db(sum(pout*1e3)) + pout = (pin + self.noise_profile(baud_rates)) * db2lin(self.gprofile) + self.pout_db = lin2db(sum(pout * 1e3)) # ase & nli are only calculated in signal bandwidth # pout_db is not the absolute full output power (negligible if sufficient channels) @@ -610,7 +610,7 @@ class Edfa(Node): dg = max(gain_flatmax - gain_target, 0) if type_def == 'variable_gain': g1a = gain_target - nf_model.delta_p - dg - nf_avg = lin2db(db2lin(nf_model.nf1) + db2lin(nf_model.nf2)/db2lin(g1a)) + nf_avg = lin2db(db2lin(nf_model.nf1) + db2lin(nf_model.nf2) / db2lin(g1a)) elif type_def == 'fixed_gain': nf_avg = nf_model.nf0 elif type_def == 'openroadm': @@ -621,7 +621,7 @@ class Edfa(Node): nf_avg = polyval(nf_fit_coeff, -dg) else: assert False, "Unrecognized amplifier type, this should have been checked by the JSON loader" - return nf_avg+pad, pad + return nf_avg + pad, pad def _calc_nf(self, avg=False): """nf calculation based on 2 models: self.params.nf_model.enabled from json import: @@ -644,7 +644,7 @@ class Edfa(Node): self.params.booster_gain_min, self.params.booster_gain_flatmax, g2) - nf_avg = lin2db(db2lin(nf1_avg) + db2lin(nf2_avg-g1)) + nf_avg = lin2db(db2lin(nf1_avg) + db2lin(nf2_avg - g1)) # no padding expected for the 1stage because g1 = 
gain_max pad = 0 else: @@ -772,13 +772,13 @@ class Edfa(Node): # second estimate of amp ch gain using the channel input profile g2nd = g1st - voa - pout_db = lin2db(sum(pin*1e3*db2lin(g2nd))) + pout_db = lin2db(sum(pin * 1e3 * db2lin(g2nd))) dgts2 = self.effective_gain - (pout_db - tot_in_power_db) # center estimate of amp ch gain xcent = dgts2 gcent = g1st - voa + array(self.interpol_dgt) * xcent - pout_db = lin2db(sum(pin*1e3*db2lin(gcent))) + pout_db = lin2db(sum(pin * 1e3 * db2lin(gcent))) gavg_cent = pout_db - tot_in_power_db # Lower estimate of amp ch gain @@ -814,7 +814,7 @@ class Edfa(Node): def propagate(self, pref, *carriers): """add ASE noise to the propagating carriers of :class:`.info.SpectralInformation`""" - pin = array([c.power.signal+c.power.nli+c.power.ase for c in carriers]) # pin in W + pin = array([c.power.signal + c.power.nli + c.power.ase for c in carriers]) # pin in W freq = array([c.frequency for c in carriers]) brate = array([c.baud_rate for c in carriers]) # interpolate the amplifier vectors with the carriers freq, calculate nf & gain profile @@ -826,9 +826,9 @@ class Edfa(Node): for gain, carrier_ase, carrier in zip(gains, carrier_ases, carriers): pwr = carrier.power - pwr = pwr._replace(signal=pwr.signal*gain/att, - nli=pwr.nli*gain/att, - ase=(pwr.ase+carrier_ase)*gain/att) + pwr = pwr._replace(signal=pwr.signal * gain / att, + nli=pwr.nli * gain / att, + ase=(pwr.ase + carrier_ase) * gain / att) yield carrier._replace(power=pwr) def update_pref(self, pref): diff --git a/gnpy/core/equipment.py b/gnpy/core/equipment.py index e4e79c18..83cd5014 100644 --- a/gnpy/core/equipment.py +++ b/gnpy/core/equipment.py @@ -41,15 +41,15 @@ class common: class SI(common): default_values =\ { - "f_min": 191.35e12, - "f_max": 196.1e12, - "baud_rate": 32e9, - "spacing": 50e9, - "power_dbm": 0, - "power_range_db": [0, 0, 0.5], - "roll_off": 0.15, - "tx_osnr": 45, - "sys_margins": 0 + "f_min": 191.35e12, + "f_max": 196.1e12, + "baud_rate": 32e9, + "spacing": 50e9, + "power_dbm": 0, + "power_range_db": [0, 0, 0.5], + "roll_off": 0.15, + "tx_osnr": 45, + "sys_margins": 0 } def __init__(self, **kwargs): @@ -59,17 +59,17 @@ class SI(common): class Span(common): default_values = \ { - 'power_mode': True, - 'delta_power_range_db': None, - 'max_fiber_lineic_loss_for_raman': 0.25, - 'target_extended_gain': 2.5, - 'max_length': 150, - 'length_units': 'km', - 'max_loss': None, - 'padding': 10, - 'EOL': 0, - 'con_in': 0, - 'con_out': 0 + 'power_mode': True, + 'delta_power_range_db': None, + 'max_fiber_lineic_loss_for_raman': 0.25, + 'target_extended_gain': 2.5, + 'max_length': 150, + 'length_units': 'km', + 'max_loss': None, + 'padding': 10, + 'EOL': 0, + 'con_in': 0, + 'con_out': 0 } def __init__(self, **kwargs): @@ -80,7 +80,7 @@ class Roadm(common): default_values = \ { 'target_pch_out_db': -17, - 'add_drop_osnr': 100, + 'add_drop_osnr': 100, 'restrictions': { 'preamp_variety_list': [], 'booster_variety_list': [] @@ -95,8 +95,8 @@ class Transceiver(common): default_values = \ { 'type_variety': None, - 'frequency': None, - 'mode': {} + 'frequency': None, + 'mode': {} } def __init__(self, **kwargs): @@ -106,9 +106,9 @@ class Transceiver(common): class Fiber(common): default_values = \ { - 'type_variety': '', - 'dispersion': None, - 'gamma': 0 + 'type_variety': '', + 'dispersion': None, + 'gamma': 0 } def __init__(self, **kwargs): @@ -118,9 +118,9 @@ class Fiber(common): class RamanFiber(common): default_values = \ { - 'type_variety': '', - 'dispersion': None, - 'gamma': 0, + 
'type_variety': '', + 'dispersion': None, + 'gamma': 0, 'raman_efficiency': None } @@ -136,22 +136,22 @@ class RamanFiber(common): class Amp(common): default_values = \ { - 'f_min': 191.35e12, - 'f_max': 196.1e12, - 'type_variety': '', - 'type_def': '', - 'gain_flatmax': None, - 'gain_min': None, - 'p_max': None, - 'nf_model': None, - 'dual_stage_model': None, - 'nf_fit_coeff': None, - 'nf_ripple': None, - 'dgt': None, - 'gain_ripple': None, - 'out_voa_auto': False, - 'allowed_for_design': False, - 'raman': False + 'f_min': 191.35e12, + 'f_max': 196.1e12, + 'type_variety': '', + 'type_def': '', + 'gain_flatmax': None, + 'gain_min': None, + 'p_max': None, + 'nf_model': None, + 'dual_stage_model': None, + 'nf_fit_coeff': None, + 'nf_ripple': None, + 'dgt': None, + 'gain_ripple': None, + 'out_voa_auto': False, + 'allowed_for_design': False, + 'raman': False } def __init__(self, **kwargs): @@ -228,8 +228,8 @@ def nf_model(type_variety, gain_min, gain_max, nf_min, nf_max): g1a_min = gain_min - (gain_max - gain_min) - delta_p g1a_max = gain_max - delta_p nf2 = lin2db((db2lin(nf_min) - db2lin(nf_max)) / - (1/db2lin(g1a_max) - 1/db2lin(g1a_min))) - nf1 = lin2db(db2lin(nf_min) - db2lin(nf2)/db2lin(g1a_max)) + (1 / db2lin(g1a_max) - 1 / db2lin(g1a_min))) + nf1 = lin2db(db2lin(nf_min) - db2lin(nf2) / db2lin(g1a_max)) if nf1 < 4: raise EquipmentConfigError(f'First coil value too low {nf1} for amplifier {type_variety}') @@ -242,17 +242,17 @@ def nf_model(type_variety, gain_min, gain_max, nf_min, nf_max): nf2 = clip(nf2, nf1 + 0.3, nf1 + 2) g1a_max = lin2db(db2lin(nf2) / (db2lin(nf_min) - db2lin(nf1))) delta_p = gain_max - g1a_max - g1a_min = gain_min - (gain_max-gain_min) - delta_p + g1a_min = gain_min - (gain_max - gain_min) - delta_p if not 1 < delta_p < 11: raise EquipmentConfigError(f'Computed \N{greek capital letter delta}P invalid \ \n 1st coil vs 2nd coil calculated DeltaP {delta_p:.2f} for \ \n amplifier {type_variety} is not valid: revise inputs \ \n calculated 1st coil NF = {nf1:.2f}, 2nd coil NF = {nf2:.2f}') # Check calculated values for nf1 and nf2 - calc_nf_min = lin2db(db2lin(nf1) + db2lin(nf2)/db2lin(g1a_max)) + calc_nf_min = lin2db(db2lin(nf1) + db2lin(nf2) / db2lin(g1a_max)) if not isclose(nf_min, calc_nf_min, abs_tol=0.01): raise EquipmentConfigError(f'nf_min does not match calc_nf_min, {nf_min} vs {calc_nf_min} for amp {type_variety}') - calc_nf_max = lin2db(db2lin(nf1) + db2lin(nf2)/db2lin(g1a_min)) + calc_nf_max = lin2db(db2lin(nf1) + db2lin(nf2) / db2lin(g1a_min)) if not isclose(nf_max, calc_nf_max, abs_tol=0.01): raise EquipmentConfigError(f'nf_max does not match calc_nf_max, {nf_max} vs {calc_nf_max} for amp {type_variety}') @@ -330,7 +330,7 @@ def trx_mode_params(equipment, trx_type_variety='', trx_mode='', error_message=F trx_params['nb_channel'] = nch print(f'There are {nch} channels propagating') - trx_params['power'] = db2lin(default_si_data.power_dbm)*1e-3 + trx_params['power'] = db2lin(default_si_data.power_dbm) * 1e-3 return trx_params @@ -340,11 +340,11 @@ def automatic_spacing(baud_rate): # TODO : this should parametrized in a cfg file # list of possible tuples [(max_baud_rate, spacing_for_this_baud_rate)] spacing_list = [(33e9, 37.5e9), (38e9, 50e9), (50e9, 62.5e9), (67e9, 75e9), (92e9, 100e9)] - return min((s[1] for s in spacing_list if s[0] > baud_rate), default=baud_rate*1.2) + return min((s[1] for s in spacing_list if s[0] > baud_rate), default=baud_rate * 1.2) def automatic_nch(f_min, f_max, spacing): - return int((f_max - f_min)//spacing) + return int((f_max - 
f_min) // spacing) def automatic_fmax(f_min, spacing, nch): diff --git a/gnpy/core/info.py b/gnpy/core/info.py index 86e10d03..4689c12d 100644 --- a/gnpy/core/info.py +++ b/gnpy/core/info.py @@ -23,7 +23,7 @@ class Channel(namedtuple('Channel', 'channel_number frequency baud_rate roll_off class Pref(namedtuple('Pref', 'p_span0, p_spani, neq_ch ')): - """noiseless reference power in dBm: + """noiseless reference power in dBm: p_span0: inital target carrier power p_spani: carrier power after element i neq_ch: equivalent channel count in dB""" @@ -42,7 +42,7 @@ def create_input_spectral_information(f_min, f_max, roll_off, baud_rate, power, si = SpectralInformation( pref=Pref(pref, pref, lin2db(nb_channel)), carriers=[ - Channel(f, (f_min+spacing*f), - baud_rate, roll_off, Power(power, 0, 0)) for f in range(1, nb_channel+1) + Channel(f, (f_min + spacing * f), + baud_rate, roll_off, Power(power, 0, 0)) for f in range(1, nb_channel + 1) ]) return si diff --git a/gnpy/core/network.py b/gnpy/core/network.py index fdfad338..51d80770 100644 --- a/gnpy/core/network.py +++ b/gnpy/core/network.py @@ -125,7 +125,7 @@ def select_edfa(raman_allowed, gain_target, power_target, equipment, uid, restri edfa.p_max ) - power_target, - gain_min=gain_target+3 + gain_min=gain_target + 3 - edfa.gain_min, nf=edfa_nf(gain_target, edfa_variety, equipment)) for edfa_variety, edfa in edfa_dict.items() @@ -186,7 +186,7 @@ def select_edfa(raman_allowed, gain_target, power_target, equipment, uid, restri # allow a 0.3dB power range # this allows to chose an amplifier with a better NF subsequentely acceptable_power_list = [x for x in acceptable_gain_min_list - if x.power-power_max > -0.3] + if x.power - power_max > -0.3] # gain and power requirements are resolved, # =>chose the amp with the best NF among the acceptable ones: @@ -300,8 +300,8 @@ def set_amplifier_voa(amp, power_target, power_mode): if amp.out_voa is None: if power_mode: gain_target = amp.effective_gain - voa = min(amp.params.p_max-power_target, - amp.params.gain_flatmax-amp.effective_gain) + voa = min(amp.params.p_max - power_target, + amp.params.gain_flatmax - amp.effective_gain) voa = max(round2float(max(voa, 0), 0.5) - VOA_MARGIN, 0) if amp.params.out_voa_auto else 0 amp.delta_p = amp.delta_p + voa amp.effective_gain = amp.effective_gain + voa @@ -366,8 +366,8 @@ def set_egress_amplifier(network, roadm, equipment, pref_total_db): restrictions = None if node.params.type_variety == '': - edfa_variety, power_reduction = select_edfa(raman_allowed, - gain_target, power_target, equipment, node.uid, restrictions) + edfa_variety, power_reduction = select_edfa( + raman_allowed, gain_target, power_target, equipment, node.uid, restrictions) extra_params = equipment['Edfa'][edfa_variety] node.params.update_params(extra_params.__dict__) dp += power_reduction @@ -404,10 +404,10 @@ def add_egress_amplifier(network, node): params={}, metadata={ 'location': { - 'latitude': (node.lat * 2 + next_node.lat * 2) / 4, + 'latitude': (node.lat * 2 + next_node.lat * 2) / 4, 'longitude': (node.lng * 2 + next_node.lng * 2) / 4, - 'city': node.loc.city, - 'region': node.loc.region, + 'city': node.loc.city, + 'region': node.loc.region, } }, operational={ @@ -429,12 +429,12 @@ def calculate_new_length(fiber_length, bounds, target_length): n_spans = int(fiber_length // target_length) - length1 = fiber_length / (n_spans+1) - delta1 = target_length-length1 - result1 = (length1, n_spans+1) + length1 = fiber_length / (n_spans + 1) + delta1 = target_length - length1 + result1 = (length1, 
n_spans + 1) length2 = fiber_length / n_spans - delta2 = length2-target_length + delta2 = length2 - target_length result2 = (length2, n_spans) if (bounds.start <= length1 <= bounds.stop) and not(bounds.start <= length2 <= bounds.stop): @@ -463,17 +463,17 @@ def split_fiber(network, fiber, bounds, target_length, equipment): fiber.params.length = new_length f = interp1d([prev_node.lng, next_node.lng], [prev_node.lat, next_node.lat]) - xpos = [prev_node.lng + (next_node.lng - prev_node.lng) * (n+1)/(n_spans+1) for n in range(n_spans)] + xpos = [prev_node.lng + (next_node.lng - prev_node.lng) * (n + 1) / (n_spans + 1) for n in range(n_spans)] ypos = f(xpos) for span, lng, lat in zip(range(n_spans), xpos, ypos): new_span = Fiber(uid=f'{fiber.uid}_({span+1}/{n_spans})', type_variety=fiber.type_variety, metadata={ 'location': { - 'latitude': lat, + 'latitude': lat, 'longitude': lng, - 'city': fiber.loc.city, - 'region': fiber.loc.region, + 'city': fiber.loc.city, + 'region': fiber.loc.region, } }, params=fiber.params.asdict()) @@ -528,7 +528,7 @@ def add_fiber_padding(network, fibers, padding): def build_network(network, equipment, pref_ch_db, pref_total_db): default_span_data = equipment['Span']['default'] max_length = int(default_span_data.max_length * UNITS[default_span_data.length_units]) - min_length = max(int(default_span_data.padding/0.2*1e3), 50_000) + min_length = max(int(default_span_data.padding / 0.2 * 1e3), 50_000) bounds = range(min_length, max_length) target_length = max(min_length, 90_000) default_con_in = default_span_data.con_in diff --git a/gnpy/core/science_utils.py b/gnpy/core/science_utils.py index b4f1e78c..739a15d3 100644 --- a/gnpy/core/science_utils.py +++ b/gnpy/core/science_utils.py @@ -65,9 +65,9 @@ def propagate_raman_fiber(fiber, *carriers): for carrier, attenuation, rmn_ase in zip(carriers, fiber_attenuation, raman_ase): carrier_nli = np.interp(carrier.frequency, nli_frequencies, computed_nli) pwr = carrier.power - pwr = pwr._replace(signal=pwr.signal/attenuation/attenuation_out, - nli=(pwr.nli+carrier_nli)/attenuation/attenuation_out, - ase=((pwr.ase/attenuation)+rmn_ase)/attenuation_out) + pwr = pwr._replace(signal=pwr.signal / attenuation / attenuation_out, + nli=(pwr.nli + carrier_nli) / attenuation / attenuation_out, + ase=((pwr.ase / attenuation) + rmn_ase) / attenuation_out) new_carriers.append(carrier._replace(power=pwr)) return new_carriers @@ -297,7 +297,7 @@ class RamanSolver: for f_ind, f_ase in enumerate(freq_array): cr_raman = cr_raman_matrix[f_ind, :] vibrational_loss = f_ase / freq_array[:f_ind] - eta = 1/(np.exp((h*freq_diff[f_ind, f_ind+1:])/(kb*temperature)) - 1) + eta = 1 / (np.exp((h * freq_diff[f_ind, f_ind + 1:]) / (kb * temperature)) - 1) int_fiber_loss = -alphap_fiber[f_ind] * z_array int_raman_loss = np.sum((cr_raman[:f_ind] * vibrational_loss * int_pump[:f_ind, :].transpose()).transpose(), @@ -306,11 +306,12 @@ class RamanSolver: int_gain_loss = int_fiber_loss + int_raman_gain + int_raman_loss - new_ase = np.sum((cr_raman[f_ind+1:] * (1 + eta) * raman_matrix[f_ind+1:, :].transpose()).transpose() + new_ase = np.sum((cr_raman[f_ind + 1:] * (1 + eta) * raman_matrix[f_ind + 1:, :].transpose()).transpose() * h * f_ase * bn_array[f_ind], axis=0) bc_evolution = ase_bc[f_ind] * np.exp(int_gain_loss) - ase_evolution = np.exp(int_gain_loss) * cumtrapz(new_ase*np.exp(-int_gain_loss), z_array, dx=dx, initial=0) + ase_evolution = np.exp(int_gain_loss) * cumtrapz(new_ase * + np.exp(-int_gain_loss), z_array, dx=dx, initial=0) power_ase[f_ind, :] = 
bc_evolution + ase_evolution @@ -318,7 +319,7 @@ class RamanSolver: return spontaneous_raman_scattering def calculate_stimulated_raman_scattering(self, carriers, raman_pumps): - """ Returns stimulated Raman scattering solution including + """ Returns stimulated Raman scattering solution including fiber gain/loss profile. :return: None """ @@ -419,7 +420,7 @@ class RamanSolver: vibrational_loss = freq_array[f_ind] / freq_array[:f_ind] for z_ind, power_sample in enumerate(power): - raman_gain = np.sum(cr_raman[f_ind+1:] * power_spectrum[f_ind+1:, z_ind]) + raman_gain = np.sum(cr_raman[f_ind + 1:] * power_spectrum[f_ind + 1:, z_ind]) raman_loss = np.sum(vibrational_loss * cr_raman[:f_ind] * power_spectrum[:f_ind, z_ind]) dpdz_element = prop_direct[f_ind] * (-alphap_fiber[f_ind] + raman_gain - raman_loss) * power_sample @@ -476,7 +477,7 @@ class NliSolver: carrier_nli = 0 for pump_carrier_1 in carriers: for pump_carrier_2 in carriers: - carrier_nli += eta_matrix[pump_carrier_1.channel_number-1, pump_carrier_2.channel_number-1] * \ + carrier_nli += eta_matrix[pump_carrier_1.channel_number - 1, pump_carrier_2.channel_number - 1] * \ pump_carrier_1.power.signal * pump_carrier_2.power.signal carrier_nli *= carrier.power.signal diff --git a/gnpy/core/service_sheet.py b/gnpy/core/service_sheet.py index bbc43d12..99658409 100644 --- a/gnpy/core/service_sheet.py +++ b/gnpy/core/service_sheet.py @@ -30,7 +30,8 @@ SERVICES_COLUMN = 12 #EQPT_LIBRARY_FILENAME = Path(__file__).parent / 'eqpt_config.json' -def all_rows(sheet, start=0): return (sheet.row(x) for x in range(start, sheet.nrows)) +def all_rows(sheet, start=0): + return (sheet.row(x) for x in range(start, sheet.nrows)) logger = getLogger(__name__) @@ -125,8 +126,8 @@ class Request_element(Element): # Default assumption for bidir is False req_dictionnary = { 'request-id': self.request_id, - 'source': self.source, - 'destination': self.destination, + 'source': self.source, + 'destination': self.destination, 'src-tp-id': self.srctpid, 'dst-tp-id': self.dsttpid, 'bidirectional': self.bidir, @@ -181,7 +182,14 @@ class Request_element(Element): return self.pathrequest, self.pathsync -def convert_service_sheet(input_filename, eqpt, network, network_filename=None, output_filename='', bidir=False, filter_region=None): +def convert_service_sheet( + input_filename, + eqpt, + network, + network_filename=None, + output_filename='', + bidir=False, + filter_region=None): """ converts a service sheet into a json structure """ if filter_region is None: @@ -339,7 +347,7 @@ def correct_xls_route_list(network_filename, network, pathreqlist): # we rely on the next node provided by the user for this purpose new_n = next(n for n in nodes_suggestion if n in next_node.keys() and next_node[n] - in temp.nodes_list[i:]+[pathreq.destination] and + in temp.nodes_list[i:] + [pathreq.destination] and next_node[n] not in temp.nodes_list[:i]) else: new_n = nodes_suggestion[0] diff --git a/gnpy/core/spectrum_assignment.py b/gnpy/core/spectrum_assignment.py index b3af9576..e45a5173 100644 --- a/gnpy/core/spectrum_assignment.py +++ b/gnpy/core/spectrum_assignment.py @@ -30,15 +30,15 @@ class Bitmap: # n is the min index including guardband. 
Guardband is require to be sure # that a channel can be assigned with center frequency fmin (means that its # slot occupation goes below freq_index_min - n_min = frequency_to_n(f_min-guardband, grid) - n_max = frequency_to_n(f_max+guardband, grid) - 1 + n_min = frequency_to_n(f_min - guardband, grid) + n_max = frequency_to_n(f_max + guardband, grid) - 1 self.n_min = n_min self.n_max = n_max self.freq_index_min = frequency_to_n(f_min) self.freq_index_max = frequency_to_n(f_max) - self.freq_index = list(range(n_min, n_max+1)) + self.freq_index = list(range(n_min, n_max + 1)) if bitmap is None: - self.bitmap = [1] * (n_max-n_min+1) + self.bitmap = [1] * (n_max - n_min + 1) elif len(bitmap) == len(self.freq_index): self.bitmap = bitmap else: @@ -58,7 +58,7 @@ class Bitmap: """ insert bitmap on the left to align oms bitmaps if their start frequencies are different """ self.bitmap = newbitmap + self.bitmap - temp = list(range(self.n_min-len(newbitmap), self.n_min)) + temp = list(range(self.n_min - len(newbitmap), self.n_min)) self.freq_index = temp + self.freq_index self.n_min = self.freq_index[0] @@ -66,7 +66,7 @@ class Bitmap: """ insert bitmap on the right to align oms bitmaps if their stop frequencies are different """ self.bitmap = self.bitmap + newbitmap - self.freq_index = self.freq_index + list(range(self.n_max, self.n_max+len(newbitmap))) + self.freq_index = self.freq_index + list(range(self.n_max, self.n_max + len(newbitmap))) self.n_max = self.freq_index[-1] @@ -144,7 +144,7 @@ class OMS: if startn <= self.spectrum_bitmap.n_min: raise SpectrumError(f'N {nvalue}, M {mvalue} below the N spectrum bitmap bounds') self.spectrum_bitmap.bitmap[self.spectrum_bitmap.geti( - startn):self.spectrum_bitmap.geti(stopn)+1] = [0] * (stopn-startn+1) + startn):self.spectrum_bitmap.geti(stopn) + 1] = [0] * (stopn - startn + 1) def add_service(self, service_id, nb_wl): """ record service and mark spectrum as occupied @@ -164,7 +164,7 @@ def frequency_to_n(freq, grid=0.00625e12): 20 """ - return (int)((freq-193.1e12)/grid) + return (int)((freq - 193.1e12) / grid) def nvalue_to_frequency(nvalue, grid=0.00625e12): @@ -201,8 +201,8 @@ def slots_to_m(startn, stopn): 7 """ - nvalue = (int)((startn+stopn+1)/2) - mvalue = (int)((stopn-startn+1)/2) + nvalue = (int)((startn + stopn + 1) / 2) + mvalue = (int)((stopn - startn + 1) / 2) return nvalue, mvalue @@ -221,7 +221,7 @@ def m_to_freq(nvalue, mvalue, grid=0.00625e12): """ startn, stopn = mvalue_to_slots(nvalue, mvalue) fstart = nvalue_to_frequency(startn, grid) - fstop = nvalue_to_frequency(stopn+1, grid) + fstop = nvalue_to_frequency(stopn + 1, grid) return fstart, fstop @@ -355,11 +355,11 @@ def spectrum_selection(pth, oms_list, requested_m, requested_n=None): freq_availability = bitmap_sum(oms_list[oms].spectrum_bitmap.bitmap, freq_availability) if requested_n is None: # avoid slots reserved on the edge 0.15e-12 on both sides -> 24 - candidates = [(freq_index[i]+requested_m, freq_index[i], freq_index[i]+2*requested_m-1) + candidates = [(freq_index[i] + requested_m, freq_index[i], freq_index[i] + 2 * requested_m - 1) for i in range(len(freq_availability)) - if freq_availability[i:i+2*requested_m] == [1] * (2*requested_m) + if freq_availability[i:i + 2 * requested_m] == [1] * (2 * requested_m) and freq_index[i] >= freq_index_min - and freq_index[i+2*requested_m-1] <= freq_index_max] + and freq_index[i + 2 * requested_m - 1] <= freq_index_max] candidate = select_candidate(candidates, policy='first_fit') else: @@ -367,11 +367,11 @@ def spectrum_selection(pth, 
oms_list, requested_m, requested_n=None): # print(f'N {requested_n} i {i}') # print(freq_availability[i-m:i+m] ) # print(freq_index[i-m:i+m]) - if (freq_availability[i-requested_m:i+requested_m] == [1] * (2*requested_m) and - freq_index[i-requested_m] >= freq_index_min - and freq_index[i+requested_m-1] <= freq_index_max): + if (freq_availability[i - requested_m:i + requested_m] == [1] * (2 * requested_m) and + freq_index[i - requested_m] >= freq_index_min + and freq_index[i + requested_m - 1] <= freq_index_max): # candidate is the triplet center_n, startn and stopn - candidate = (requested_n, requested_n-requested_m, requested_n+requested_m-1) + candidate = (requested_n, requested_n - requested_m, requested_n + requested_m - 1) else: candidate = (None, None, None) # print("coucou11") diff --git a/gnpy/core/utils.py b/gnpy/core/utils.py index 20667f9a..432e9eff 100644 --- a/gnpy/core/utils.py +++ b/gnpy/core/utils.py @@ -145,8 +145,8 @@ def freq2wavelength(value): def snr_sum(snr, bw, snr_added, bw_added=12.5e9): - snr_added = snr_added - lin2db(bw/bw_added) - snr = -lin2db(db2lin(-snr)+db2lin(-snr_added)) + snr_added = snr_added - lin2db(bw / bw_added) + snr = -lin2db(db2lin(-snr) + db2lin(-snr_added)) return snr diff --git a/gnpy/tools/convert.py b/gnpy/tools/convert.py index 5f96fae7..9e76a4be 100755 --- a/gnpy/tools/convert.py +++ b/gnpy/tools/convert.py @@ -53,13 +53,13 @@ class Node(object): setattr(self, k, v) default_values = { - 'city': '', - 'state': '', - 'country': '', - 'region': '', - 'latitude': 0, - 'longitude': 0, - 'node_type': 'ILA', + 'city': '', + 'state': '', + 'country': '', + 'region': '', + 'latitude': 0, + 'longitude': 0, + 'node_type': 'ILA', 'booster_restriction': '', 'preamp_restriction': '' } @@ -89,15 +89,15 @@ class Link(object): or (self.from_city == link.to_city and self.to_city == link.from_city) default_values = { - 'from_city': '', - 'to_city': '', - 'east_distance': 80, - 'east_fiber': 'SSMF', - 'east_lineic': 0.2, - 'east_con_in': None, - 'east_con_out': None, - 'east_pmd': 0.1, - 'east_cable': '' + 'from_city': '', + 'to_city': '', + 'east_distance': 80, + 'east_fiber': 'SSMF', + 'east_lineic': 0.2, + 'east_con_in': None, + 'east_con_out': None, + 'east_pmd': 0.1, + 'east_cable': '' } @@ -116,14 +116,14 @@ class Eqpt(object): setattr(self, k, v_west) default_values = { - 'from_city': '', - 'to_city': '', - 'east_amp_type': '', - 'east_att_in': 0, - 'east_amp_gain': None, - 'east_amp_dp': None, - 'east_tilt': 0, - 'east_att_out': None + 'from_city': '', + 'to_city': '', + 'east_amp_type': '', + 'east_att_in': 0, + 'east_amp_gain': None, + 'east_amp_dp': None, + 'east_tilt': 0, + 'east_att_out': None } @@ -150,7 +150,7 @@ def read_slice(my_sheet, line, slice_, header): slice_range = (-1, -1) if header_i != []: try: - slice_range = next((h.colindex, header_i[i+1].colindex) + slice_range = next((h.colindex, header_i[i + 1].colindex) for i, h in enumerate(header_i) if header in h.header) except Exception: pass @@ -167,20 +167,20 @@ def parse_headers(my_sheet, input_headers_dict, headers, start_line, slice_in): iteration = 1 while slice_out == (-1, -1) and iteration < 10: # try next lines - slice_out = read_slice(my_sheet, start_line+iteration, slice_in, h0) + slice_out = read_slice(my_sheet, start_line + iteration, slice_in, h0) iteration += 1 if slice_out == (-1, -1): if h0 in ('east', 'Node A', 'Node Z', 'City'): - print(f'\x1b[1;31;40m'+f'CRITICAL: missing _{h0}_ header: EXECUTION ENDS' + '\x1b[0m') + print(f'\x1b[1;31;40m' + f'CRITICAL: missing _{h0}_ 
header: EXECUTION ENDS' + '\x1b[0m') exit() else: print(f'missing header {h0}') elif not isinstance(input_headers_dict[h0], dict): headers[slice_out[0]] = input_headers_dict[h0] else: - headers = parse_headers(my_sheet, input_headers_dict[h0], headers, start_line+1, slice_out) + headers = parse_headers(my_sheet, input_headers_dict[h0], headers, start_line + 1, slice_out) if headers == {}: - print('\x1b[1;31;40m'+f'CRITICAL ERROR: could not find any header to read _ ABORT' + '\x1b[0m') + print('\x1b[1;31;40m' + f'CRITICAL ERROR: could not find any header to read _ ABORT' + '\x1b[0m') exit() return headers @@ -266,16 +266,16 @@ def convert_file(input_filename, names_matching=False, filter_region=[]): data = { 'elements': [{'uid': f'trx {x.city}', - 'metadata': {'location': {'city': x.city, - 'region': x.region, - 'latitude': x.latitude, + 'metadata': {'location': {'city': x.city, + 'region': x.region, + 'latitude': x.latitude, 'longitude': x.longitude}}, 'type': 'Transceiver'} for x in nodes_by_city.values() if x.node_type.lower() == 'roadm'] + [{'uid': f'roadm {x.city}', - 'metadata': {'location': {'city': x.city, - 'region': x.region, - 'latitude': x.latitude, + 'metadata': {'location': {'city': x.city, + 'region': x.region, + 'latitude': x.latitude, 'longitude': x.longitude}}, 'type': 'Roadm'} for x in nodes_by_city.values() if x.node_type.lower() == 'roadm' @@ -287,24 +287,24 @@ def convert_file(input_filename, names_matching=False, filter_region=[]): 'booster_variety_list': silent_remove(x.booster_restriction.split(' | '), '') } }, - 'metadata': {'location': {'city': x.city, - 'region': x.region, - 'latitude': x.latitude, + 'metadata': {'location': {'city': x.city, + 'region': x.region, + 'latitude': x.latitude, 'longitude': x.longitude}}, 'type': 'Roadm'} for x in nodes_by_city.values() if x.node_type.lower() == 'roadm' and (x.booster_restriction != '' or x.preamp_restriction != '')] + [{'uid': f'west fused spans in {x.city}', - 'metadata': {'location': {'city': x.city, - 'region': x.region, - 'latitude': x.latitude, + 'metadata': {'location': {'city': x.city, + 'region': x.region, + 'latitude': x.latitude, 'longitude': x.longitude}}, 'type': 'Fused'} for x in nodes_by_city.values() if x.node_type.lower() == 'fused'] + [{'uid': f'east fused spans in {x.city}', - 'metadata': {'location': {'city': x.city, - 'region': x.region, - 'latitude': x.latitude, + 'metadata': {'location': {'city': x.city, + 'region': x.region, + 'latitude': x.latitude, 'longitude': x.longitude}}, 'type': 'Fused'} for x in nodes_by_city.values() if x.node_type.lower() == 'fused'] + @@ -313,8 +313,8 @@ def convert_file(input_filename, names_matching=False, filter_region=[]): nodes_by_city[x.to_city])}, 'type': 'Fiber', 'type_variety': x.east_fiber, - 'params': {'length': round(x.east_distance, 3), - 'length_units': x.distance_units, + 'params': {'length': round(x.east_distance, 3), + 'length_units': x.distance_units, 'loss_coef': x.east_lineic, 'con_in': x.east_con_in, 'con_out': x.east_con_out} @@ -325,36 +325,36 @@ def convert_file(input_filename, names_matching=False, filter_region=[]): nodes_by_city[x.to_city])}, 'type': 'Fiber', 'type_variety': x.west_fiber, - 'params': {'length': round(x.west_distance, 3), - 'length_units': x.distance_units, + 'params': {'length': round(x.west_distance, 3), + 'length_units': x.distance_units, 'loss_coef': x.west_lineic, 'con_in': x.west_con_in, 'con_out': x.west_con_out} } # missing ILA construction for x in links] + [{'uid': f'east edfa in {e.from_city} to {e.to_city}', - 
'metadata': {'location': {'city': nodes_by_city[e.from_city].city, - 'region': nodes_by_city[e.from_city].region, - 'latitude': nodes_by_city[e.from_city].latitude, + 'metadata': {'location': {'city': nodes_by_city[e.from_city].city, + 'region': nodes_by_city[e.from_city].region, + 'latitude': nodes_by_city[e.from_city].latitude, 'longitude': nodes_by_city[e.from_city].longitude}}, 'type': 'Edfa', 'type_variety': e.east_amp_type, 'operational': {'gain_target': e.east_amp_gain, - 'delta_p': e.east_amp_dp, + 'delta_p': e.east_amp_dp, 'tilt_target': e.east_tilt, 'out_voa': e.east_att_out} } for e in eqpts if (e.east_amp_type.lower() != '' and \ e.east_amp_type.lower() != 'fused')] + [{'uid': f'west edfa in {e.from_city} to {e.to_city}', - 'metadata': {'location': {'city': nodes_by_city[e.from_city].city, - 'region': nodes_by_city[e.from_city].region, - 'latitude': nodes_by_city[e.from_city].latitude, + 'metadata': {'location': {'city': nodes_by_city[e.from_city].city, + 'region': nodes_by_city[e.from_city].region, + 'latitude': nodes_by_city[e.from_city].latitude, 'longitude': nodes_by_city[e.from_city].longitude}}, 'type': 'Edfa', 'type_variety': e.west_amp_type, 'operational': {'gain_target': e.west_amp_gain, - 'delta_p': e.west_amp_dp, + 'delta_p': e.west_amp_dp, 'tilt_target': e.west_tilt, 'out_voa': e.west_att_out} } @@ -365,18 +365,18 @@ def convert_file(input_filename, names_matching=False, filter_region=[]): # If user specifies ILA in Nodes sheet and fused in Eqpt sheet, then assumes that # this is a fused nodes. [{'uid': f'east edfa in {e.from_city} to {e.to_city}', - 'metadata': {'location': {'city': nodes_by_city[e.from_city].city, - 'region': nodes_by_city[e.from_city].region, - 'latitude': nodes_by_city[e.from_city].latitude, + 'metadata': {'location': {'city': nodes_by_city[e.from_city].city, + 'region': nodes_by_city[e.from_city].region, + 'latitude': nodes_by_city[e.from_city].latitude, 'longitude': nodes_by_city[e.from_city].longitude}}, 'type': 'Fused', 'params': {'loss': 0} } for e in eqpts if e.east_amp_type.lower() == 'fused'] + [{'uid': f'west edfa in {e.from_city} to {e.to_city}', - 'metadata': {'location': {'city': nodes_by_city[e.from_city].city, - 'region': nodes_by_city[e.from_city].region, - 'latitude': nodes_by_city[e.from_city].latitude, + 'metadata': {'location': {'city': nodes_by_city[e.from_city].city, + 'region': nodes_by_city[e.from_city].region, + 'latitude': nodes_by_city[e.from_city].latitude, 'longitude': nodes_by_city[e.from_city].longitude}}, 'type': 'Fused', 'params': {'loss': 0} @@ -388,17 +388,17 @@ def convert_file(input_filename, names_matching=False, filter_region=[]): + list(chain.from_iterable(zip( [{'from_node': f'trx {x.city}', - 'to_node': f'roadm {x.city}'} + 'to_node': f'roadm {x.city}'} for x in nodes_by_city.values() if x.node_type.lower() == 'roadm'], [{'from_node': f'roadm {x.city}', - 'to_node': f'trx {x.city}'} + 'to_node': f'trx {x.city}'} for x in nodes_by_city.values() if x.node_type.lower() == 'roadm']))) } suffix_filename = str(input_filename.suffixes[0]) full_input_filename = str(input_filename) - split_filename = [full_input_filename[0:len(full_input_filename)-len(suffix_filename)], suffix_filename[1:]] - output_json_file_name = split_filename[0]+'.json' + split_filename = [full_input_filename[0:len(full_input_filename) - len(suffix_filename)], suffix_filename[1:]] + output_json_file_name = split_filename[0] + '.json' with open(output_json_file_name, 'w', encoding='utf-8') as edfa_json_file: edfa_json_file.write(dumps(data, 
indent=2, ensure_ascii=False)) return output_json_file_name @@ -484,32 +484,32 @@ def parse_excel(input_filename): 'Node A': 'from_city', 'Node Z': 'to_city', 'east': { - 'Distance (km)': 'east_distance', - 'Fiber type': 'east_fiber', - 'lineic att': 'east_lineic', - 'Con_in': 'east_con_in', - 'Con_out': 'east_con_out', - 'PMD': 'east_pmd', - 'Cable id': 'east_cable' + 'Distance (km)': 'east_distance', + 'Fiber type': 'east_fiber', + 'lineic att': 'east_lineic', + 'Con_in': 'east_con_in', + 'Con_out': 'east_con_out', + 'PMD': 'east_pmd', + 'Cable id': 'east_cable' }, 'west': { - 'Distance (km)': 'west_distance', - 'Fiber type': 'west_fiber', - 'lineic att': 'west_lineic', - 'Con_in': 'west_con_in', - 'Con_out': 'west_con_out', - 'PMD': 'west_pmd', - 'Cable id': 'west_cable' + 'Distance (km)': 'west_distance', + 'Fiber type': 'west_fiber', + 'lineic att': 'west_lineic', + 'Con_in': 'west_con_in', + 'Con_out': 'west_con_out', + 'PMD': 'west_pmd', + 'Cable id': 'west_cable' } } node_headers = { - 'City': 'city', - 'State': 'state', - 'Country': 'country', - 'Region': 'region', - 'Latitude': 'latitude', - 'Longitude': 'longitude', - 'Type': 'node_type', + 'City': 'city', + 'State': 'state', + 'Country': 'country', + 'Region': 'region', + 'Latitude': 'latitude', + 'Longitude': 'longitude', + 'Type': 'node_type', 'Booster_restriction': 'booster_restriction', 'Preamp_restriction': 'preamp_restriction' } @@ -517,20 +517,20 @@ def parse_excel(input_filename): 'Node A': 'from_city', 'Node Z': 'to_city', 'east': { - 'amp type': 'east_amp_type', - 'att_in': 'east_att_in', - 'amp gain': 'east_amp_gain', - 'delta p': 'east_amp_dp', - 'tilt': 'east_tilt', - 'att_out': 'east_att_out' + 'amp type': 'east_amp_type', + 'att_in': 'east_att_in', + 'amp gain': 'east_amp_gain', + 'delta p': 'east_amp_dp', + 'tilt': 'east_tilt', + 'att_out': 'east_att_out' }, 'west': { - 'amp type': 'west_amp_type', - 'att_in': 'west_att_in', - 'amp gain': 'west_amp_gain', - 'delta p': 'west_amp_dp', - 'tilt': 'west_tilt', - 'att_out': 'west_att_out' + 'amp type': 'west_amp_type', + 'att_in': 'west_att_in', + 'amp gain': 'west_amp_gain', + 'delta p': 'west_amp_dp', + 'tilt': 'west_tilt', + 'att_out': 'west_att_out' } } @@ -544,7 +544,7 @@ def parse_excel(input_filename): eqpt_sheet = None nodes = [] - for node in parse_sheet(nodes_sheet, node_headers, NODES_LINE, NODES_LINE+1, NODES_COLUMN): + for node in parse_sheet(nodes_sheet, node_headers, NODES_LINE, NODES_LINE + 1, NODES_COLUMN): nodes.append(Node(**node)) expected_node_types = {'ROADM', 'ILA', 'FUSED'} for n in nodes: @@ -552,12 +552,12 @@ def parse_excel(input_filename): n.node_type = 'ILA' links = [] - for link in parse_sheet(links_sheet, link_headers, LINKS_LINE, LINKS_LINE+2, LINKS_COLUMN): + for link in parse_sheet(links_sheet, link_headers, LINKS_LINE, LINKS_LINE + 2, LINKS_COLUMN): links.append(Link(**link)) eqpts = [] if eqpt_sheet is not None: - for eqpt in parse_sheet(eqpt_sheet, eqpt_headers, EQPTS_LINE, EQPTS_LINE+2, EQPTS_COLUMN): + for eqpt in parse_sheet(eqpt_sheet, eqpt_headers, EQPTS_LINE, EQPTS_LINE + 2, EQPTS_COLUMN): eqpts.append(Eqpt(**eqpt)) # sanity check @@ -583,7 +583,7 @@ def eqpt_connection_by_city(city_name): for i in range(2): from_ = fiber_link(other_cities[i], city_name) in_ = eqpt_in_city_to_city(city_name, other_cities[0], direction[i]) - to_ = fiber_link(city_name, other_cities[1-i]) + to_ = fiber_link(city_name, other_cities[1 - i]) subdata += connect_eqpt(from_, in_, to_) elif nodes_by_city[city_name].node_type.lower() == 'roadm': for 
other_city in other_cities: @@ -712,12 +712,12 @@ def midpoint(city_a, city_b): longs = city_a.longitude, city_b.longitude try: result = { - 'latitude': sum(lats) / 2, + 'latitude': sum(lats) / 2, 'longitude': sum(longs) / 2 } except TypeError: result = { - 'latitude': 0, + 'latitude': 0, 'longitude': 0 } return result diff --git a/gnpy/topology/request.py b/gnpy/topology/request.py index f7fac917..6ca21760 100644 --- a/gnpy/topology/request.py +++ b/gnpy/topology/request.py @@ -206,7 +206,7 @@ class ResultElement(Element): }, { 'metric-type': 'SNR-0.1nm', - 'accumulative-value': round(mean(pth[-1].snr+lin2db(req.baud_rate/12.5e9)), 2) + 'accumulative-value': round(mean(pth[-1].snr + lin2db(req.baud_rate / 12.5e9)), 2) }, { 'metric-type': 'OSNR-bandwidth', @@ -313,7 +313,7 @@ def compute_constrained_path(network, req): # print(network.get_edge_data(s,e)) # s = e except NetworkXNoPath: - msg = f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path from' +\ + msg = f'\x1b[1;33;40m' + f'Request {req.request_id} could not find a path from' +\ f' {source.uid} to node: {destination.uid} in network topology' + '\x1b[0m' LOGGER.critical(msg) print(msg) @@ -332,8 +332,8 @@ def compute_constrained_path(network, req): # select the shortest path (in nb of hops) -> changed to shortest path in km length if len(candidate) > 0: # candidate.sort(key=lambda x: len(x)) - candidate.sort(key=lambda x: sum(network.get_edge_data(x[i], x[i+1])['weight'] - for i in range(len(x)-2))) + candidate.sort(key=lambda x: sum(network.get_edge_data(x[i], x[i + 1])['weight'] + for i in range(len(x) - 2))) total_path = candidate[0] else: # TODO: better account for individual loose and strict node @@ -343,17 +343,17 @@ def compute_constrained_path(network, req): # a path w/o constraints, else there is no possible path # no candidate can be found with the constraints - print(f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path crossing ' + + print(f'\x1b[1;33;40m' + f'Request {req.request_id} could not find a path crossing ' + f'{[el.uid for el in nodes_list[:-1]]} in network topology' + '\x1b[0m') if 'STRICT' not in req.loose_list[:-1]: - msg = f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path with user_' +\ + msg = f'\x1b[1;33;40m' + f'Request {req.request_id} could not find a path with user_' +\ f'include node constraints' + '\x1b[0m' LOGGER.info(msg) print(f'constraint ignored') else: # one STRICT makes the whole list STRICT - msg = f'\x1b[1;33;40m'+f'Request {req.request_id} could not find a path with user ' +\ + msg = f'\x1b[1;33;40m' + f'Request {req.request_id} could not find a path with user ' +\ f'include node constraints.\nNo path computed' + '\x1b[0m' LOGGER.critical(msg) print(msg) @@ -441,7 +441,7 @@ def propagate_and_optimize_mode(path, req, equipment): for this_mode in modes_to_explore: if path[-1].snr is not None: path[-1].update_snr(this_mode['tx_osnr'], equipment['Roadm']['default'].add_drop_osnr) - if round(min(path[-1].snr+lin2db(this_br/(12.5e9))), 2) > this_mode['OSNR']: + if round(min(path[-1].snr + lin2db(this_br / (12.5e9))), 2) > this_mode['OSNR']: return path, this_mode else: last_explored_mode = this_mode @@ -717,7 +717,7 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list): # sort them in km length instead of hop # all_simp_pths = sorted(all_simp_pths, key=lambda path: len(path)) all_simp_pths = sorted(all_simp_pths, key=lambda - x: sum(network.get_edge_data(x[i], x[i+1])['weight'] for i in range(len(x)-2))) + x: 
sum(network.get_edge_data(x[i], x[i + 1])['weight'] for i in range(len(x) - 2))) # reversed direction paths required to check disjunction on both direction all_simp_pths_reversed = [] for pth in all_simp_pths: @@ -858,7 +858,7 @@ def compute_path_dsjctn(network, equipment, pathreqlist, disjunctions_list): # or not if not ispart(allpaths[id(pth)].req.nodes_list, pth): # print(f'nb of solutions {len(temp)}') - if j < len(candidates[this_d.disjunction_id])-1: + if j < len(candidates[this_d.disjunction_id]) - 1: msg = f'removing {sol}' LOGGER.info(msg) testispartok = False diff --git a/tests/compare.py b/tests/compare.py index c2bca7d1..3e6ab45d 100644 --- a/tests/compare.py +++ b/tests/compare.py @@ -7,8 +7,8 @@ from collections import namedtuple class Results(namedtuple('Results', 'missing extra different expected actual')): def _asdict(self): - return {'missing': self.missing, - 'extra': self.extra, + return {'missing': self.missing, + 'extra': self.extra, 'different': self.different} def __str__(self): @@ -29,7 +29,7 @@ class Results(namedtuple('Results', 'missing extra different expected actual')): class NetworksResults(namedtuple('NetworksResult', 'elements connections')): def _asdict(self): - return {'elements': self.elements._asdict(), + return {'elements': self.elements._asdict(), 'connections': self.connections._asdict()} def __str__(self): @@ -43,7 +43,7 @@ class NetworksResults(namedtuple('NetworksResult', 'elements connections')): class ServicesResults(namedtuple('ServicesResult', 'requests synchronizations')): def _asdict(self): - return {'requests': self.requests.asdict(), + return {'requests': self.requests.asdict(), 'synchronizations': self.synchronizations.asdict()} def __str__(self): @@ -104,13 +104,13 @@ def compare_paths(expected_output, actual_output): COMPARISONS = { 'networks': compare_networks, 'services': compare_services, - 'paths': compare_paths, + 'paths': compare_paths, } parser = ArgumentParser() parser.add_argument('expected_output', type=Path, metavar='FILE') -parser.add_argument('actual_output', type=Path, metavar='FILE') -parser.add_argument('-o', '--output', default=None) +parser.add_argument('actual_output', type=Path, metavar='FILE') +parser.add_argument('-o', '--output', default=None) parser.add_argument('-c', '--comparison', choices=COMPARISONS, default='networks') diff --git a/tests/test_amplifier.py b/tests/test_amplifier.py index 6fffed47..4e1b18e6 100644 --- a/tests/test_amplifier.py +++ b/tests/test_amplifier.py @@ -84,8 +84,8 @@ def test_variable_gain_nf(gain, nf_expected, setup_edfa_variable_gain, si): """=> unitary test for variable gain model Edfa._calc_nf() (and Edfa.interpol_params)""" edfa = setup_edfa_variable_gain frequencies = array([c.frequency for c in si.carriers]) - pin = array([c.power.signal+c.power.nli+c.power.ase for c in si.carriers]) - pin = pin/db2lin(gain) + pin = array([c.power.signal + c.power.nli + c.power.ase for c in si.carriers]) + pin = pin / db2lin(gain) baud_rates = array([c.baud_rate for c in si.carriers]) edfa.operational.gain_target = gain pref = Pref(0, -gain, lin2db(len(frequencies))) @@ -99,8 +99,8 @@ def test_fixed_gain_nf(gain, nf_expected, setup_edfa_fixed_gain, si): """=> unitary test for fixed gain model Edfa._calc_nf() (and Edfa.interpol_params)""" edfa = setup_edfa_fixed_gain frequencies = array([c.frequency for c in si.carriers]) - pin = array([c.power.signal+c.power.nli+c.power.ase for c in si.carriers]) - pin = pin/db2lin(gain) + pin = array([c.power.signal + c.power.nli + c.power.ase for c in 
si.carriers]) + pin = pin / db2lin(gain) baud_rates = array([c.baud_rate for c in si.carriers]) edfa.operational.gain_target = gain pref = Pref(0, -gain, lin2db(len(frequencies))) @@ -112,7 +112,7 @@ def test_fixed_gain_nf(gain, nf_expected, setup_edfa_fixed_gain, si): def test_si(si, nch_and_spacing): """basic total power check of the channel comb generation""" nb_channel = nch_and_spacing[0] - pin = array([c.power.signal+c.power.nli+c.power.ase for c in si.carriers]) + pin = array([c.power.signal + c.power.nli + c.power.ase for c in si.carriers]) p_tot = pin.sum() expected_p_tot = si.carriers[0].power.signal * nb_channel assert pytest.approx(expected_p_tot, abs=0.01) == p_tot @@ -126,8 +126,8 @@ def test_compare_nf_models(gain, setup_edfa_variable_gain, si): => unitary test for Edfa._calc_nf (and Edfa.interpol_params)""" edfa = setup_edfa_variable_gain frequencies = array([c.frequency for c in si.carriers]) - pin = array([c.power.signal+c.power.nli+c.power.ase for c in si.carriers]) - pin = pin/db2lin(gain) + pin = array([c.power.signal + c.power.nli + c.power.ase for c in si.carriers]) + pin = pin / db2lin(gain) baud_rates = array([c.baud_rate for c in si.carriers]) edfa.operational.gain_target = gain # edfa is variable gain type @@ -178,7 +178,7 @@ def test_ase_noise(gain, si, setup_trx, bw): span = next(n for n in network.nodes() if n.uid == 'Span1') # update span1 and Edfa1 according to new gain before building network # updating span 1 avoids to overload amp - span.params.length = gain*1e3 / 0.2 + span.params.length = gain * 1e3 / 0.2 edfa.operational.gain_target = gain build_network(network, equipment, 0, 20) edfa.gain_ripple = zeros(96) @@ -188,20 +188,20 @@ def test_ase_noise(gain, si, setup_trx, bw): print(span) frequencies = array([c.frequency for c in si.carriers]) - pin = array([c.power.signal+c.power.nli+c.power.ase for c in si.carriers]) + pin = array([c.power.signal + c.power.nli + c.power.ase for c in si.carriers]) baud_rates = array([c.baud_rate for c in si.carriers]) pref = Pref(0, -gain, lin2db(len(frequencies))) edfa.interpol_params(frequencies, pin, baud_rates, pref) nf = edfa.nf print('nf', nf) - pin = lin2db(pin[0]*1e3) + pin = lin2db(pin[0] * 1e3) osnr_expected = pin - nf[0] + 58 si = edfa(si) print(edfa) pout = array([c.power.signal for c in si.carriers]) pase = array([c.power.ase for c in si.carriers]) - osnr = lin2db(pout[0] / pase[0]) - lin2db(12.5e9/bw) + osnr = lin2db(pout[0] / pase[0]) - lin2db(12.5e9 / bw) assert pytest.approx(osnr_expected, abs=0.01) == osnr trx = setup_trx diff --git a/tests/test_automaticmodefeature.py b/tests/test_automaticmodefeature.py index 52a6f3c4..0e5f6330 100644 --- a/tests/test_automaticmodefeature.py +++ b/tests/test_automaticmodefeature.py @@ -58,7 +58,7 @@ def test_automaticmodefeature(net, eqpt, serv, expected_mode): # use the power specified in requests but might be different from the one specified for design # the power is an optional parameter for requests definition # if optional, use the one defines in eqt_config.json - p_db = lin2db(pathreq.power*1e3) + p_db = lin2db(pathreq.power * 1e3) p_total_db = p_db + lin2db(pathreq.nb_channel) print(f'request {pathreq.request_id}') print(f'Computing path from {pathreq.source} to {pathreq.destination}') diff --git a/tests/test_parser.py b/tests/test_parser.py index 0966a7f1..7f36b17b 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -42,8 +42,8 @@ equipment = load_equipment(eqpt_filename) @pytest.mark.parametrize('xls_input,expected_json_output', { - DATA_DIR / 
'CORONET_Global_Topology.xlsx': DATA_DIR / 'CORONET_Global_Topology_expected.json', - DATA_DIR / 'testTopology.xls': DATA_DIR / 'testTopology_expected.json', + DATA_DIR / 'CORONET_Global_Topology.xlsx': DATA_DIR / 'CORONET_Global_Topology_expected.json', + DATA_DIR / 'testTopology.xls': DATA_DIR / 'testTopology_expected.json', }.items()) def test_excel_json_generation(xls_input, expected_json_output): """ tests generation of topology json @@ -156,8 +156,8 @@ def test_auto_design_generation_fromjson(json_input, expected_json_output): @pytest.mark.parametrize('xls_input,expected_json_output', { - DATA_DIR / 'testTopology.xls': DATA_DIR / 'testTopology_services_expected.json', - DATA_DIR / 'testService.xls': DATA_DIR / 'testService_services_expected.json' + DATA_DIR / 'testTopology.xls': DATA_DIR / 'testTopology_services_expected.json', + DATA_DIR / 'testService.xls': DATA_DIR / 'testService_services_expected.json' }.items()) def test_excel_service_json_generation(xls_input, expected_json_output): """ test services creation @@ -193,7 +193,7 @@ def test_excel_service_json_generation(xls_input, expected_json_output): @pytest.mark.parametrize('json_input, csv_output', { - DATA_DIR / 'testTopology_response.json': DATA_DIR / 'testTopology_response', + DATA_DIR / 'testTopology_response.json': DATA_DIR / 'testTopology_response', }.items()) def test_csv_response_generation(json_input, csv_output): """ tests if generated csv is consistant with expected generation @@ -202,11 +202,11 @@ def test_csv_response_generation(json_input, csv_output): with open(json_input) as jsonfile: json_data = load(jsonfile) equipment = load_equipment(eqpt_filename) - csv_filename = str(csv_output)+'.csv' + csv_filename = str(csv_output) + '.csv' with open(csv_filename, 'w', encoding='utf-8') as fcsv: jsontocsv(json_data, equipment, fcsv) - expected_csv_filename = str(csv_output)+'_expected.csv' + expected_csv_filename = str(csv_output) + '_expected.csv' # expected header # csv_header = \ @@ -266,14 +266,14 @@ def compare_response(exp_resp, act_resp): print(act_resp) test = True for key in act_resp.keys(): - if not key in exp_resp.keys(): + if key not in exp_resp.keys(): print(f'{key} is not expected') return False if isinstance(act_resp[key], dict): test = compare_response(exp_resp[key], act_resp[key]) if test: for key in exp_resp.keys(): - if not key in act_resp.keys(): + if key not in act_resp.keys(): print(f'{key} is expected') return False if isinstance(exp_resp[key], dict): @@ -290,7 +290,7 @@ def compare_response(exp_resp, act_resp): # test json answers creation @pytest.mark.parametrize('xls_input, expected_response_file', { - DATA_DIR / 'testTopology.xls': DATA_DIR / 'testTopology_response.json', + DATA_DIR / 'testTopology.xls': DATA_DIR / 'testTopology_response.json', }.items()) def test_json_response_generation(xls_input, expected_response_file): """ tests if json response is correctly generated for all combinations of requests diff --git a/tests/test_propagation.py b/tests/test_propagation.py index 1ccb67fa..9621b960 100644 --- a/tests/test_propagation.py +++ b/tests/test_propagation.py @@ -51,7 +51,7 @@ def propagation(input_power, con_in, con_out, dest): p = input_power p = db2lin(p) * 1e-3 spacing = 50e9 # THz - si = create_input_spectral_information(191.3e12, 191.3e12+79*spacing, 0.15, 32e9, p, spacing) + si = create_input_spectral_information(191.3e12, 191.3e12 + 79 * spacing, 0.15, 32e9, p, spacing) source = next(transceivers[uid] for uid in transceivers if uid == 'trx A') sink = next(transceivers[uid] for 
uid in transceivers if uid == dest) path = dijkstra_path(network, source, sink) @@ -79,16 +79,16 @@ def test_snr(osnr_test, dest): conn_out = test[osnr_test][2] sink, nf = propagation(pw, conn_in, conn_out, dest) osnr = round(mean(sink.osnr_ase), 3) - nli = 1.0/db2lin(round(mean(sink.snr), 3)) - 1.0/db2lin(osnr) + nli = 1.0 / db2lin(round(mean(sink.snr), 3)) - 1.0 / db2lin(osnr) pw = expected[osnr_test][0] conn_in = expected[osnr_test][1] conn_out = expected[osnr_test][2] sink, exp_nf = propagation(pw, conn_in, conn_out, dest) expected_osnr = round(mean(sink.osnr_ase), 3) - expected_nli = 1.0/db2lin(round(mean(sink.snr), 3)) - 1.0/db2lin(expected_osnr) + expected_nli = 1.0 / db2lin(round(mean(sink.snr), 3)) - 1.0 / db2lin(expected_osnr) # compare OSNR taking into account nf change of amps osnr_diff = abs(osnr - expected_osnr + nf - exp_nf) - nli_diff = abs((nli-expected_nli)/nli) + nli_diff = abs((nli - expected_nli) / nli) assert osnr_diff < 0.01 and nli_diff < 0.01 diff --git a/tests/test_roadm_restrictions.py b/tests/test_roadm_restrictions.py index bd8606e7..261d2a97 100644 --- a/tests/test_roadm_restrictions.py +++ b/tests/test_roadm_restrictions.py @@ -193,12 +193,12 @@ def test_restrictions(restrictions, equipment): if restrictions['booster_variety_list'] and \ not roadms[next(network.predecessors(amp)).uid]\ .restrictions['booster_variety_list']: - if not amp.params.type_variety in restrictions['booster_variety_list']: + if amp.params.type_variety not in restrictions['booster_variety_list']: raise AssertionError() for amp in preamp_nodes: if amp.uid not in preamp_nodes_nobuild_uid: if restrictions['preamp_variety_list'] and\ not roadms[next(network.successors(amp)).uid].restrictions['preamp_variety_list']: - if not amp.params.type_variety in restrictions['preamp_variety_list']: + if amp.params.type_variety not in restrictions['preamp_variety_list']: raise AssertionError() diff --git a/tests/test_spectrum_assignment.py b/tests/test_spectrum_assignment.py index b8e7507e..37e8fa85 100644 --- a/tests/test_spectrum_assignment.py +++ b/tests/test_spectrum_assignment.py @@ -69,7 +69,7 @@ def test_oms(setup): assert not isinstance(elem, Roadm) assert elem in network.nodes() assert elem.oms.oms_id == oms.oms_id - assert elem.uid == oms.el_id_list[i+1] + assert elem.uid == oms.el_id_list[i + 1] @pytest.mark.parametrize('nval', [0, 10, -255]) @@ -175,7 +175,7 @@ def test_assign_and_sum(nval1, nval2, setup): # if requested slots exceed grid spectrum should not be assigned and assignment # should return False if ((nval1 - mval) < oms1.spectrum_bitmap.getn(0) or - (nval1 + mval-1) > oms1.spectrum_bitmap.getn(ind_max)): + (nval1 + mval - 1) > oms1.spectrum_bitmap.getn(ind_max)): with pytest.raises(SpectrumError): oms1.assign_spectrum(nval1, mval) for elem in oms1.spectrum_bitmap.bitmap: @@ -317,4 +317,4 @@ def test_reversed_direction(equipment, setup, requests, services): print(f'\t spectrum: ' + f'{this_path[len(this_path)-j-1].oms.spectrum_bitmap.bitmap[imin:imax]}') assert elem.oms.spectrum_bitmap.bitmap[imin:imax] == \ - this_path[len(this_path)-j-1].oms.spectrum_bitmap.bitmap[imin:imax] + this_path[len(this_path) - j - 1].oms.spectrum_bitmap.bitmap[imin:imax]
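
For readers following the gnpy/core/spectrum_assignment.py hunks above, the reformatted lines implement the flexgrid N/M bookkeeping (ITU-T style slot indexing). The standalone Python sketch below restates that arithmetic outside the module, assuming the 193.1 THz anchor and 6.25 GHz granularity visible in the diff; nvalue_to_frequency is written here as the plain inverse of frequency_to_n (its body is not shown in the hunks), m_to_freq inlines the start/stop slot computation, and the example values are illustrative rather than taken from the module's doctests.

# Standalone sketch of the flexgrid arithmetic touched in gnpy/core/spectrum_assignment.py.
# Constants follow the defaults visible in the diff; everything else is illustrative.

GRID = 0.00625e12   # 6.25 GHz slot granularity (default grid in the diff)
F_REF = 193.1e12    # anchor frequency used by frequency_to_n


def frequency_to_n(freq, grid=GRID):
    # centre frequency -> integer N index, as in the reformatted line above
    return int((freq - F_REF) / grid)


def nvalue_to_frequency(nvalue, grid=GRID):
    # assumed plain inverse of frequency_to_n
    return F_REF + nvalue * grid


def slots_to_m(startn, stopn):
    # (startn, stopn) slot range -> (N, M) pair, mirroring the reformatted lines
    nvalue = int((startn + stopn + 1) / 2)
    mvalue = int((stopn - startn + 1) / 2)
    return nvalue, mvalue


def m_to_freq(nvalue, mvalue, grid=GRID):
    # start/stop frequencies of an (N, M) assignment; stopn + 1 marks the upper slot edge,
    # matching the "nvalue_to_frequency(stopn + 1, grid)" call in the diff
    startn, stopn = nvalue - mvalue, nvalue + mvalue - 1
    return nvalue_to_frequency(startn, grid), nvalue_to_frequency(stopn + 1, grid)


if __name__ == '__main__':
    n = frequency_to_n(193.225e12)       # 20 slots (125 GHz) above the anchor
    print(n, nvalue_to_frequency(n))     # 20  193225000000000.0
    print(slots_to_m(13, 26))            # (20, 7): a 14-slot (87.5 GHz) block centred on N = 20
    print(m_to_freq(20, 7))              # (193181250000000.0, 193268750000000.0)

The round trip (slots -> N/M -> frequencies) is the invariant the reformatted hunks preserve: 2 * M slots of 6.25 GHz always span fstop - fstart, here 87.5 GHz for M = 7.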