lfdata: Fix whitespace

Signed-off-by: matthew <stidmatt@gmail.com>
This commit is contained in:
matthew
2021-12-03 07:40:07 -08:00
parent 47e5b47843
commit 348985a3da

View File

@@ -4,16 +4,16 @@ import datetime
# LFData class actions:
# - Methods to collect data/store data (use from monitor instance) - used by Profile class.
# - file open/save
# - save row (rolling) - to CSV (standard)
# - headers
# - file to data-storage-type conversion and vice versa (e.g. dataframe (or datatable) to file type and vice versa)
# - other common util methods related to immediate data storage
# - include compression method
# - monitoring truncates every 5 mins and sends to report? --- need clarification. truncate file and rewrite to same file?
# - large data collection use NFS share to NAS.
# Websocket class actions:
# reading data from websockets
class LFDataCollection: class LFDataCollection:
def __init__(self, local_realm, debug=False): def __init__(self, local_realm, debug=False):
@@ -26,66 +26,66 @@ class LFDataCollection:
def check_json_validity(self, keyword=None, json_response=None): def check_json_validity(self, keyword=None, json_response=None):
if json_response is None: if json_response is None:
raise ValueError("Cannot find columns requested to be searched in port manager. Exiting script, please retry.") raise ValueError(
"Cannot find columns requested to be searched in port manager. Exiting script, please retry.")
if keyword is not None and keyword not in json_response: if keyword is not None and keyword not in json_response:
raise ValueError("Cannot find proper information from json. Please check your json request. Exiting script, please retry.") raise ValueError(
"Cannot find proper information from json. Please check your json request. Exiting script, please retry.")
def get_milliseconds(self, def get_milliseconds(self,
timestamp): timestamp):
return (timestamp - datetime.datetime(1970,1,1)).total_seconds()*1000 return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds() * 1000
def get_seconds(self, def get_seconds(self,
timestamp): timestamp):
return (timestamp - datetime.datetime(1970,1,1)).total_seconds() return (timestamp - datetime.datetime(1970, 1, 1)).total_seconds()
# only for ipv4_variable_time at the moment
def monitor_interval(self, header_row_=None,
start_time_=None, sta_list_=None,
created_cx_=None, layer3_fields_=None,
port_mgr_fields_=None):
#only for ipv4_variable_time at the moment # time calculations for while loop and writing to csv
def monitor_interval(self, header_row_= None, t = datetime.datetime.now()
start_time_= None, sta_list_= None, timestamp = t.strftime("%m/%d/%Y %I:%M:%S")
created_cx_= None, layer3_fields_= None, t_to_millisec_epoch = int(self.get_milliseconds(t))
port_mgr_fields_= None): time_elapsed = int(self.get_seconds(t)) - int(self.get_seconds(start_time_))
#time calculations for while loop and writing to csv # get responses from json
t = datetime.datetime.now() layer_3_response = self.json_get("/endp/%s?fields=%s" % (created_cx_, layer3_fields_), debug_=self.debug)
timestamp= t.strftime("%m/%d/%Y %I:%M:%S") if port_mgr_fields_ is not None:
t_to_millisec_epoch= int(self.get_milliseconds(t)) port_mgr_response = self.json_get("/port/1/1/%s?fields=%s" % (sta_list_, port_mgr_fields_),
time_elapsed=int(self.get_seconds(t))-int(self.get_seconds(start_time_)) debug_=self.debug)
#get responses from json # check json response validity
layer_3_response = self.json_get("/endp/%s?fields=%s" % (created_cx_, layer3_fields_),debug_=self.debug) self.check_json_validity(keyword="endpoint", json_response=layer_3_response)
self.check_json_validity(keyword="interfaces", json_response=port_mgr_response)
# dict manipulation
temp_list = []
for endpoint in layer_3_response["endpoint"]:
if self.debug:
print("Current endpoint values list... ")
print(list(endpoint.values())[0])
temp_endp_values = list(endpoint.values())[0] # dict
temp_list.extend([timestamp, t_to_millisec_epoch, time_elapsed])
current_sta = temp_endp_values['name']
merge = {}
if port_mgr_fields_ is not None: if port_mgr_fields_ is not None:
port_mgr_response=self.json_get("/port/1/1/%s?fields=%s" % (sta_list_, port_mgr_fields_), debug_=self.debug) for sta_name in sta_list_:
if sta_name in current_sta:
for interface in port_mgr_response["interfaces"]:
if sta_name in list(interface.keys())[0]:
merge = temp_endp_values.copy()
#check json response validity port_mgr_values_dict = list(interface.values())[0]
self.check_json_validity(keyword="endpoint",json_response=layer_3_response) renamed_port_cols = {}
self.check_json_validity(keyword="interfaces",json_response=port_mgr_response) for key in port_mgr_values_dict.keys():
renamed_port_cols['port mgr - ' + key] = port_mgr_values_dict[key]
#dict manipulation merge.update(renamed_port_cols)
temp_list=[] for name in header_row_[3:-3]:
for endpoint in layer_3_response["endpoint"]: temp_list.append(merge[name])
if self.debug: return temp_list
print("Current endpoint values list... ")
print(list(endpoint.values())[0])
temp_endp_values=list(endpoint.values())[0] #dict
temp_list.extend([timestamp,t_to_millisec_epoch,time_elapsed])
current_sta = temp_endp_values['name']
merge={}
if port_mgr_fields_ is not None:
for sta_name in sta_list_:
if sta_name in current_sta:
for interface in port_mgr_response["interfaces"]:
if sta_name in list(interface.keys())[0]:
merge=temp_endp_values.copy()
port_mgr_values_dict =list(interface.values())[0]
renamed_port_cols={}
for key in port_mgr_values_dict.keys():
renamed_port_cols['port mgr - ' +key]=port_mgr_values_dict[key]
merge.update(renamed_port_cols)
for name in header_row_[3:-3]:
temp_list.append(merge[name])
return temp_list
#class WebSocket():
# class WebSocket():