add: batch up to 600 CSV files every 20 minutes and fix the duplicate-CSV-file problem

This commit is contained in:
kostas 2024-08-07 15:46:45 +02:00
parent 8ff320a5f3
commit e9031fa5ed
2 changed files with 37 additions and 33 deletions

View File

@@ -916,7 +916,8 @@ def create_batch_of_csv_files():
csv_files.sort(key=lambda x: os.path.getctime(os.path.join(CSV_DIR, x))) csv_files.sort(key=lambda x: os.path.getctime(os.path.join(CSV_DIR, x)))
# keep the 30 MOST RECENT FILES # keep the 30 MOST RECENT FILES
recent_csv_files = csv_files[-30:] if len(csv_files) > 30 else csv_files #recent_csv_files = csv_files[-30:] if len(csv_files) > 30 else csv_files
recent_csv_files = csv_files[-600:] if len(csv_files) > 600 else csv_files
# get the name of the first csv file # get the name of the first csv file
if not csv_files: if not csv_files:
@@ -1089,22 +1090,23 @@ def create_csv_files(signals, statuses, node_numbers, alarms_number_list, warnin
csv_filename = f"{timestamp}.csv" csv_filename = f"{timestamp}.csv"
csv_path = os.path.join(CSV_DIR, csv_filename) csv_path = os.path.join(CSV_DIR, csv_filename)
# Append values to the CSV file # Append values to the CSV file
with open(csv_path, 'a', newline='') as csvfile: if not os.path.exists(csv_path):
csv_writer = csv.writer(csvfile, delimiter=';') with open(csv_path, 'a', newline='') as csvfile:
# Add a special row for the nodes configuration csv_writer = csv.writer(csvfile, delimiter=';')
nodes_config_path = "/Config/Devices/BatteryNodes" # Add a special row for the nodes configuration
nodes_list = ",".join(str(node) for node in node_numbers) nodes_config_path = "/Config/Devices/BatteryNodes"
config_row = [nodes_config_path, nodes_list, ""] nodes_list = ",".join(str(node) for node in node_numbers)
csv_writer.writerow(config_row) config_row = [nodes_config_path, nodes_list, ""]
# Iterate over each node and signal to create rows in the new format csv_writer.writerow(config_row)
for i, node in enumerate(node_numbers): # Iterate over each node and signal to create rows in the new format
csv_writer.writerow([f"/Battery/Devices/{str(i+1)}/Alarms", alarms_number_list[i], ""]) for i, node in enumerate(node_numbers):
csv_writer.writerow([f"/Battery/Devices/{str(i+1)}/Warnings", warnings_number_list[i], ""]) csv_writer.writerow([f"/Battery/Devices/{str(i+1)}/Alarms", alarms_number_list[i], ""])
for s in signals: csv_writer.writerow([f"/Battery/Devices/{str(i+1)}/Warnings", warnings_number_list[i], ""])
signal_name = insert_id(s.name, i+1) for s in signals:
value = s.get_value(statuses[i]) signal_name = insert_id(s.name, i+1)
row_values = [signal_name, value, s.get_text] value = s.get_value(statuses[i])
csv_writer.writerow(row_values) row_values = [signal_name, value, s.get_text]
csv_writer.writerow(row_values)
def main(argv): def main(argv):
# type: (list[str]) -> () # type: (list[str]) -> ()

View File

@@ -591,7 +591,8 @@ def create_batch_of_csv_files():
csv_files.sort(key=lambda x: os.path.getctime(os.path.join(CSV_DIR, x))) csv_files.sort(key=lambda x: os.path.getctime(os.path.join(CSV_DIR, x)))
# keep the 30 MOST RECENT FILES # keep the 30 MOST RECENT FILES
recent_csv_files = csv_files[-30:] if len(csv_files) > 30 else csv_files #recent_csv_files = csv_files[-30:] if len(csv_files) > 30 else csv_files
recent_csv_files = csv_files[-600:] if len(csv_files) > 600 else csv_files
# get the name of the first csv file # get the name of the first csv file
if not csv_files: if not csv_files:
@@ -685,26 +686,27 @@ def create_batch_of_csv_files():
def create_csv_files(signals, statuses, node_numbers, alarms_number_list, warnings_number_list): def create_csv_files(signals, statuses, node_numbers, alarms_number_list, warnings_number_list):
timestamp = int(time.time()) timestamp = int(time.time())
if timestamp % 2 != 0: if timestamp % 2 != 0:
timestamp-=1 timestamp -= 1
if not os.path.exists(CSV_DIR): if not os.path.exists(CSV_DIR):
os.makedirs(CSV_DIR) os.makedirs(CSV_DIR)
csv_filename = "{}.csv".format(timestamp) csv_filename = "{}.csv".format(timestamp)
csv_path = os.path.join(CSV_DIR, csv_filename) csv_path = os.path.join(CSV_DIR, csv_filename)
with open(csv_path, 'ab') as csvfile: if not os.path.exists(csv_path):
csv_writer = csv.writer(csvfile, delimiter=';') with open(csv_path, 'ab') as csvfile:
nodes_config_path = "/Config/Devices/BatteryNodes" csv_writer = csv.writer(csvfile, delimiter=';')
nodes_list = ",".join(str(node) for node in node_numbers) nodes_config_path = "/Config/Devices/BatteryNodes"
config_row = [nodes_config_path, nodes_list, ""] nodes_list = ",".join(str(node) for node in node_numbers)
csv_writer.writerow(config_row) config_row = [nodes_config_path, nodes_list, ""]
for i, node in enumerate(node_numbers): csv_writer.writerow(config_row)
csv_writer.writerow(["/Battery/Devices/{}/Alarms".format(str(i+1)), alarms_number_list[i], ""]) for i, node in enumerate(node_numbers):
csv_writer.writerow(["/Battery/Devices/{}/Warnings".format(str(i+1)), warnings_number_list[i], ""]) csv_writer.writerow(["/Battery/Devices/{}/Alarms".format(str(i+1)), alarms_number_list[i], ""])
for s in signals: csv_writer.writerow(["/Battery/Devices/{}/Warnings".format(str(i+1)), warnings_number_list[i], ""])
signal_name = insert_id(s.name, i+1) for s in signals:
value = s.get_value(statuses[i]) signal_name = insert_id(s.name, i+1)
row_values = [signal_name, value, s.get_text] value = s.get_value(statuses[i])
csv_writer.writerow(row_values) row_values = [signal_name, value, s.get_text]
csv_writer.writerow(row_values)
def create_watchdog_task(main_loop): def create_watchdog_task(main_loop):
# type: (DBusGMainLoop) -> Callable[[],bool] # type: (DBusGMainLoop) -> Callable[[],bool]