add: keep the 600 most recent csv files when creating a batch every 20 minutes and fix the duplicate csv files problem
parent 8ff320a5f3
commit e9031fa5ed
@@ -916,7 +916,8 @@ def create_batch_of_csv_files():
     csv_files.sort(key=lambda x: os.path.getctime(os.path.join(CSV_DIR, x)))
 
     # keep the 30 MOST RECENT FILES
-    recent_csv_files = csv_files[-30:] if len(csv_files) > 30 else csv_files
+    #recent_csv_files = csv_files[-30:] if len(csv_files) > 30 else csv_files
+    recent_csv_files = csv_files[-600:] if len(csv_files) > 600 else csv_files
 
     # get the name of the first csv file
     if not csv_files:
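In plain terms, the batch step sorts the CSV files by creation time and now keeps the 600 most recent instead of the previous 30. A minimal standalone sketch of that selection, assuming CSV_DIR points at the directory the script uses (the helper name and the example path are hypothetical):

import os

CSV_DIR = "/path/to/csv"   # assumption: the real directory is defined elsewhere in the script
BATCH_LIMIT = 600          # raised from 30 by this commit

def select_recent_csv_files(csv_dir=CSV_DIR, limit=BATCH_LIMIT):
    # list only .csv files and sort by creation time so the newest come last
    csv_files = [f for f in os.listdir(csv_dir) if f.endswith(".csv")]
    csv_files.sort(key=lambda x: os.path.getctime(os.path.join(csv_dir, x)))
    # keep at most `limit` of the most recent files for this batch
    return csv_files[-limit:] if len(csv_files) > limit else csv_files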
@@ -1089,6 +1090,7 @@ def create_csv_files(signals, statuses, node_numbers, alarms_number_list, warnin
     csv_filename = f"{timestamp}.csv"
     csv_path = os.path.join(CSV_DIR, csv_filename)
     # Append values to the CSV file
+    if not os.path.exists(csv_path):
     with open(csv_path, 'a', newline='') as csvfile:
         csv_writer = csv.writer(csvfile, delimiter=';')
         # Add a special row for the nodes configuration
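The single added line here is the duplicate-files fix: the nodes-configuration row should only be written when the timestamped file does not exist yet. A minimal sketch of how the guard is intended to read, assuming the with block is meant to nest under the check (the function name and row content are illustrative):

import csv
import os

def create_csv_if_missing(csv_path, nodes_config_row):
    # skip creation entirely if a file for this timestamp already exists,
    # so repeated calls cannot produce duplicate configuration rows
    if not os.path.exists(csv_path):
        with open(csv_path, 'a', newline='') as csvfile:
            csv_writer = csv.writer(csvfile, delimiter=';')
            # special row for the nodes configuration, written once per file
            csv_writer.writerow(nodes_config_row)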
@@ -591,7 +591,8 @@ def create_batch_of_csv_files():
     csv_files.sort(key=lambda x: os.path.getctime(os.path.join(CSV_DIR, x)))
 
     # keep the 30 MOST RECENT FILES
-    recent_csv_files = csv_files[-30:] if len(csv_files) > 30 else csv_files
+    #recent_csv_files = csv_files[-30:] if len(csv_files) > 30 else csv_files
+    recent_csv_files = csv_files[-600:] if len(csv_files) > 600 else csv_files
 
     # get the name of the first csv file
     if not csv_files:
@@ -691,6 +692,7 @@ def create_csv_files(signals, statuses, node_numbers, alarms_number_list, warnin
     csv_filename = "{}.csv".format(timestamp)
     csv_path = os.path.join(CSV_DIR, csv_filename)
 
+    if not os.path.exists(csv_path):
     with open(csv_path, 'ab') as csvfile:
         csv_writer = csv.writer(csvfile, delimiter=';')
         nodes_config_path = "/Config/Devices/BatteryNodes"
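The second file receives the same two changes, but it builds the filename with str.format() and opens the CSV in binary append mode, which suggests it targets Python 2, where csv.writer expects a file opened with 'b'; the first file's 'a' with newline='' is the Python 3 idiom. A small illustration of the two open styles (the filename and row are only examples):

import csv

# Python 3 style (first file): text mode plus newline='' so the csv module controls line endings
with open("example.csv", 'a', newline='') as csvfile:
    csv.writer(csvfile, delimiter=';').writerow(["node", "value"])

# Python 2 style (second file): binary append mode, as the csv module expects on Python 2;
# under Python 3 the same call raises a TypeError, so it is left commented out here
# with open("example.csv", 'ab') as csvfile:
#     csv.writer(csvfile, delimiter=';').writerow(["node", "value"])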