diff --git a/csharp/App/Backend/Websockets/WebsockerManager.cs b/csharp/App/Backend/Websockets/WebsockerManager.cs
index 40be153e9..321597c11 100644
--- a/csharp/App/Backend/Websockets/WebsockerManager.cs
+++ b/csharp/App/Backend/Websockets/WebsockerManager.cs
@@ -34,7 +34,7 @@ public static class WebsocketManager
         while (true){
             lock (InstallationConnections){
                 foreach (var installationConnection in InstallationConnections){
-                    if (installationConnection.Value.Product==1 && (DateTime.Now - installationConnection.Value.Timestamp) > TimeSpan.FromMinutes(20))
+                    if (installationConnection.Value.Product==1 && (DateTime.Now - installationConnection.Value.Timestamp) > TimeSpan.FromMinutes(30))
                     {
                         Console.WriteLine("Installation ID is "+installationConnection.Key);
                         Console.WriteLine("installationConnection.Value.Timestamp is "+installationConnection.Value.Timestamp);
diff --git a/firmware/Cerbo_Release/CerboReleaseFiles/dbus-fzsonick-48tl/config.py b/firmware/Cerbo_Release/CerboReleaseFiles/dbus-fzsonick-48tl/config.py
index a4c57792b..19c62b221 100644
--- a/firmware/Cerbo_Release/CerboReleaseFiles/dbus-fzsonick-48tl/config.py
+++ b/firmware/Cerbo_Release/CerboReleaseFiles/dbus-fzsonick-48tl/config.py
@@ -13,9 +13,9 @@ DEVICE_INSTANCE = 1
 SERVICE_NAME_PREFIX = 'com.victronenergy.battery.'
 
 #s3 configuration
-S3BUCKET = "17-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e"
-S3KEY = "EXO067a6ceb91816b42312226a9"
-S3SECRET = "79tQlqQuS6yhexP0IT-6-bIdIP2FGGTQLn2e2oE4FC8"
+S3BUCKET = "195-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e"
+S3KEY = "EXO4657447df7b842d19b40af13"
+S3SECRET = "1ZAZ-ftzKsfYEL7SxldkWKg-3Ik_yQ6vXffLMIz5ACU"
 
 
 # driver configuration
diff --git a/firmware/Cerbo_Release/CerboReleaseFiles/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py b/firmware/Cerbo_Release/CerboReleaseFiles/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py
index c25398a70..edf960398 100755
--- a/firmware/Cerbo_Release/CerboReleaseFiles/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py
+++ b/firmware/Cerbo_Release/CerboReleaseFiles/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py
@@ -951,15 +951,12 @@ def create_batch_of_csv_files():
     os.rename(temp_file_path, first_csv_file)
 
     # create a loggin directory that contains at max 20 batch files for logging info
-    logging_dir = os.path.join(CSV_DIR, 'logging_batch_files')
-    if not os.path.exists(logging_dir):
-        os.makedirs(logging_dir)
-
-    shutil.copy(first_csv_file, logging_dir)
-    manage_csv_files(logging_dir)
-
-    # keep at most 1900 files at CSV_DIR for logging and aggregation
-    manage_csv_files(CSV_DIR, 1900)
+    # logging_dir = os.path.join(CSV_DIR, 'logging_batch_files')
+    # if not os.path.exists(logging_dir):
+    #     os.makedirs(logging_dir)
+    #
+    # shutil.copy(first_csv_file, logging_dir)
+    # manage_csv_files(logging_dir)
 
     # prepare for compression
     csv_data = read_csv_as_string(first_csv_file)
@@ -1026,6 +1023,9 @@ def create_update_task(modbus, dbus, batteries, signals, csv_signals, main_loop)
         ALLOW = True
         alive = update(modbus, batteries, dbus, signals, csv_signals)
         elapsed_time = time.time() - start_time
+
+        # keep at most 1900 files at CSV_DIR for logging and aggregation
+        manage_csv_files(CSV_DIR, 1900)
         if elapsed_time >= 1200:
             create_batch_of_csv_files()
             start_time = time.time()
diff --git a/firmware/Cerbo_Release/update_all_cerbo_installations.sh b/firmware/Cerbo_Release/update_all_cerbo_installations.sh
index 386b8aa39..4ae84b187 100755
--- a/firmware/Cerbo_Release/update_all_cerbo_installations.sh
+++ b/firmware/Cerbo_Release/update_all_cerbo_installations.sh
@@ -60,37 +60,39 @@ echo -e "\n============================ Deploy ============================\n"
 #    "10.2.5.5" \
 #    "10.2.4.206")
 
-ip_addresses=(
-
-
-    "10.2.4.129" \
-    "10.2.3.10" \
-    "10.2.2.238" \
-    "10.2.2.44" \
-    "10.2.2.196" \
-    "10.2.4.181" \
-    "10.2.3.67" \
-    "10.2.2.127" \
-    "10.2.3.18" \
-    "10.2.3.46" \
-    "10.2.3.61" \
-    "10.2.2.133" \
-    "10.2.4.42" \
-    "10.2.2.217" \
-    "10.2.3.143" \
-    "10.2.2.181" \
-    "10.2.1.204" \
-    "10.2.2.171" \
-    "10.2.2.176" \
-    "10.2.2.218" \
-    "10.2.4.92" \
-    "10.2.4.25" \
-    "10.2.3.135" \
-    "10.2.3.165" \
-    "10.2.2.10" \
-    "10.2.3.51" \
-    "10.2.5.5" \
-    "10.2.4.206")
+ip_addresses=("10.2.2.36")
+
+#ip_addresses=(
+#
+#
+#    "10.2.4.129" \
+#    "10.2.3.10" \
+#    "10.2.2.238" \
+#    "10.2.2.44" \
+#    "10.2.2.196" \
+#    "10.2.4.181" \
+#    "10.2.3.67" \
+#    "10.2.2.127" \
+#    "10.2.3.18" \
+#    "10.2.3.46" \
+#    "10.2.3.61" \
+#    "10.2.2.133" \
+#    "10.2.4.42" \
+#    "10.2.2.217" \
+#    "10.2.3.143" \
+#    "10.2.2.181" \
+#    "10.2.1.204" \
+#    "10.2.2.171" \
+#    "10.2.2.176" \
+#    "10.2.2.218" \
+#    "10.2.4.92" \
+#    "10.2.4.25" \
+#    "10.2.3.135" \
+#    "10.2.3.165" \
+#    "10.2.2.10" \
+#    "10.2.3.51" \
+#    "10.2.5.5" \
+#    "10.2.4.206")
 
 ## scp template
 # scp "${battery_file_path}/start.sh" "root@"$ip_address":/data/dbus-fzsonick-48tl/"
diff --git a/firmware/Venus_Release/VenusReleaseFiles/dbus-fzsonick-48tl/config.py b/firmware/Venus_Release/VenusReleaseFiles/dbus-fzsonick-48tl/config.py
index 94b0ae62f..0a83e6064 100755
--- a/firmware/Venus_Release/VenusReleaseFiles/dbus-fzsonick-48tl/config.py
+++ b/firmware/Venus_Release/VenusReleaseFiles/dbus-fzsonick-48tl/config.py
@@ -54,6 +54,6 @@ INNOVENERGY_PROTOCOL_VERSION = '48TL200V3'
 
 
 # S3 Credentials
-S3BUCKET = "139-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e"
-S3KEY = "EXO62cdcf9a4da8ab9a260fe676"
-S3SECRET = "JoH3mWeQq5FRQZUFnCkcBijT3KuJ4u1P53PO0bI6OOw"
+S3BUCKET = "199-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e"
+S3KEY = "EXOe09da11251853e22b8b84bbb"
+S3SECRET = "bFH9uwoiZjMx-ADsoAOaEADx4I65DRnXF1YtQ7rfzls"
diff --git a/firmware/Venus_Release/VenusReleaseFiles/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py b/firmware/Venus_Release/VenusReleaseFiles/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py
index cd95213ea..f08209798 100755
--- a/firmware/Venus_Release/VenusReleaseFiles/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py
+++ b/firmware/Venus_Release/VenusReleaseFiles/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py
@@ -550,6 +550,9 @@ def create_update_task(modbus, service, batteries):
         elapsed_time = time.time() - start_time
         create_csv_files(csv_signals, statuses, node_numbers, alarms_number_list, warnings_number_list)
 
+        # keep at most 1900 files at CSV_DIR for logging and aggregation
+        manage_csv_files(CSV_DIR, 1900)
+
         num_files_in_csv_dir = count_files_in_folder(CSV_DIR)
         if elapsed_time >= 1200:
             print("CREATE BATCH ======================================>")
@@ -569,6 +572,8 @@ def create_update_task(modbus, service, batteries):
 def manage_csv_files(directory_path, max_files=20):
     csv_files = [f for f in os.listdir(directory_path) if os.path.isfile(os.path.join(directory_path, f))]
     csv_files.sort(key=lambda x: os.path.getctime(os.path.join(directory_path, x)))
+
+    print("len of csv files is "+str(len(csv_files)))
     # Remove oldest files if exceeds maximum
     while len(csv_files) > max_files:
         file_to_delete = os.path.join(directory_path, csv_files.pop(0))
@@ -625,15 +630,12 @@ def create_batch_of_csv_files():
     os.rename(temp_file_path, first_csv_file)
 
     # create a loggin directory that contains at max 20 batch files for logging info
-    logging_dir = os.path.join(CSV_DIR, 'logging_batch_files')
-    if not os.path.exists(logging_dir):
-        os.makedirs(logging_dir)
-
-    shutil.copy(first_csv_file, logging_dir)
-    manage_csv_files(logging_dir)
-
-    # keep at most 1900 files at CSV_DIR for logging and aggregation
-    manage_csv_files(CSV_DIR, 1900)
+    # logging_dir = os.path.join(CSV_DIR, 'logging_batch_files')
+    # if not os.path.exists(logging_dir):
+    #     os.makedirs(logging_dir)
+    #
+    # shutil.copy(first_csv_file, logging_dir)
+    # manage_csv_files(logging_dir)
 
 
     # print("The batch csv file is: {}".format(recent_csv_files[-1]))
@@ -644,13 +646,14 @@ def create_batch_of_csv_files():
         print("error while reading csv as string")
         return
 
+    # zip-comp additions
     compressed_csv = compress_csv_data(csv_data)
 
     # Use the name of the last (most recent) CSV file in sorted csv_files as the name for the compressed file
     last_csv_file_name = os.path.basename(recent_csv_files[-1]) if recent_csv_files else first_csv_filename
 
     # we send the csv files every 30 seconds and the timestamp is adjusted to be a multiple of 30
-    numeric_part = int(last_csv_file_name.split('.')[0][:-2])
+    numeric_part = int(last_csv_file_name.split('.')[0][:-2])
 
     # compressed_filename = "{}.csv".format(new_numeric_part)
     compressed_filename = "{}.csv".format(numeric_part)
@@ -659,7 +662,7 @@
 
     response = s3_config.create_put_request(compressed_filename, compressed_csv)
     # response = s3_config.create_put_request(first_csv_filename, csv_data)
-
+
     print(response)
     if response.status_code == 200:
         os.remove(first_csv_file)
@@ -672,19 +675,17 @@
             "Warnings": [],
             "Alarms": [],
         }
-
+
        print(status_message)
-
+
        status_message = json.dumps(status_message)
-       print("AFTERRRRRRRRRRRRRRRRRRRRRR DUMPSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS\n")
-
        try:
            channel.basic_publish(exchange="", routing_key="statusQueue", body=status_message)
        except:
            channel = SubscribeToQueue()
            channel.basic_publish(exchange="", routing_key="statusQueue", body=status_message)
 
-
+
        print("Successfully sent the heartbit with timestamp")
    else:
        # we save data that were not successfully uploaded in s3 in a failed directory inside the CSV_DIR for logging
diff --git a/firmware/Venus_Release/update_all_venus_installations.sh b/firmware/Venus_Release/update_all_venus_installations.sh
index 3f99f97db..1ac9c9faf 100755
--- a/firmware/Venus_Release/update_all_venus_installations.sh
+++ b/firmware/Venus_Release/update_all_venus_installations.sh
@@ -15,80 +15,135 @@ echo -e "\n============================ Deploy ============================\n"
 
 
 # Steiger, Rheinau 10.2.0.188 failed with ssh
 
-#ip_addresses=("10.2.0.249")
+#ip_addresses=("10.2.0.144")
 
 ip_addresses=(
-"10.2.1.84"
-"10.2.1.83"
-"10.2.1.74"
-"10.2.1.60"
-"10.2.1.53"
-"10.2.1.39"
-"10.2.1.38"
 "10.2.1.35"
-"10.2.1.33"
-"10.2.1.32"
-"10.2.1.247"
-"10.2.1.219"
-"10.2.1.2"
-"10.2.1.177"
-"10.2.1.173"
-"10.2.1.171"
+"10.2.1.159"
+"10.2.0.227"
+"10.2.0.211"
+"10.2.1.134"
+"10.2.0.130"
 "10.2.1.169"
+"10.2.0.105"
+"10.2.0.220"
+"10.2.1.124"
+"10.2.1.2"
+"10.2.1.158"
+"10.2.0.195"
+"10.2.1.171"
+"10.2.0.225"
+"10.2.1.53"
+"10.2.0.107"
+"10.2.0.153"
+"10.2.1.106"
+"10.2.1.117"
+"10.2.0.145"
+"10.2.0.110"
+"10.2.1.177"
+"10.2.1.247"
+"10.2.0.101"
+"10.2.0.108"
+"10.2.1.120"
+"10.2.1.160"
+"10.2.1.173"
+"10.2.0.113"
+"10.2.0.150"
+"10.2.0.233"
+"10.2.1.162"
+"10.2.1.138"
+"10.2.1.100"
 "10.2.1.165"
 "10.2.1.163"
-"10.2.1.162"
-"10.2.1.160"
-"10.2.1.159"
-"10.2.1.158"
-"10.2.1.142"
-"10.2.1.141"
-"10.2.1.138"
-"10.2.1.134"
-"10.2.1.130"
-"10.2.1.125"
+"10.2.1.32"
+"10.2.1.110"
+"10.2.0.103"
+"10.2.1.113"
+"10.2.0.216"
+"10.2.0.184"
+"10.2.0.191"
+"10.2.1.60" +"10.2.1.219" +"10.2.0.214" +"10.2.1.83" +"10.2.0.217" +"10.2.1.39" +"10.2.0.194" "10.2.1.81" - "10.2.1.124" - "10.2.1.120" - "10.2.1.118" - "10.2.1.117" - "10.2.1.113" - "10.2.1.110" - "10.2.1.106" - "10.2.1.100" - "10.2.0.98" - "10.2.0.249" - "10.2.0.233" - "10.2.0.227" - "10.2.0.225" - "10.2.0.220" - "10.2.0.217" - "10.2.0.216" - "10.2.0.214" - "10.2.0.211" - "10.2.0.196" - "10.2.0.195" - "10.2.0.194" - "10.2.0.193" - "10.2.0.191" - "10.2.0.184" - "10.2.0.154" - "10.2.0.153" - "10.2.0.150" - "10.2.0.145" - "10.2.0.144" - "10.2.0.138" - "10.2.0.133" - "10.2.0.130" - "10.2.0.113" - "10.2.0.110" - "10.2.0.109" - "10.2.0.108" - "10.2.0.107" - "10.2.0.105" - "10.2.0.103" - "10.2.0.101" - ) +"10.2.1.125" +"10.2.0.154" +"10.2.0.196" +"10.2.1.84" +"10.2.1.130" +"10.2.1.38" +"10.2.1.33" +"10.2.0.249" +"10.2.0.133" +"10.2.1.118" +"10.2.0.138" +"10.2.0.144" +"10.2.0.188" +"10.2.1.74" +"10.2.1.141" +"10.2.1.142" +"10.2.0.193" +"10.2.1.145" +"10.2.1.15" +"10.2.1.70" +"10.2.0.135" +"10.2.0.247" +"10.2.0.134" +"10.2.1.21" +"10.2.1.73" +"10.2.0.243" +"10.2.1.19" +"10.2.1.166" +"10.2.0.192" +"10.2.1.12" +"10.2.2.188" +"10.2.0.158" +"10.2.1.146" +"10.2.1.27" +"10.2.0.202" +"10.2.0.157" +"10.2.1.55" +"10.2.1.16" +"10.2.1.28" +"10.2.0.254" +"10.2.1.128" +"10.2.1.58" +"10.2.0.187" +"10.2.1.156" +"10.2.1.137" +"10.2.1.135" +"10.2.1.24" +"10.2.1.109" +"10.2.1.90" +"10.2.1.153" +"10.2.1.111" +"10.2.1.71" +"10.2.1.37" +"10.2.0.127" +"10.2.0.126" +"10.2.1.22" +"10.2.1.91" +"10.2.1.123" +"10.2.1.220" +"10.2.1.114" +"10.2.1.41" +"10.2.0.125" +"10.2.1.62" +"10.2.0.161" +"10.2.1.121" +"10.2.1.10" +"10.2.0.112" +"10.2.0.111" +"10.2.0.218" +"10.2.1.49" +"10.2.0.230" +"10.2.1.170" +"10.2.0.114" +) ## scp template diff --git a/typescript/frontend-marios2/src/App.tsx b/typescript/frontend-marios2/src/App.tsx index 6790e1cf3..3ac216888 100644 --- a/typescript/frontend-marios2/src/App.tsx +++ b/typescript/frontend-marios2/src/App.tsx @@ -20,6 +20,7 @@ import InstallationsContextProvider from './contexts/InstallationsContextProvide import AccessContextProvider from './contexts/AccessContextProvider'; import WebSocketContextProvider from './contexts/WebSocketContextProvider'; import SalidomoInstallationTabs from './content/dashboards/SalidomoInstallations'; +import { ProductIdContext } from './contexts/ProductIdContextProvider'; function App() { const context = useContext(UserContext); @@ -29,6 +30,8 @@ function App() { const navigate = useNavigate(); const searchParams = new URLSearchParams(location.search); const username = searchParams.get('username'); + const { setAccessToSalimax, setAccessToSalidomo } = + useContext(ProductIdContext); const [language, setLanguage] = useState('en'); const getTranslations = () => { @@ -67,7 +70,13 @@ function App() { if (response.data && response.data.token) { setNewToken(response.data.token); setUser(response.data.user); - navigate(routes.installations); + setAccessToSalimax(response.data.accessToSalimax); + setAccessToSalidomo(response.data.accessToSalidomo); + if (response.data.accessToSalimax) { + navigate(routes.installations); + } else { + navigate(routes.salidomo_installations); + } } }) .catch(() => {}); diff --git a/typescript/frontend-marios2/src/components/SetNewPassword.tsx b/typescript/frontend-marios2/src/components/SetNewPassword.tsx index ab51e060a..c9cc87eb1 100644 --- a/typescript/frontend-marios2/src/components/SetNewPassword.tsx +++ b/typescript/frontend-marios2/src/components/SetNewPassword.tsx @@ -55,6 +55,7 @@ function SetNewPassword() { .then((res) => 
         setLoading(false);
         currentUser.mustResetPassword = false;
+        setUser(currentUser);
         window.location.reload();
       })