Compare commits

2 Commits: 10e2d341d9...e39ef59346

Author | SHA1 | Date
---|---|---
Noe | e39ef59346 |
Noe | 29a0071881 |
@@ -179,16 +179,22 @@ public class Controller : ControllerBase
         return str1.Substring(0, i);
     }

-    string commonPrefix = FindCommonPrefix(start.ToString(), end.ToString());

     Int64 startTimestamp = Int64.Parse(start.ToString().Substring(0,5));
     Int64 endTimestamp = Int64.Parse(end.ToString().Substring(0,5));

+    if (installation.Product == 1)
+    {
+        start = Int32.Parse(start.ToString().Substring(0, start.ToString().Length - 2));
+        end = Int32.Parse(end.ToString().Substring(0, end.ToString().Length - 2));
+    }

     string configPath = "/home/ubuntu/.s3cfg";

     while (startTimestamp <= endTimestamp)
     {
-        string bucketPath = "s3://"+installation.S3BucketId + "-3e5b3069-214a-43ee-8d85-57d72000c19d/"+startTimestamp;
+        string bucketPath = installation.Product==0? "s3://"+installation.S3BucketId + "-3e5b3069-214a-43ee-8d85-57d72000c19d/"+startTimestamp :
+                            "s3://"+installation.S3BucketId + "-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e/"+startTimestamp;
         Console.WriteLine("Fetching data for "+startTimestamp);

         try
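The hunk above routes product 0 and product 1 installations to different bucket-name suffixes and, for product 1, strips the last two digits from the requested `start`/`end` bounds before they are compared against the timestamps parsed from the bucket listing (see the next hunk). A minimal Python sketch of that selection, not from the repo; the helper name is invented, the suffix GUIDs and the 5-digit `Substring(0,5)` prefix are copied from the diff:

```python
# Hypothetical helper illustrating the bucket selection in the hunk above.
PRODUCT_SUFFIX = {
    0: "3e5b3069-214a-43ee-8d85-57d72000c19d",  # Product == 0
    1: "c0436b6a-d276-4cd8-9c44-1eae86cf5d0e",  # Product == 1
}

def bucket_path(bucket_id, product, timestamp_prefix):
    # timestamp_prefix corresponds to the 5-digit Substring(0,5) prefix
    # the controller loops over while fetching data.
    return "s3://{}-{}/{}".format(bucket_id, PRODUCT_SUFFIX[product], timestamp_prefix)

# bucket_path(17, 1, 17180) -> "s3://17-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e/17180"
```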
@@ -234,6 +240,7 @@ public class Controller : ControllerBase
     foreach (var line in output.Split('\n'))
     {
         var match = regex.Match(line);

         if (match.Success && long.Parse(match.Groups[1].Value) >= start && long.Parse(match.Groups[1].Value) <= end)
         {
             allTimestamps.Add(long.Parse(match.Groups[1].Value));
@@ -58,15 +58,24 @@ public static class RabbitMqManager
     if (receivedStatusMessage != null)
     {
         Installation installation = Db.Installations.FirstOrDefault(f => f.Product == receivedStatusMessage.Product && f.S3BucketId == receivedStatusMessage.InstallationId);
-        int installationId = (int )installation.Id;
-        //Console.WriteLine("Received a message from installation: " + installationId + " , product is: "+receivedStatusMessage.Product+ " and status is: " + receivedStatusMessage.Status);
+        int installationId = (int)installation.Id;
+        //if (installationId == 138)
+        //{
+        //    Console.WriteLine("Received a message from installation: " + installationId + " , product is: " + receivedStatusMessage.Product + " and status is: " + receivedStatusMessage.Status);
+        //}

         //This is a heartbit message, just update the timestamp for this installation.
         //There is no need to notify the corresponding front-ends.
         //Every 15 iterations(30 seconds), the installation sends a heartbit message to the queue
         if (receivedStatusMessage.Type == MessageType.Heartbit)
         {
-            //Console.WriteLine("This is a heartbit message from installation: " + installationId + " Name of the file is "+ receivedStatusMessage.Timestamp);
+            if (installation.Product == 1 && installation.Device == 2)
+            {
+                Console.WriteLine("This is a heartbit message from installation: " + installationId + " Name of the file is " + receivedStatusMessage.Timestamp);
+            }
         }
         else
         {
@@ -34,13 +34,18 @@ public static class WebsocketManager
     while (true){
         lock (InstallationConnections){
             foreach (var installationConnection in InstallationConnections){
-                if (installationConnection.Value.Product==1 && (DateTime.Now - installationConnection.Value.Timestamp) > TimeSpan.FromMinutes(20)){
+                if (installationConnection.Value.Product==1 && (DateTime.Now - installationConnection.Value.Timestamp) > TimeSpan.FromMinutes(20))
+                {
+                    Console.WriteLine("Installation ID is "+installationConnection.Key);
+                    Console.WriteLine("installationConnection.Value.Timestamp is "+installationConnection.Value.Timestamp);
+                    Console.WriteLine("diff is "+(DateTime.Now-installationConnection.Value.Timestamp));

                     installationConnection.Value.Status = -1;
                     if (installationConnection.Value.Connections.Count > 0){InformWebsocketsForInstallation(installationConnection.Key);}
                 }
             }
         }
-        await Task.Delay(TimeSpan.FromMinutes(30));
+        await Task.Delay(TimeSpan.FromMinutes(1));
     }
 }
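With the delay dropped from 30 minutes to 1 minute, the watchdog above now notices within about a minute when a product-1 installation has gone more than 20 minutes without a heartbit. A hedged Python sketch of the same staleness rule, assuming a plain dict of last-seen times (the function and dict shape are illustrative, not from the repo):

```python
# Sketch only: the staleness rule applied by the loop above.
from datetime import datetime, timedelta

STALE_AFTER = timedelta(minutes=20)  # threshold used in the diff

def find_stale(last_seen_by_id, now=None):
    """Return installation ids whose last heartbit is older than STALE_AFTER."""
    now = now or datetime.now()
    return [iid for iid, seen in last_seen_by_id.items() if now - seen > STALE_AFTER]

# find_stale({5: datetime.now() - timedelta(minutes=25)}) -> [5]
```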
@@ -13,9 +13,9 @@ DEVICE_INSTANCE = 1
 SERVICE_NAME_PREFIX = 'com.victronenergy.battery.'

 #s3 configuration
-S3BUCKET = "2-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e"
-S3KEY = "EXO5b2e35442791260eaaa7bdc8"
-S3SECRET = "XFFOVzenDiEQoLPmhK6ML9RfQfsAMhrAs25MfJxi-24"
+S3BUCKET = "17-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e"
+S3KEY = "EXO067a6ceb91816b42312226a9"
+S3SECRET = "79tQlqQuS6yhexP0IT-6-bIdIP2FGGTQLn2e2oE4FC8"

 # driver configuration
@@ -903,7 +903,7 @@ def count_files_in_folder(folder_path):

 def create_batch_of_csv_files():

-    global prev_status
+    global prev_status,INSTALLATION_ID, PRODUCT_ID
     # list all files in the directory
     files = os.listdir(CSV_DIR)
@@ -973,7 +973,7 @@ def create_batch_of_csv_files():
     # Use the name of the last (most recent) CSV file in sorted csv_files as the name for the compressed file
     last_csv_file_name = os.path.basename(recent_csv_files[-1]) if recent_csv_files else first_csv_filename

-    numeric_part = int(last_csv_file_name.split('.')[0])
+    numeric_part = int(last_csv_file_name.split('.')[0][:-2])
     compressed_filename = "{}.csv".format(numeric_part)

     response = s3_config.create_put_request(compressed_filename, compressed_csv)
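The only change here is the extra `[:-2]`: the upload name is now the CSV file-name stem with its last two digits dropped. A worked example (the file name is hypothetical, chosen only to show the slice):

```python
# Hypothetical file name; the [:-2] slice is the point of the example.
last_csv_file_name = "171800001234.csv"
numeric_part = int(last_csv_file_name.split('.')[0][:-2])  # 1718000012
compressed_filename = "{}.csv".format(numeric_part)        # "1718000012.csv"
```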
@@ -987,10 +987,13 @@ def create_batch_of_csv_files():
             "Type": 1,
             "Warnings": [],
             "Alarms": [],
-            "Timestamp": numeric_part
         }
         status_message = json.dumps(status_message)
-        channel.basic_publish(exchange="", routing_key="statusQueue", body=status_message)
+        try:
+            channel.basic_publish(exchange="", routing_key="statusQueue", body=status_message)
+        except:
+            channel = SubscribeToQueue()
+            channel.basic_publish(exchange="", routing_key="statusQueue", body=status_message)
         print("Successfully sent the heartbit with timestamp")
     else:
         # we save data that were not successfully uploaded in s3 in a failed directory inside the CSV_DIR for logging
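The publish call is now retried once through a fresh channel when the first attempt raises, using the driver's own `SubscribeToQueue()` helper. A small sketch of that pattern with the reconnect step passed in as a callable, so the snippet stands alone (function name and signature are assumptions):

```python
def publish_status(channel, body, reconnect):
    """Publish to statusQueue, reopening the channel once if the first attempt fails.

    `reconnect` is a zero-argument callable returning a fresh channel
    (SubscribeToQueue in the driver).
    """
    try:
        channel.basic_publish(exchange="", routing_key="statusQueue", body=body)
    except Exception:
        channel = reconnect()
        channel.basic_publish(exchange="", routing_key="statusQueue", body=body)
    return channel  # caller keeps the (possibly re-created) channel
```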
@@ -10,11 +10,87 @@ cerbo_release_file_path="./CerboReleaseFiles"
 echo -e "\n============================ Deploy ============================\n"
 # Polycom 10.2.4.96 are Cerbo with fork usb0

-ip_addresses=("10.2.1.193" "10.2.2.61" "10.2.3.225" "10.2.2.248" "10.2.2.194" "10.2.1.241" "10.2.2.87" "10.2.1.252" "10.2.1.215" "10.2.1.234" \
-"10.2.2.117" "10.2.3.117" "10.2.4.129" "10.2.3.10" "10.2.2.238" "10.2.2.44" "10.2.2.196" "10.2.2.24" "10.2.4.181" "10.2.3.67" \
-"10.2.2.127" "10.2.3.18" "10.2.2.118" "10.2.3.46" "10.2.3.61" "10.2.2.133" "10.2.4.42" "10.2.2.217" "10.2.3.143" "10.2.2.181" "10.2.1.204" "10.2.2.171" \
-"10.2.2.176" "10.2.2.218" "10.2.2.36" "10.2.4.92" "10.2.4.25" "10.2.3.135" "10.2.3.165" "10.2.3.51" "10.2.3.244" "10.2.0.179" "10.2.2.10" \
-"10.2.4.155" "10.2.4.127" "10.2.5.5" "10.2.4.206")
+#ip_addresses=("10.2.0.179" \
+# "10.2.1.241" \
+# "10.2.2.118" \
+# "10.2.4.155" \
+# "10.2.3.244" \
+# "10.2.4.127" \
+# "10.2.2.36" \
+# "10.2.4.96" \
+# "10.2.1.193" \
+# "10.2.2.61" \
+# "10.2.3.225" \
+# "10.2.2.248" \
+# "10.2.2.194" \
+# "10.2.3.117" \
+# "10.2.2.24" \
+# "10.2.2.87" \
+# "10.2.1.252" \
+# "10.2.1.215" \
+# "10.2.1.234" \
+# "10.2.2.117" \
+# "10.2.4.129" \
+# "10.2.3.10" \
+# "10.2.2.238" \
+# "10.2.2.44" \
+# "10.2.2.196" \
+# "10.2.4.181" \
+# "10.2.3.67" \
+# "10.2.2.127" \
+# "10.2.3.18" \
+# "10.2.3.46" \
+# "10.2.3.61" \
+# "10.2.2.133" \
+# "10.2.4.42" \
+# "10.2.2.217" \
+# "10.2.3.143" \
+# "10.2.2.181" \
+# "10.2.1.204" \
+# "10.2.2.171" \
+# "10.2.2.176" \
+# "10.2.2.218" \
+# "10.2.4.92" \
+# "10.2.4.25" \
+# "10.2.3.135" \
+# "10.2.3.165" \
+# "10.2.2.10" \
+# "10.2.3.51" \
+# "10.2.5.5" \
+# "10.2.4.206")
+
+ip_addresses=(
+
+
+"10.2.4.129" \
+"10.2.3.10" \
+"10.2.2.238" \
+"10.2.2.44" \
+"10.2.2.196" \
+"10.2.4.181" \
+"10.2.3.67" \
+"10.2.2.127" \
+"10.2.3.18" \
+"10.2.3.46" \
+"10.2.3.61" \
+"10.2.2.133" \
+"10.2.4.42" \
+"10.2.2.217" \
+"10.2.3.143" \
+"10.2.2.181" \
+"10.2.1.204" \
+"10.2.2.171" \
+"10.2.2.176" \
+"10.2.2.218" \
+"10.2.4.92" \
+"10.2.4.25" \
+"10.2.3.135" \
+"10.2.3.165" \
+"10.2.2.10" \
+"10.2.3.51" \
+"10.2.5.5" \
+"10.2.4.206")

 ## scp template
 # scp "${battery_file_path}/start.sh" "root@"$ip_address":/data/dbus-fzsonick-48tl/"
@@ -23,12 +99,14 @@ ip_addresses=("10.2.1.193" "10.2.2.61" "10.2.3.225" "10.2.2.248" "10.2.2.194" "1

 for ip_address in "${ip_addresses[@]}"; do
     ## stop battery service if change any file related to battery service otherwise no need to do this
-    # ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -d /service/dbus-fzsonick-48tl.*"
+    ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -d /service/dbus-fzsonick-48tl.*"
     scp "${cerbo_release_file_path}/flows.json" "root@"$ip_address":/data/home/nodered/.node-red/"
+    scp "${cerbo_release_file_path}/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py" "root@"$ip_address":/opt/victronenergy/dbus-fzsonick-48tl"
+    scp "${cerbo_release_file_path}/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py" "root@"$ip_address":/data/dbus-fzsonick-48tl"
     ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "dbus -y com.victronenergy.platform /Services/NodeRed/Mode SetValue %0"
     ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "dbus -y com.victronenergy.platform /Services/NodeRed/Mode SetValue %1"
     ## start battery service
-    # ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -u /service/dbus-fzsonick-48tl.*"
+    ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -u /service/dbus-fzsonick-48tl.*"

     echo "Deployed and ran commands on $ip_address"
 done
@@ -54,6 +54,6 @@ INNOVENERGY_PROTOCOL_VERSION = '48TL200V3'


 # S3 Credentials
-S3BUCKET = "91-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e"
-S3KEY = "EXOe6dce12288f11a676c2025a1"
-S3SECRET = "xpqM4Eh0Gg1HaYVkzlR9X6PwYa-QNb-mVk0XUkwW3cc"
+S3BUCKET = "139-c0436b6a-d276-4cd8-9c44-1eae86cf5d0e"
+S3KEY = "EXO62cdcf9a4da8ab9a260fe676"
+S3SECRET = "JoH3mWeQq5FRQZUFnCkcBijT3KuJ4u1P53PO0bI6OOw"
@@ -16,6 +16,7 @@ from pymodbus.other_message import ReportSlaveIdRequest
 from pymodbus.pdu import ExceptionResponse
 from pymodbus.register_read_message import ReadInputRegistersResponse
 from data import BatteryStatus, BatterySignal, Battery, ServiceSignal
+from data import BatteryStatus, BatterySignal, Battery, ServiceSignal
 from python_libs.ie_dbus.dbus_service import DBusService

 import time
@@ -579,7 +580,7 @@ def insert_id(path, id_number):
     return "/".join(parts)


 def create_batch_of_csv_files():
-    global prev_status
+    global prev_status,channel,INSTALLATION_ID, PRODUCT_ID
     # list all files in the directory
     files = os.listdir(CSV_DIR)
@@ -649,11 +650,17 @@ def create_batch_of_csv_files():
         last_csv_file_name = os.path.basename(recent_csv_files[-1]) if recent_csv_files else first_csv_filename

         # we send the csv files every 30 seconds and the timestamp is adjusted to be a multiple of 30
         numeric_part = int(last_csv_file_name.split('.')[0][:-2])

+        # compressed_filename = "{}.csv".format(new_numeric_part)
         compressed_filename = "{}.csv".format(numeric_part)

+        print("FILE NAME =========================================================> ", compressed_filename)
+
         response = s3_config.create_put_request(compressed_filename, compressed_csv)
         # response = s3_config.create_put_request(first_csv_filename, csv_data)

+        print(response)
         if response.status_code == 200:
             os.remove(first_csv_file)
             print("Successfully uploaded the compresseed batch of files in s3")
@@ -664,10 +671,20 @@ def create_batch_of_csv_files():
             "Type": 1,
             "Warnings": [],
             "Alarms": [],
-            "Timestamp": numeric_part
         }

+        print(status_message)
+
         status_message = json.dumps(status_message)
-        channel.basic_publish(exchange="", routing_key="statusQueue", body=status_message)
+        print("AFTERRRRRRRRRRRRRRRRRRRRRR DUMPSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS\n")
+
+        try:
+            channel.basic_publish(exchange="", routing_key="statusQueue", body=status_message)
+        except:
+            channel = SubscribeToQueue()
+            channel.basic_publish(exchange="", routing_key="statusQueue", body=status_message)
+
         print("Successfully sent the heartbit with timestamp")
     else:
         # we save data that were not successfully uploaded in s3 in a failed directory inside the CSV_DIR for logging
@@ -735,12 +752,12 @@ def load_battery_counts():
             return [int(row[0]) for row in reader]
     return []


 def save_battery_counts(battery_counts):
-    with open(BATTERY_COUNTS_FILE, 'w', newline='') as f:
+    with open(BATTERY_COUNTS_FILE, 'wb') as f: # Use 'wb' mode for Python 2
         writer = csv.writer(f)
         for count in battery_counts:
             writer.writerow([count])

 def main(argv):
     # type: (List[str]) -> ()
     print("INSIDE DBUS SONICK")
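The `'wb'` mode matches the comment in the hunk: this driver runs under Python 2, where `csv.writer` expects a binary-mode file, while Python 3 wants text mode with `newline=''`. A version-agnostic sketch (an assumption, in case the same module is ever run under either interpreter):

```python
import csv
import sys

def save_battery_counts(battery_counts, path):
    # 'wb' on Python 2, text mode with newline='' on Python 3.
    if sys.version_info[0] < 3:
        f = open(path, 'wb')
    else:
        f = open(path, 'w', newline='')
    with f:
        writer = csv.writer(f)
        for count in battery_counts:
            writer.writerow([count])
```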
@@ -14,12 +14,82 @@ echo -e "\n============================ Deploy ============================\n"
 # Buecheler 10.2.0.224 failed with ssh
 # Steiger, Rheinau 10.2.0.188 failed with ssh

-ip_addresses=("10.2.0.104" "10.2.1.134" "10.2.0.130" "10.2.1.169" "10.2.1.159" "10.2.0.227" "10.2.0.211" "10.2.0.105" "10.2.0.225" "10.2.1.53" "10.2.0.107" \
-"10.2.0.108" "10.2.1.160" "10.2.0.98" "10.2.0.94" "10.2.1.110" "10.2.0.220" "10.2.0.153" "10.2.1.106" "10.2.0.184" "10.2.1.60" "10.2.0.217" "10.2.1.81" "10.2.1.125" \
-"10.2.0.154" "10.2.0.196" "10.2.1.84" "10.2.0.99" "10.2.0.109" "10.2.1.35" "10.2.0.110" "10.2.1.2" "10.2.1.163" "10.2.1.120" "10.2.1.173" "10.2.0.113" "10.2.0.233" \
-"10.2.1.124" "10.2.0.193" "10.2.1.142" "10.2.1.141" "10.2.1.74" "10.2.0.141" "10.2.0.144" "10.2.0.138" "10.2.1.118" "10.2.0.133" "10.2.0.249" "10.2.1.33" "10.2.1.38" \
-"10.2.0.185" "10.2.1.130" "10.2.0.194" "10.2.1.39" "10.2.1.83" "10.2.0.214" "10.2.1.219" "10.2.0.191" "10.2.0.216" "10.2.1.113" "10.2.0.103" "10.2.1.32" "10.2.1.100" \
-"10.2.1.138" "10.2.1.165" "10.2.1.162" "10.2.0.150" "10.2.1.158" "10.2.0.195" "10.2.1.171" "10.2.1.117" "10.2.0.145" "10.2.1.177" "10.2.1.247" "10.2.0.101")
+#ip_addresses=("10.2.0.249")
+
+ip_addresses=(
+"10.2.1.84"
+"10.2.1.83"
+"10.2.1.74"
+"10.2.1.60"
+"10.2.1.53"
+"10.2.1.39"
+"10.2.1.38"
+"10.2.1.35"
+"10.2.1.33"
+"10.2.1.32"
+"10.2.1.247"
+"10.2.1.219"
+"10.2.1.2"
+"10.2.1.177"
+"10.2.1.173"
+"10.2.1.171"
+"10.2.1.169"
+"10.2.1.165"
+"10.2.1.163"
+"10.2.1.162"
+"10.2.1.160"
+"10.2.1.159"
+"10.2.1.158"
+"10.2.1.142"
+"10.2.1.141"
+"10.2.1.138"
+"10.2.1.134"
+"10.2.1.130"
+"10.2.1.125"
+"10.2.1.81"
+"10.2.1.124"
+"10.2.1.120"
+"10.2.1.118"
+"10.2.1.117"
+"10.2.1.113"
+"10.2.1.110"
+"10.2.1.106"
+"10.2.1.100"
+"10.2.0.98"
+"10.2.0.249"
+"10.2.0.233"
+"10.2.0.227"
+"10.2.0.225"
+"10.2.0.220"
+"10.2.0.217"
+"10.2.0.216"
+"10.2.0.214"
+"10.2.0.211"
+"10.2.0.196"
+"10.2.0.195"
+"10.2.0.194"
+"10.2.0.193"
+"10.2.0.191"
+"10.2.0.184"
+"10.2.0.154"
+"10.2.0.153"
+"10.2.0.150"
+"10.2.0.145"
+"10.2.0.144"
+"10.2.0.138"
+"10.2.0.133"
+"10.2.0.130"
+"10.2.0.113"
+"10.2.0.110"
+"10.2.0.109"
+"10.2.0.108"
+"10.2.0.107"
+"10.2.0.105"
+"10.2.0.103"
+"10.2.0.101"
+)

 ## scp template
 # scp "${battery_file_path}/start.sh" "root@"$ip_address":/data/dbus-fzsonick-48tl/"
@@ -28,13 +98,17 @@ ip_addresses=("10.2.0.104" "10.2.1.134" "10.2.0.130" "10.2.1.169" "10.2.1.159" "

 for ip_address in "${ip_addresses[@]}"; do
     ## stop battery service if change any file related to battery service otherwise no need to do this
-    # ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -d /service/dbus-fzsonick-48tl.*"
+    ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -d /service/dbus-fzsonick-48tl.*"
     ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -d /service/controller"
     scp "${venus_release_file_path}/controller.py" "root@"$ip_address":/data"
+    scp "${venus_release_file_path}/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py" "root@"$ip_address":/opt/innovenergy/dbus-fzsonick-48tl"
+    scp "${venus_release_file_path}/dbus-fzsonick-48tl/dbus-fzsonick-48tl.py" "root@"$ip_address":/data/dbus-fzsonick-48tl"
     scp "${venus_release_file_path}/controller.py" "root@"$ip_address":/opt/innovenergy/controller"

     ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -u /service/controller"
     ## start battery service
-    # ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -u /service/dbus-fzsonick-48tl.*"
+    ssh -o StrictHostKeyChecking=no "$username"@"$ip_address" "svc -u /service/dbus-fzsonick-48tl.*"

     echo "Deployed and ran commands on $ip_address"
 done
@@ -8,7 +8,7 @@ import {
   Typography
 } from '@mui/material';
 import { FormattedMessage } from 'react-intl';
-import React, { useEffect, useState } from 'react';
+import React, { useContext, useEffect, useState } from 'react';
 import { I_S3Credentials } from '../../../interfaces/S3Types';
 import ReactApexChart from 'react-apexcharts';
 import { getChartOptions } from '../Overview/chartOptions';
@@ -25,6 +25,7 @@ import { AdapterDayjs } from '@mui/x-date-pickers/AdapterDayjs';
 import CircularProgress from '@mui/material/CircularProgress';
 import { useLocation, useNavigate } from 'react-router-dom';
 import ArrowBackIcon from '@mui/icons-material/ArrowBack';
+import { ProductIdContext } from '../../../contexts/ProductIdContextProvider';

 interface MainStatsProps {
   s3Credentials: I_S3Credentials;
@@ -49,6 +50,7 @@ function MainStats(props: MainStatsProps) {
   const [dateSelectionError, setDateSelectionError] = useState('');
   const [loading, setLoading] = useState(true);
   const location = useLocation();
+  const { product, setProduct } = useContext(ProductIdContext);

   const blueColors = [
     '#99CCFF',
@@ -93,7 +95,15 @@ function MainStats(props: MainStatsProps) {
     const resultPromise: Promise<{
       chartData: BatteryDataInterface;
       chartOverview: BatteryOverviewInterface;
-    }> = transformInputToBatteryViewData(props.s3Credentials, props.id);
+    }> = transformInputToBatteryViewData(
+      props.s3Credentials,
+      props.id,
+      product,
+      UnixTime.fromTicks(new Date().getTime() / 1000).earlier(
+        TimeSpan.fromDays(1)
+      ),
+      UnixTime.fromTicks(new Date().getTime() / 1000)
+    );

     resultPromise
       .then((result) => {
@@ -184,6 +194,7 @@ function MainStats(props: MainStatsProps) {
     }> = transformInputToBatteryViewData(
       props.s3Credentials,
       props.id,
+      product,
       UnixTime.fromTicks(startDate.unix()),
       UnixTime.fromTicks(endDate.unix())
     );
@@ -245,6 +256,7 @@ function MainStats(props: MainStatsProps) {
     }> = transformInputToBatteryViewData(
       props.s3Credentials,
       props.id,
+      product,
       UnixTime.fromTicks(startX).earlier(TimeSpan.fromHours(2)),
       UnixTime.fromTicks(endX).earlier(TimeSpan.fromHours(2))
     );
@@ -418,20 +430,20 @@ function MainStats(props: MainStatsProps) {
             <ArrowBackIcon />
           </IconButton>

-          {/*<Button*/}
-          {/*  variant="contained"*/}
-          {/*  onClick={handleSetDate}*/}
-          {/*  disabled={loading}*/}
-          {/*  sx={{*/}
-          {/*    marginTop: '20px',*/}
-          {/*    marginLeft: '20px',*/}
-          {/*    backgroundColor: dateOpen ? '#808080' : '#ffc04d',*/}
-          {/*    color: '#000000',*/}
-          {/*    '&:hover': { bgcolor: '#f7b34d' }*/}
-          {/*  }}*/}
-          {/*>*/}
-          {/*  <FormattedMessage id="set_date" defaultMessage="Set Date" />*/}
-          {/*</Button>*/}
+          <Button
+            variant="contained"
+            onClick={handleSetDate}
+            disabled={loading}
+            sx={{
+              marginTop: '20px',
+              marginLeft: '20px',
+              backgroundColor: dateOpen ? '#808080' : '#ffc04d',
+              color: '#000000',
+              '&:hover': { bgcolor: '#f7b34d' }
+            }}
+          >
+            <FormattedMessage id="set_date" defaultMessage="Set Date" />
+          </Button>
         </Grid>
         <Grid
           container
@@ -74,14 +74,14 @@ export const fetchData = (
       if (r.status === 404) {
         return Promise.resolve(FetchResult.notAvailable);
       } else if (r.status === 200) {
-        console.log('FOUND ITTTTTTTTTTTT');
+        //console.log('FOUND ITTTTTTTTTTTT');
         const csvtext = await r.text(); // Assuming the server returns the Base64 encoded ZIP file as text
         const contentEncoding = r.headers.get('content-type');

-        console.log(contentEncoding);
+        //console.log(contentEncoding);

         if (contentEncoding != 'application/base64; charset=utf-8') {
-          console.log('uncompressed');
+          // console.log('uncompressed');
           return parseChunk(csvtext);
         }
@@ -94,7 +94,7 @@ export const fetchData = (
         // Assuming the CSV file is named "data.csv" inside the ZIP archive
         const csvContent = await zip.file('data.csv').async('text');

-        console.log(csvContent);
+        //console.log(csvContent);

         return parseChunk(csvContent);
       } else {
@@ -101,7 +101,7 @@ function SalidomoInstallation(props: singleInstallationProps) {
       }
     }

-    if (i <= 0) {
+    if (i >= timeperiodToSearch) {
       setConnected(false);
       setLoading(false);
       return false;
@@ -13,7 +13,14 @@ export class UnixTime {
     return UnixTime.fromTicks(date.getTime() / 1000);
   }

-  public static fromTicks(ticks: number): UnixTime {
+  public static fromTicks(
+    ticks: number,
+    dont_change: boolean = false
+  ): UnixTime {
+    if (dont_change) {
+      return new UnixTime(Math.floor(ticks));
+    }
+
     if (Math.floor(ticks) % 2 != 0) {
       return new UnixTime(Math.floor(ticks) + 1);
     }
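`fromTicks` previously always floored the tick value and rounded odd seconds up to the next even second; the new `dont_change` flag bypasses that rounding so timestamps already returned by the backend can be used as-is. Equivalent logic sketched in Python:

```python
import math

def from_ticks(ticks, dont_change=False):
    t = math.floor(ticks)
    if dont_change:
        return t                        # use the value as-is
    return t + 1 if t % 2 != 0 else t   # round odd seconds up to even

# from_ticks(1718000003)        -> 1718000004
# from_ticks(1718000003, True)  -> 1718000003
```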
@@ -4,7 +4,7 @@ import {
   fetchData
 } from '../content/dashboards/Installations/fetchData';
 import { FetchResult } from '../dataCache/dataCache';
-import { CsvTimestamp, I_S3Credentials } from './S3Types';
+import { I_S3Credentials } from './S3Types';
 import { TimeSpan, UnixTime } from '../dataCache/time';
 import { DataRecord } from '../dataCache/data';
 import axiosConfig from '../Resources/axiosConfig';
@@ -82,6 +82,7 @@ export interface BatteryOverviewInterface {
 export const transformInputToBatteryViewData = async (
   s3Credentials: I_S3Credentials,
   id: number,
+  product: number,
   start_time?: UnixTime,
   end_time?: UnixTime
 ): Promise<{
@@ -90,7 +91,6 @@ export const transformInputToBatteryViewData = async (
 }> => {
   const prefixes = ['', 'k', 'M', 'G', 'T'];
   const MAX_NUMBER = 9999999;

   const categories = ['Soc', 'Temperature', 'Power', 'Voltage', 'Current'];
   const pathCategories = [
     'Soc',
|
@ -133,47 +133,36 @@ export const transformInputToBatteryViewData = async (
|
||||||
|
|
||||||
let initialiation = true;
|
let initialiation = true;
|
||||||
|
|
||||||
let timestampArray: CsvTimestamp[] = [];
|
let timestampArray: number[] = [];
|
||||||
let adjustedTimestampArray = [];
|
let adjustedTimestampArray = [];
|
||||||
const timestampPromises = [];
|
const timestampPromises = [];
|
||||||
|
|
||||||
if (start_time && end_time) {
|
await axiosConfig
|
||||||
await axiosConfig
|
.get(
|
||||||
.get(
|
`/GetCsvTimestampsForInstallation?id=${id}&start=${start_time.ticks}&end=${end_time.ticks}`
|
||||||
`/GetCsvTimestampsForInstallation?id=${id}&start=${start_time.ticks}&end=${end_time.ticks}`
|
)
|
||||||
)
|
.then((res: AxiosResponse<number[]>) => {
|
||||||
.then((res: AxiosResponse<CsvTimestamp[]>) => {
|
timestampArray = res.data;
|
||||||
timestampArray = res.data;
|
})
|
||||||
})
|
.catch((err: AxiosError) => {
|
||||||
.catch((err: AxiosError) => {
|
if (err.response && err.response.status == 401) {
|
||||||
if (err.response && err.response.status == 401) {
|
//removeToken();
|
||||||
//removeToken();
|
//navigate(routes.login);
|
||||||
//navigate(routes.login);
|
}
|
||||||
}
|
});
|
||||||
});
|
|
||||||
} else {
|
|
||||||
await axiosConfig
|
|
||||||
.get(`/GetCsvTimestampsForInstallation?id=${id}&start=${0}&end=${0}`)
|
|
||||||
.then((res: AxiosResponse<CsvTimestamp[]>) => {
|
|
||||||
timestampArray = res.data;
|
|
||||||
})
|
|
||||||
.catch((err: AxiosError) => {
|
|
||||||
if (err.response && err.response.status == 401) {
|
|
||||||
//removeToken();
|
|
||||||
//navigate(routes.login);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
for (var i = 0; i < timestampArray.length; i++) {
|
for (var i = 0; i < timestampArray.length; i++) {
|
||||||
timestampPromises.push(
|
timestampPromises.push(
|
||||||
fetchDataForOneTime(
|
fetchDataForOneTime(
|
||||||
UnixTime.fromTicks(timestampArray[i].timestamp),
|
UnixTime.fromTicks(timestampArray[i], true),
|
||||||
s3Credentials
|
s3Credentials
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
|
||||||
const adjustedTimestamp = new Date(timestampArray[i].timestamp * 1000);
|
const adjustedTimestamp =
|
||||||
|
product == 0
|
||||||
|
? new Date(timestampArray[i] * 1000)
|
||||||
|
: new Date(timestampArray[i] * 100000);
|
||||||
//Timezone offset is negative, so we convert the timestamp to the current zone by subtracting the corresponding offset
|
//Timezone offset is negative, so we convert the timestamp to the current zone by subtracting the corresponding offset
|
||||||
adjustedTimestamp.setHours(
|
adjustedTimestamp.setHours(
|
||||||
adjustedTimestamp.getHours() - adjustedTimestamp.getTimezoneOffset() / 60
|
adjustedTimestamp.getHours() - adjustedTimestamp.getTimezoneOffset() / 60
|
||||||
|
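The chart labels are now built from plain numeric timestamps: product 0 values look like unix seconds (scaled by 1000 to milliseconds), while product 1 values appear to be the two-digit-truncated form produced by the driver, hence the 100000 factor. A Python sketch of that conversion; the sample values are illustrative only:

```python
from datetime import datetime, timezone

def to_label_datetime(raw, product):
    # product 0: unix seconds -> ms; product 1: truncated value -> ms
    millis = raw * 1000 if product == 0 else raw * 100000
    return datetime.fromtimestamp(millis / 1000.0, tz=timezone.utc)

# to_label_datetime(1718000012, 0) and to_label_datetime(17180000, 1)
# both land around the same moment in June 2024.
```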
@@ -309,6 +298,7 @@ const fetchDataForOneTime = async (

   for (var i = 0; i < timeperiodToSearch; i++) {
     timestampToFetch = startUnixTime.later(TimeSpan.fromSeconds(i));

     try {
       res = await fetchData(timestampToFetch, s3Credentials);