Get telemetry history from influx on CUBEs

This commit is contained in:
Quentin WEPHRE
2025-09-25 09:51:07 +02:00
parent 1a4a1bfab2
commit a910e0ba31
5 changed files with 415 additions and 16 deletions

55
Python/check_folders.sh Normal file
View File

@@ -0,0 +1,55 @@
#!/bin/bash
# Verify the local dataset tree pulled from the CUBEs: for each prefix 01..57
# there must be exactly one "NNcube-*" folder containing a SMALL_DATASET with
# 4 CSV files and a MEDIUM_DATASET with 2 CSV files.

# check_dataset <folder> <subdir-name> <expected-csv-count>
# Prints a status line for <folder>/<subdir-name> based on its CSV count.
check_dataset() {
    local dataset_dir="$1/$2"
    local expected="$3"
    if [ -d "$dataset_dir" ]; then
        echo " $2 exists"
        # Count CSV files directly inside the dataset folder (no recursion)
        local csv_count
        csv_count=$(find "$dataset_dir" -maxdepth 1 -type f -name "*.csv" | wc -l)
        if [ "$csv_count" -eq "$expected" ]; then
            echo " ✅ Found $expected CSV files"
        else
            echo " ⚠️ Found $csv_count CSV files (expected $expected)"
        fi
    else
        echo " ❌ $2 folder missing"
    fi
}

for i in $(seq -w 1 57); do
    # Expand matching folders
    matches=( ${i}cube-* )
    # Case 1: no match (glob left unexpanded as a literal string)
    if [ "${matches[0]}" = "${i}cube-*" ]; then
        echo "❌ No folder found for prefix ${i}cube-xxxxx"
        continue
    fi
    # Case 2: more than one match
    if [ "${#matches[@]}" -ne 1 ]; then
        echo "⚠️ Multiple folders found for prefix ${i}cube-xxxxx: ${matches[*]}"
        continue
    fi
    folder="${matches[0]}"
    echo "✅ Found folder: $folder"
    check_dataset "$folder" "SMALL_DATASET" 4
    check_dataset "$folder" "MEDIUM_DATASET" 2
done

View File

@@ -36,9 +36,6 @@ def authenticate(base_url):
password = ENV_WEB["DEFAULT_CUBE_WEB_ADMIN_PASSWORD"]
certificate = ENV_WEB["DEFAULT_CERTIFICATE"].encode("utf-8")
#print(f"{username} {password} {certificate}")
# Prepare the multipart form data
auth_params = {
"login": username,
"password": password
@@ -47,7 +44,6 @@ def authenticate(base_url):
"params": (None, json.dumps(auth_params), "application/json"),
"certificate": ("certificate.pem", certificate, "application/octet-stream")
}
print(f"Authenticating against {auth_url}")
try:
response = requests.post(auth_url, files=files, verify=False, timeout=10)
response.raise_for_status() # Raise exception for 4XX/5XX responses
@@ -57,15 +53,16 @@ def authenticate(base_url):
token = auth_data.get("token")
if not token:
print("Authentication failure!")
raise requests.exceptions.RequestException
print("Authentication success!", end = " ")
print("HTTPS ✅", end = " ", flush=True)
return token
except requests.exceptions.RequestException as e:
print(f"Authentication failed: {e}")
print(f"HTTPS ❌", flush=True)
if hasattr(e, 'response') and e.response:
print(f"Response: {e.response.text}")
raise Exception(e.response)
else:
raise
def set_ssh_status(base_url, token):
@@ -86,16 +83,16 @@ def set_ssh_status(base_url, token):
response = requests.post(ssh_url, headers=headers, json=payload, verify=False, timeout=10)
response.raise_for_status()
print(f"SSH activation success!")
print(f"SSH ", end = " ", flush=True)
return True
except requests.exceptions.RequestException as e:
print("SSH activation failure!")
print(f"Exception: {e}")
print("SSH ", flush=True)
if hasattr(e, 'response') and e.response:
print(f"Response: {e.response.text}")
return False
raise Exception(e.response)
else:
raise
def activate_ssh(ip_address):
@@ -110,7 +107,6 @@ def activate_ssh(ip_address):
if not url.endswith(":9080"):
url = url + ":9080"
print(f"Target address: {url}")
verify_ssl = False
if not verify_ssl:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

View File

@@ -0,0 +1,149 @@
import csv
import paramiko
from cube_activate_ssh import activate_ssh
from dotenv import load_dotenv
import os
import re
import sys
def resource_path(relative_path):
    """Resolve *relative_path* against the application base directory.

    Works both in development (relative to the current working directory) and
    when frozen by PyInstaller, which unpacks bundled data into a temporary
    folder recorded in ``sys._MEIPASS``.
    """
    # getattr with a default replaces the original try/except around
    # sys._MEIPASS (accessing the missing attribute raises AttributeError).
    base_path = getattr(sys, "_MEIPASS", None)
    if base_path is None:
        base_path = os.path.abspath(".")
    return os.path.join(base_path, relative_path)
# Locate the .env shipped next to the script (or bundled by PyInstaller) and
# load the CUBE credentials from it.
dotenv_path = resource_path('.env')
load_dotenv(dotenv_path=dotenv_path)
# Scan target: every CUBE on the Myrtle subnet.
ip_address_prefix = "10.84.171." # Myrtle subnet
ip_address_range = range(1, 58) # From 1 to 57
# CSV report produced by main().
csv_filename = "MYRTLE_01.csv"
# Linux admin credentials read from the environment / .env file.
ENV_SSH = {
    "DEFAULT_CUBE_LINUX_ADMIN_USER": os.getenv("DEFAULT_CUBE_LINUX_ADMIN_USER"),
    "DEFAULT_CUBE_LINUX_ADMIN_PASSWORD": os.getenv("DEFAULT_CUBE_LINUX_ADMIN_PASSWORD")
}
ssh_username = ENV_SSH["DEFAULT_CUBE_LINUX_ADMIN_USER"]
ssh_password = ENV_SSH["DEFAULT_CUBE_LINUX_ADMIN_PASSWORD"]
def execute_ssh_command(ip, command):
    """Run *command* on *ip* over SSH (port 11022).

    Returns the remote stdout, decoded, lower-cased and stripped. Prints and
    re-raises any SSH/connection error.
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(
            ip,
            port=11022,
            username=ssh_username,
            password=ssh_password,
            allow_agent=False,
            look_for_keys=False,
        )
        _stdin, stdout, _stderr = client.exec_command(command)
        output = stdout.read()
        return output.decode().lower().strip()
    except Exception as e:
        print(f"SSH Error: {str(e)}")
        raise
    finally:
        client.close()
def update_cloud_config(ip, new_content):
    """Overwrite /etc/cube/config-azure.properties on *ip* with *new_content*.

    The file is written through ``sudo -S`` and a shell heredoc, with the sudo
    password fed on stdin. NOTE(review): *new_content* is interpolated into the
    heredoc unescaped, so it must not contain a line consisting of "EOF" —
    confirm callers only pass properties-file text.
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(ip, port=11022, username=ssh_username, password=ssh_password,
                       allow_agent=False, look_for_keys=False)
        stdin, stdout, stderr = client.exec_command(
            f'sudo -S bash -c \'cat > /etc/cube/config-azure.properties << EOF\n{new_content}\nEOF\'\n')
        stdin.write(ssh_password + "\n")
        stdin.flush()
        # Echo remote stdout, and surface stderr instead of silently collecting
        # it into an unused list (the prior behavior hid sudo/cat failures).
        for line in stdout:
            print(line.strip())
        for line in stderr:
            print(line.strip())
    except Exception as e:
        print(f"SSH Error: {str(e)}")
        raise
    finally:
        client.close()
def restart_cloudagent(ip):
    """Restart the cube-web-cloudagent service on *ip* via sudo over SSH.

    The sudo password is fed on stdin (``sudo -S``); remote output is echoed
    locally.
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(ip, port=11022, username=ssh_username, password=ssh_password,
                       allow_agent=False, look_for_keys=False)
        stdin, stdout, stderr = client.exec_command(
            f'sudo -S bash -c \'systemctl restart cube-web-cloudagent << EOF\n\nEOF\'\n')
        stdin.write(ssh_password + "\n")
        stdin.flush()
        # Echo remote stdout, and surface stderr instead of silently collecting
        # it into an unused list (the prior behavior hid systemctl failures).
        for line in stdout:
            print(line.strip())
        for line in stderr:
            print(line.strip())
    except Exception as e:
        print(f"SSH Error: {str(e)}")
        raise
    finally:
        client.close()
def main():
    """Survey the Myrtle subnet and record each CUBE's cloud configuration.

    For every IP in ``ip_address_range``: activate SSH, read the hostname, grep
    the IoT Hub connection string from /etc/cube/config-azure.properties, and
    write one CSV row (Number, IP address, Cube ID, Environment, Correct
    configuration) to ``csv_filename``. Unreachable CUBEs are recorded as
    UNREACHABLE/NA.
    """
    print("Starting...")
    #print(f"{ssh_username} {ssh_password}")
    with open(csv_filename, mode="w", newline="") as file:
        writer = csv.writer(file)
        writer.writerow(["Number", "IP address", "Cube ID", "Environment", "Correct configuration"])
        for i in ip_address_range:
            ip_address = f"{ip_address_prefix}{i}"
            print(f"Activating SSH for {ip_address}:", end=" ")
            try:
                activate_ssh(ip_address)
            except Exception as e:
                print(f"Failed! {e}")
                writer.writerow([i, ip_address, "UNREACHABLE", "NA", "NA"])
                file.flush()  # persist progress in case a later CUBE hangs the run
                continue
            print("Activated!")
            print(f"Getting hostname for {ip_address}:", end=" ")
            try:
                cube_id = execute_ssh_command(ip_address, "hostname")
            except Exception as e:
                print(f"Failed! {e}")
                writer.writerow([i, ip_address, "UNREACHABLE", "NA", "NA"])
                file.flush()
                continue
            print(cube_id)
            print(f"Getting configured Connection String for {cube_id} ({ip_address})")
            try:
                connection_string = execute_ssh_command(ip_address, "grep \"connection-string\" /etc/cube/config-azure.properties")
                if connection_string == "":
                    raise Exception("No Connection String extracted!")
                # NOTE(review): the patterns match a backslash-escaped "\=" as
                # stored in the .properties file (e.g. "hostname\=...;") —
                # confirm against a real config file.
                iothub_match = re.search(r"hostname\\=(.*?);", connection_string, re.IGNORECASE)
                iothub = iothub_match.group(1) if iothub_match else None
                # Guard against a missing match instead of letting .lower()
                # raise AttributeError on None (previously rescued only by the
                # broad except below, which also clobbered the device-id check).
                if iothub is not None and iothub.lower() == "IotHub-CUBE-PROD.azure-devices.net".lower():
                    migration = "SAFT"
                elif iothub is not None and iothub.lower() == "iot-ingest-ess-prod.azure-devices.net".lower():
                    migration = "INOX"
                else:
                    migration = "NONE"
                device_id_match = re.search(r"deviceid\\=(.*?);", connection_string, re.IGNORECASE)
                cloud_cube_id = device_id_match.group(1) if device_id_match else None
                if cloud_cube_id is not None and cloud_cube_id.lower() == cube_id.lower():
                    status = "CORRECT"
                else:
                    status = "INCORRECT"
            except Exception as e:
                print(e)
                migration = "NONE"
                status = "INCORRECT"
            writer.writerow([i, ip_address, cube_id, migration, status])
            file.flush()


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,198 @@
from paramiko import SSHClient, AutoAddPolicy
import paramiko
from cube_activate_ssh import activate_ssh
from dotenv import load_dotenv
import os
import sys
import shlex
from scp import SCPClient
import time
def resource_path(relative_path):
    """Return the absolute path of a bundled resource.

    PyInstaller one-file builds unpack data into a temporary directory whose
    path is stored in ``sys._MEIPASS``; during normal development we fall back
    to the current working directory.
    """
    try:
        base_path = sys._MEIPASS  # present only when running a frozen bundle
    except AttributeError:
        base_path = os.path.abspath(".")
    return os.path.join(base_path, relative_path)
# Locate the .env shipped next to the script (or bundled by PyInstaller) and
# load the CUBE credentials from it.
dotenv_path = resource_path('.env')
load_dotenv(dotenv_path=dotenv_path)
ip_address_prefix = "10.84.171." # Myrtle subnet
ip_address_range = range(3, 5) # NOTE(review): currently limited to CUBEs 3-4; the full fleet is 1 to 57
# Linux admin credentials read from the environment / .env file.
ENV_SSH = {
    "DEFAULT_CUBE_LINUX_ADMIN_USER": os.getenv("DEFAULT_CUBE_LINUX_ADMIN_USER"),
    "DEFAULT_CUBE_LINUX_ADMIN_PASSWORD": os.getenv("DEFAULT_CUBE_LINUX_ADMIN_PASSWORD")
}
ssh_username = ENV_SSH["DEFAULT_CUBE_LINUX_ADMIN_USER"]
ssh_password = ENV_SSH["DEFAULT_CUBE_LINUX_ADMIN_PASSWORD"]
def execute_ssh_command(ip, command):
    """Execute *command* on *ip* over SSH (port 11022).

    Returns remote stdout decoded, lower-cased and stripped; prints and
    re-raises any connection or execution error.
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(
            ip,
            port=11022,
            username=ssh_username,
            password=ssh_password,
            allow_agent=False,
            look_for_keys=False,
        )
        _stdin, stdout, _stderr = client.exec_command(command)
        raw_output = stdout.read()
        return raw_output.decode().lower().strip()
    except Exception as e:
        print(f"SSH Error: {str(e)}", flush=True)
        raise
    finally:
        client.close()
def execute_sudo_ssh_command(ip, command):
    """Run *command* under sudo on *ip*, feeding the password over stdin.

    The command is shlex-quoted into ``bash -c`` and run via ``sudo -S -p ''``
    so no password prompt is emitted. Returns stdout decoded, lower-cased and
    stripped.

    Raises:
        Exception: if the remote command wrote anything to stderr.
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(ip, port=11022, username=ssh_username, password=ssh_password,
                       allow_agent=False, look_for_keys=False)
        # Long influx exports can be silent for a while; keepalives stop the
        # session from being dropped.
        client.get_transport().set_keepalive(10)
        quoted_command = f"bash -c {shlex.quote(command)}"
        sudo_command = f"sudo -S -p '' {quoted_command}"
        stdin, stdout, stderr = client.exec_command(sudo_command, timeout=60)
        time.sleep(3)  # give sudo a moment to start reading stdin before sending the password
        stdin.write(ssh_password + '\n')
        stdin.flush()
        # NOTE(review): stderr is drained before stdout. Fine for the current
        # callers (their commands redirect bulk output to a file), but a chatty
        # stdout could fill the channel window and stall this read — confirm
        # before reusing for other commands.
        error = stderr.read().decode().strip()
        if error:
            raise Exception(f"sudo SSH command failed with error: {error}")
        # Removed the previous no-op `except Exception as e: raise` clause;
        # exceptions propagate unchanged and `finally` still closes the client.
        result = stdout.read().decode().lower().strip()
        return result
    finally:
        client.close()
def scp_get_file(ip, remote_path, local_path):
    """Copy *remote_path* from *ip* to *local_path* over SCP (SSH port 11022).

    ``~`` and relative segments in *local_path* are expanded, and the parent
    directory is created if it does not exist yet.
    """
    client = SSHClient()
    client.set_missing_host_key_policy(AutoAddPolicy())
    local_path = os.path.abspath(os.path.expanduser(local_path))
    local_dir = os.path.dirname(local_path)
    if local_dir:
        os.makedirs(local_dir, exist_ok=True)
    try:
        client.connect(
            ip,
            port=11022,
            username=ssh_username,
            password=ssh_password,
            allow_agent=False,
            look_for_keys=False
        )
        with SCPClient(client.get_transport()) as scp:
            scp.get(remote_path, local_path)
    # Removed the previous no-op `except Exception as e: raise` clause;
    # exceptions propagate unchanged and `finally` still closes the client.
    finally:
        client.close()
def _export_metrics(ip_address, metric_names, start_date, end_date):
    """Export each metric's history from the CUBE's influx into /data/tmp/<metric>.csv."""
    for metric in metric_names:
        try:
            print(f"\t{metric}", end=" ", flush=True)
            # nice -n 19 keeps the export from starving the CUBE's own workload.
            command = ("nice -n 19 influx -database cube-db -precision rfc3339 -execute "
                       + "\"SELECT * FROM RETENTION_CUBE_78w." + str(metric)
                       + " WHERE time >= '" + str(start_date) + "' AND time < '" + str(end_date)
                       + "'\" -format csv > /data/tmp/" + str(metric) + ".csv")
            execute_sudo_ssh_command(ip_address, command)
            print(f"", flush=True)
        except Exception as e:
            print(f"", flush=True)
            print(f"Failed requesting {metric}:", flush=True)
            print(f"{e}", flush=True)
            print(f"Skipping metric...", flush=True)
            continue


def _download_metrics(ip_address, cube_number, cube_id, metric_names, dataset_dirname):
    """SCP each exported metric CSV from the CUBE into the local dataset folder."""
    for metric in metric_names:
        try:
            print(f"\t{metric}", end=" ", flush=True)
            absolute_remote_path = "/data/tmp/" + str(metric) + ".csv"
            relative_local_path = ("./JUILLAUME_GOURET/" + str(cube_number).zfill(2) + str(cube_id)
                                   + "/" + dataset_dirname + "/" + str(metric) + ".csv")
            scp_get_file(ip_address, absolute_remote_path, relative_local_path)
            print(f"", flush=True)
        except Exception as e:
            print(f"", flush=True)
            print(f"Failed downloading {metric}", flush=True)
            print(f"{e}", flush=True)
            print(f"Skipping metric...", flush=True)
            continue


def main():
    """Pull telemetry history CSVs from every CUBE on the Myrtle subnet.

    For each IP in ``ip_address_range``: activate SSH, read the hostname, then
    export and download the small and medium metric datasets. The four
    previously duplicated export/download loops are factored into
    ``_export_metrics`` / ``_download_metrics``.
    """
    print(f"Starting...\n", flush=True)
    for i in ip_address_range:
        ip_address = f"{ip_address_prefix}{i}"
        print(f"[{time.ctime(time.time())}] {str(i)} ({ip_address})", end=" ", flush=True)
        try:
            activate_ssh(ip_address)
        except Exception as e:
            print(f"SSH activation failed for {ip_address}:", flush=True)
            print(f"{e}", flush=True)
            print(f"Skipping CUBE...", flush=True)
            continue
        cube_id = "NA"
        try:
            cube_id = execute_ssh_command(ip_address, "hostname")
            print(f"{cube_id}", flush=True)
        except Exception as e:
            print(f"cube-xxxxx ❌", flush=True)
            print(f"Error getting hostname for {ip_address}:", flush=True)
            print(f"{e}", flush=True)
            print(f"Skipping CUBE...", flush=True)
            continue
        start_date = "2024-01-01"
        end_date = "2026-01-01"
        # Small datasets: currently disabled (empty metric list) — restore e.g.
        # ["ModTmpAvg", "ModTmpMin", "ModTmpMax", "StrModTmpAvg"] to re-enable.
        small_metrics = []
        print(f"Getting small datasets:", flush=True)
        _export_metrics(ip_address, small_metrics, start_date, end_date)
        print(f"Downloading small datasets:", flush=True)
        _download_metrics(ip_address, i, cube_id, small_metrics, "SMALL_DATASET")
        medium_metrics = ["SoC", "StrSoC"]
        print(f"Getting medium datasets:", flush=True)
        _export_metrics(ip_address, medium_metrics, start_date, end_date)
        print(f"Downloading medium datasets:", flush=True)
        _download_metrics(ip_address, i, cube_id, medium_metrics, "MEDIUM_DATASET")


if __name__ == "__main__":
    main()

View File

@@ -5,3 +5,4 @@ paramiko
requests
pandas
openpyxl
scp