Files
ess-moxa-configuration-tools/Moulinette/data_path_config.py
2026-03-26 08:46:57 +01:00

585 lines
32 KiB
Python

import json
import pandas as pd
import inspect
import logging
import string
import os
import shutil
import datetime
import sys
import re
import warnings
import math
from dotenv import load_dotenv
# Silence pandas/openpyxl UserWarnings (e.g. about unsupported workbook features).
warnings.simplefilter(action='ignore', category=UserWarning)
# Default Moxa credentials come from a local .env file.
# NOTE(review): if the variables are missing, os.getenv returns None and
# str(None) yields the literal string "None" — confirm the .env always defines them.
load_dotenv()
default_user = str(os.getenv("DEFAULT_MOXA_USER"))
default_password = str(os.getenv("DEFAULT_MOXA_PASSWORD"))
# Will hold tpfunc tags for corresponding slave device to be included in step 4
# (currently never written to in this file).
tags_of_tpfunc = []
# Variables required for TP function
expose_tags = []
subscribe_tags = []
# Shared accumulators used by the functions below.
assets = []        # module-level list (note: main() shadows this with a local)
assets_name = {}   # remoteDevId -> sanitized asset name
all_metrics = []   # one entry per registered Modbus command; cleared per device
jq_filter_set = set()  # distinct jq filter names referenced by the metrics
# Characters allowed in asset names; everything else is stripped.
allowed_name_characters = list(string.ascii_letters + string.digits)
allowed_name_characters.append('_')
allowed_name_characters.append('-')
allowed_name_characters.append('~')
allowed_name_characters.append('.')
logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', filename='data_config_debug.log', filemode='w', level=logging.DEBUG, datefmt='%Y%m%d%H%M%S')
# Output locations: one shell script per Moxa device plus one global script.
dir_name = 'I-Sight_Generated_Files'
input_datamodel = 'DATAMODEL_Ruakaka_QWE1.xlsx'
shell_script_name = dir_name + '/I-Sight_Configuration_'
global_shell_script_name = dir_name + '/I-Sight_Global_Configuration.sh'
# If a previous run left the output folder behind, move it aside as a
# timestamped backup before creating a fresh one.
if (os.path.isdir(dir_name)):
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    backup_dir_name = f"{dir_name}_backup_{timestamp}"
    shutil.move(dir_name, backup_dir_name)
    logging.debug("Folder " + dir_name + " already exists, backup made: " + backup_dir_name)
os.mkdir(dir_name)
# The input workbook is mandatory; archive a copy next to the generated files.
if os.path.isfile(input_datamodel):
    logging.debug("Input datamodel " + str(input_datamodel) + " found, archiving it.")
    shutil.copy(input_datamodel, dir_name)
else:
    print(f"{input_datamodel} not found. Exiting!")
    sys.exit(1)
def excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh, dsh_global, row_device):
    """Emit STEP 2: register the Modbus read commands for one asset.

    Reads the sheet named *sheet_det* plus any matching BITFIELDS rows,
    builds the command list, and writes the POST curl commands to both the
    per-device script (dsh) and the global script (dsh_global).

    Parameters:
        sheet_det: workbook sheet describing this asset type.
        slave_no: remoteDevId the commands are attached to.
        slave_device: sanitized asset name (kept for interface compatibility).
        allowed_name_characters: accepted name characters (unused here).
        dsh / dsh_global: open handles of the two generated shell scripts.
        row_device: DEVICES sheet row of the Moxa being configured.

    Side effects: appends one entry per command to module-level ``all_metrics``.
    """
    global tags_of_tpfunc
    global expose_tags
    global subscribe_tags
    df_prop = pd.read_excel(input_datamodel, sheet_name=sheet_det, header=0)
    df_prop_initial_size = len(df_prop)
    df_prop_columns = ["metric_name", "modbus_function", "modbus_address", "type"]
    # Force the first four column headers to the canonical names expected
    # below (stripping embedded newlines from the target names).
    for k in range(len(df_prop_columns)):
        if '\n' in df_prop_columns[k]:
            df_prop_columns[k] = df_prop_columns[k].replace("\n", " ")
        df_prop = df_prop.rename(columns={df_prop.columns[k]: df_prop_columns[k]})
    dsh.write("# [STEP 2] Assigning " + str(sheet_det) + " Modbus commands to asset " + str(assets_name[slave_no]) + "\n\n")
    slave_prop_list = []
    # Register size per supported data type, index-aligned with the names.
    allowed_data_types = ['raw', 'boolean', 'int16', 'int32', 'int64', 'uint16', 'uint32', 'uint64', 'float', 'double', 'string']
    allowed_data_sizes = [1, 1, 1, 2, 4, 1, 2, 4, 2, 4, 1]
    total_considered_commands = 0
    # Keep only rows where all mandatory columns are filled in.
    # (FIX: this filter used to be needlessly recomputed once per row.)
    filtered_df_prop = df_prop
    for k in range(len(df_prop_columns)):
        filtered_df_prop = filtered_df_prop[filtered_df_prop[df_prop_columns[k]].notnull()]
    logging.debug("Starting registration of Modbus commands for " + str(assets_name[slave_no]) + ".")
    for index, row in filtered_df_prop.iterrows():
        step2_data = {}
        logging.debug("Registering command " + row['metric_name'] + " on address " + str(row['modbus_address']))
        type_size = allowed_data_sizes[allowed_data_types.index(row['type'])]
        # Multi-register types must read a multiple of their register size;
        # otherwise fall back to the type's natural size.
        if type_size != 1 and int(row['modbus_quantity']) % type_size != 0:
            logging.debug("Wrong quantity (" + str(int(row['modbus_quantity'])) + ") for data type " + str(row['type']) + ", using default size " + str(type_size) + ".")
            step2_data["readQuantity"] = type_size
        else:
            step2_data["readQuantity"] = int(row['modbus_quantity'])
        step2_data["remoteDevId"] = slave_no
        step2_data["name"] = row['metric_name']
        step2_data["mode"] = 0
        step2_data["func"] = row['modbus_function']
        step2_data["readAddress"] = int(row['modbus_address'])
        if not math.isnan(row['poll_interval']):
            step2_data["pollInterval"] = int(row['poll_interval'])
        else:
            logging.debug("Poll interval undefined, using 1000ms as default.")
            step2_data["pollInterval"] = 1000
        if not math.isnan(row['endian_swap']):
            logging.debug(f"Endian Swap: {row['endian_swap']} {row['metric_name']}")
            step2_data["swap"] = int(row['endian_swap'])
        else:
            logging.debug(f"Endian Swap undefined, not using swap as default. {row['metric_name']}")
            step2_data["swap"] = 0
        step2_data["dataType"] = row['type']
        if not math.isnan(row['scaling_factor']):
            step2_data['scalingFunc'] = 1
            step2_data['interceptSlope'] = row['scaling_factor']
        else:
            logging.debug("No scaling factor provided, using 1 (no scaling) as default.")
        logging.debug(row['metric_name'])
        total_considered_commands = total_considered_commands + 1
        slave_prop_list.append(step2_data)
        all_metrics.append({"remoteDevId": step2_data["remoteDevId"], "metric_name": step2_data["name"], "jqfilter": row['jq_filter_name']})
    logging.debug(str(total_considered_commands) + "/" + str(df_prop_initial_size) + " commands registered.")
    if total_considered_commands < df_prop_initial_size:
        logging.debug("A command may not be registered if its row is incomplete.")
    # BITFIELDS sheet: bit-packed status words attached to this asset type.
    # Rows missing any mandatory column are dropped; the bit_name_*/bit_source_*
    # columns are optional, hence the check_string_format() exclusion.
    # (FIX: this filter used to be needlessly recomputed once per row.)
    df_bitfield = pd.read_excel(input_datamodel, sheet_name='BITFIELDS', header=0)
    df_bitfield_columns = ["metric_name", "modbus_function", "modbus_address"]
    filtered_df_bitfield = df_bitfield
    for column in df_bitfield_columns:
        if not check_string_format(column):
            filtered_df_bitfield = filtered_df_bitfield[filtered_df_bitfield[column].notnull()]
    for index, row in filtered_df_bitfield.iterrows():
        if row['asset_type'] == sheet_det:
            logging.debug("Bitfield " + row['metric_name'] + " assigned to " + str(assets_name[slave_no]) + ".")
            step2_data = {}
            step2_data["remoteDevId"] = slave_no
            step2_data["name"] = row['metric_name']
            step2_data["mode"] = 0
            step2_data["func"] = row['modbus_function']
            step2_data["readAddress"] = int(row['modbus_address'])
            # FIX: these optional cells were tested with plain truthiness, but
            # a missing cell is NaN, which is truthy, so int(NaN) raised
            # ValueError. Treat NaN (as well as 0/empty) as "use the default".
            if not pd.isna(row['modbus_quantity']) and row['modbus_quantity']:
                step2_data["readQuantity"] = int(row['modbus_quantity'])
            else:
                step2_data["readQuantity"] = 1
            if not pd.isna(row['poll_interval']) and row['poll_interval']:
                step2_data["pollInterval"] = int(row['poll_interval'])
            else:
                step2_data["pollInterval"] = 1000
            if not pd.isna(row['endian_swap']) and row['endian_swap']:
                step2_data["swap"] = int(row['endian_swap'])
            else:
                step2_data["swap"] = 0
            # Bitfields are always read as a single unsigned 16-bit register.
            step2_data["dataType"] = 'uint16'
            slave_prop_list.append(step2_data)
            all_metrics.append({"remoteDevId": step2_data["remoteDevId"], "metric_name": step2_data["name"], "jqfilter": row['jq_filter_name']})
    slave_prop_list = json.dumps(slave_prop_list)
    # Per-device script: local API call authenticated via the on-device token file.
    dsh.write(
        inspect.cleandoc(
            """sudo curl -X POST https://127.0.0.1:8443/api/v1/modbusmaster/config/mcmds?autoCreate=tags \\
-H "Content-Type: application/json" \\
-H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
-d """) + "'" + str(slave_prop_list) + "'" + " -k | jq\n"
    )
    dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
    dsh.write("\n\n")
    # Global script: remote API call authenticated via the ${token} variable.
    dsh_global.write("### Creating Modbus commands " + "" + " for " + str(row_device['device_name']) + "\n")
    dsh_global.write("curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/modbusmaster/config/mcmds?autoCreate=tags \\\n")
    dsh_global.write("\t-H \"Content-Type: application/json\" \\\n")
    dsh_global.write("\t-H \"mx-api-token: ${token}\" \\\n")
    dsh_global.write("\t-d '" + str(slave_prop_list) + "' >> global_config.log\n\n")
    dsh_global.write("echo -e \"\\n\" >> global_config.log\n\n")
def common_code(dsh, dsh_global, row_device):
    """Emit STEP 3: apply the staged Modbus configuration.

    dsh        -- per-device script handle (local curl via the token file)
    dsh_global -- global script handle (remote curl via ${token})
    row_device -- DEVICES row with 'device_name' and 'device_ip_address_http'
    """
    dsh.write("# [STEP 3] Applying Modbus configuration\n\n")
    local_apply = inspect.cleandoc(
        """sudo curl -X PUT https://127.0.0.1:8443/api/v1/modbusmaster/control/config/apply \\
-H "Content-Type: application/json" \\
-H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
-d """)
    # The apply call takes an empty body ('').
    dsh.write(local_apply + "''" + " -k | jq\n\n")
    dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
    dsh.write("\n\n")
    device_name = str(row_device['device_name'])
    device_ip = row_device['device_ip_address_http']
    dsh_global.writelines([
        # Double space kept for byte-identical output with earlier versions.
        "### Applying Modbus configuration  for " + device_name + "\n",
        "curl -s -X PUT -k https://" + device_ip + ":8443/api/v1/modbusmaster/control/config/apply \\\n",
        '\t-H "Content-Type: application/json" \\\n',
        '\t-H "mx-api-token: ${token}" >> global_config.log\n\n',
        'echo -e "\\n" >> global_config.log\n\n',
    ])
def jq_filter(current_device, dsh, dsh_global, row_device):
    """Emit STEP 4: Azure telemetry topic definitions for this device.

    For every VALIDATION row whose jq filter name is actually used by one of
    this device's metrics (tracked in module-level ``jq_filter_set``), a
    standard topic is created; the "generic_metrics" filter additionally gets
    an immediate "PCS_OnEvent" topic and a 12-hourly "PCS_12H" snapshot topic.
    Writes to both the per-device script (dsh) and the global one (dsh_global).
    """
    df_validation = pd.read_excel(input_datamodel, sheet_name='VALIDATION', header = 0)
    # Usable rows must define both the filter name and the filter body.
    filtered_df_validation = df_validation[df_validation['jq_filter_name'].notnull()]
    filtered_df_validation = filtered_df_validation[filtered_df_validation['jq_filter'].notnull()]
    for index,row in filtered_df_validation.iterrows():
        # NOTE(review): 'filter' shadows the Python builtin of the same name.
        filter = row['jq_filter_name']
        # "bitfield" filters are handled by bitfield_jq_filter() instead.
        if filter in jq_filter_set and filter != "bitfield":
            logging.debug("Creating standard telemetry topic " + filter + ".")
            jq_data = {}
            jq_data["enable"] = False
            jq_data["properties"] = [{"key": "deviceType", "value": "AC_GATEWAY"}, {"key": "cdid", "value": current_device}]
            jq_data["outputTopic"] = filter
            # Flush by size: 128 kB, or after 30 s, with a 5 s idle timer.
            jq_data["sendOutThreshold"] = {"mode": "bySize", "size": int(128000), "time": int(30), "sizeIdleTimer": {"enable": True, "time": int(5)}}
            jq_data["minPublishInterval"] = int(0)
            jq_data["samplingMode"] = "allValues"
            jq_data["customSamplingRate"] = False
            jq_data["pollingInterval"] = int(0)
            jq_data["onChange"] = False
            # Inject the device name into the jq template from the sheet.
            final_filter = row["jq_filter"].replace("***device_name***", current_device)
            # Group this filter's metric names by the asset that owns them.
            cmd_list = {}
            for metric in all_metrics:
                if metric["jqfilter"] == filter:
                    if assets_name[metric["remoteDevId"]] not in cmd_list:
                        cmd_list[assets_name[metric["remoteDevId"]]] = []
                    cmd_list[assets_name[metric["remoteDevId"]]].append(metric["metric_name"])
            jq_data["format"] = final_filter
            jq_data["tags"]= {"modbus_tcp_master": cmd_list}
            json_object = json.dumps(jq_data)
            dsh.write("# [STEP 4] Creating " + filter + " standard Azure telemetry topic\n\n")
            dsh.write(
                inspect.cleandoc(
                    """sudo curl -X POST https://127.0.0.1:8443/api/v1/azure-iotedge/messages \\
-H "Content-Type: application/json" \\
-H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
-d """) + "'" + str(json_object) +"'" + """ -k | jq
\n"""
            )
            dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
            dsh.write("\n\n")
            # Mirror the same POST in the global script via ${token}.
            dsh_global.write("### Creating Azure messages " + "" + " for " + str(row_device['device_name']) + "\n")
            dsh_global.write("curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/azure-iotedge/messages \\\n")
            dsh_global.write("\t-H \"Content-Type: application/json\" \\\n")
            dsh_global.write("\t-H \"mx-api-token: ${token}\" \\\n")
            dsh_global.write("\t-d '" + str(json_object) + "' >> global_config.log\n\n")
            dsh_global.write("echo -e \"\\n\" >> global_config.log\n\n")
            # NOTE(review): this line is emitted once per topic, not once per
            # device — confirm the repeated "Finished work on" is intended.
            dsh_global.write("echo \"Finished work on " + str(row_device['device_name']) + "\"")
        if filter == "generic_metrics":
            # OnEvent topic: publish changed values immediately.
            logging.debug("Creating OnEvent telemetry topic.")
            jq_data = {}
            jq_data["enable"] = False
            jq_data["properties"] = [{"key": "deviceType", "value": "AC_GATEWAY"}, {"key": "cdid", "value": current_device}]
            jq_data["outputTopic"] = "PCS_OnEvent"
            jq_data["sendOutThreshold"] = {"mode": "immediately", "size": int(4096), "time": int(60), "sizeIdleTimer": {"enable": True, "time": int(60)}}
            jq_data["minPublishInterval"] = int(0)
            jq_data["samplingMode"] = "allChangedValues"
            jq_data["customSamplingRate"] = False
            jq_data["pollingInterval"] = int(0)
            jq_data["onChange"] = True
            final_filter = row["jq_filter"].replace("***device_name***", current_device)
            # NOTE(review): tags are left empty here (unlike the standard
            # topic) — confirm the OnEvent topic is meant to bind no tags.
            cmd_list = {}
            jq_data["format"] = final_filter
            jq_data["tags"]= {"modbus_tcp_master": cmd_list}
            json_object = json.dumps(jq_data)
            dsh.write("# [STEP 4] Creating " + filter + " OnEvent Azure telemetry topic\n\n")
            dsh.write(
                inspect.cleandoc(
                    """sudo curl -X POST https://127.0.0.1:8443/api/v1/azure-iotedge/messages \\
-H "Content-Type: application/json" \\
-H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
-d """) + "'" + str(json_object) +"'" + """ -k | jq
\n"""
            )
            dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
            dsh.write("\n\n")
            dsh_global.write("### Creating OnEvent Azure messages " + "" + " for " + str(row_device['device_name']) + "\n")
            dsh_global.write("curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/azure-iotedge/messages \\\n")
            dsh_global.write("\t-H \"Content-Type: application/json\" \\\n")
            dsh_global.write("\t-H \"mx-api-token: ${token}\" \\\n")
            dsh_global.write("\t-d '" + str(json_object) + "' >> global_config.log\n\n")
            dsh_global.write("echo -e \"\\n\" >> global_config.log\n\n")
            dsh_global.write("echo \"Finished work on " + str(row_device['device_name']) + "\"")
            # 12H topic: publish the latest values every 12 hours (43200 s).
            logging.debug("Creating 12H telemetry topic.")
            jq_data = {}
            jq_data["enable"] = False
            jq_data["properties"] = [{"key": "deviceType", "value": "AC_GATEWAY"}, {"key": "cdid", "value": current_device}]
            jq_data["outputTopic"] = "PCS_12H"
            jq_data["sendOutThreshold"] = {"mode": "byTime", "size": int(4096), "time": int(43200), "sizeIdleTimer": {"enable": True, "time": int(60)}}
            jq_data["minPublishInterval"] = int(0)
            jq_data["samplingMode"] = "latestValues"
            jq_data["customSamplingRate"] = False
            jq_data["pollingInterval"] = int(43200)
            jq_data["onChange"] = False
            final_filter = row["jq_filter"].replace("***device_name***", current_device)
            cmd_list = {}
            jq_data["format"] = final_filter
            jq_data["tags"]= {"modbus_tcp_master": cmd_list}
            json_object = json.dumps(jq_data)
            dsh.write("# [STEP 4] Creating " + filter + " 12H Azure telemetry topic\n\n")
            dsh.write(
                inspect.cleandoc(
                    """sudo curl -X POST https://127.0.0.1:8443/api/v1/azure-iotedge/messages \\
-H "Content-Type: application/json" \\
-H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
-d """) + "'" + str(json_object) +"'" + """ -k | jq
\n"""
            )
            dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
            dsh.write("\n\n")
            dsh_global.write("### Creating 12H Azure messages " + "" + " for " + str(row_device['device_name']) + "\n")
            dsh_global.write("curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/azure-iotedge/messages \\\n")
            dsh_global.write("\t-H \"Content-Type: application/json\" \\\n")
            dsh_global.write("\t-H \"mx-api-token: ${token}\" \\\n")
            dsh_global.write("\t-d '" + str(json_object) + "' >> global_config.log\n\n")
            dsh_global.write("echo -e \"\\n\" >> global_config.log\n\n")
            dsh_global.write("echo \"Finished work on " + str(row_device['device_name']) + "\"")
# jq expression producing the current time as an ISO-8601 string.
# NOTE(review): shadowed by a local of the same name in bitfield_jq_filter().
STATIC_JQ_FILTER_EMISSIONDATE = "(now|todateiso8601)"
def bitfield_jq_filter(current_device):
    """Emit STEP 5: one Azure telemetry topic per bitfield metric.

    For every BITFIELDS row matching a registered metric, builds a jq message
    template exposing each defined bit as a named fault, then writes the
    corresponding curl command to the per-device script (module-level ``dsh``
    handle, opened by main()).
    """
    STATIC_JQ_FILTER_EMISSIONDATE = "(now|todateiso8601)"
    STATIC_JQ_FILTER_TIMESTAMP = "((.ts/1000000)|todateiso8601)"
    df_bitfield = pd.read_excel(input_datamodel, sheet_name='BITFIELDS', header=0)
    for _, r in df_bitfield.iterrows():
        for metric in all_metrics:
            if metric["metric_name"] == r["metric_name"]:
                logging.debug("Creating bitfield specific telemetry topic" + metric['metric_name'] + " for " + str(assets_name[metric["remoteDevId"]]) + ".")
                # FIX: these per-topic lists used to be initialized once per
                # function call and kept growing across metrics, so every
                # topic after the first was filled with the PREVIOUS topics'
                # bit names and sources. Reset them for each metric instead.
                bit_names = []
                bit_sources = []
                bit_formulas = []
                json_request = {}
                json_request["enable"] = False
                json_request["properties"] = [{"key": "deviceType", "value": "AC_GATEWAY"}, {"key": "cdid", "value": current_device}]
                json_request["outputTopic"] = metric["metric_name"]
                json_request["pollingInterval"] = 60
                json_request["sendOutThreshold"] = {"size": 256000, "time": 180}
                json_format = {}
                json_format["version"] = 3
                json_format["deviceId"] = current_device
                json_format["emissionDate"] = "PLACEHOLDER_EMISSIONDATE"
                json_format["deviceType"] = "AC_GATEWAY"
                json_format["faults"] = []
                # Each bitfield word exposes up to 16 bits; bit i is isolated
                # in jq with (value / 2^(i-1)) % 2.
                # FIX: the loop variable no longer shadows the outer row index.
                for bit_no in range(1, 17):
                    current_power = pow(2, bit_no - 1)
                    current_name = "bit_name_" + str(bit_no)
                    current_source = "bit_source_" + str(bit_no)
                    current_formula = "((.dataValue)/" + str(current_power) + "%2)"
                    current_fault = {"name": "PLACEHOLDER_NAME", "idAsset": "PLACEHOLDER_SOURCE", "active": "PLACEHOLDER_FORMULA", "timestamp": "PLACEHOLDER_TIMESTAMP"}
                    if not pd.isna(r[current_name]) and not pd.isna(r[current_source]):
                        json_format["faults"].append(current_fault)
                        bit_names.append(r[current_name])
                        bit_sources.append(r[current_source])
                        bit_formulas.append(current_formula)
                    elif not pd.isna(r[current_name]) and pd.isna(r[current_source]):
                        # No explicit source: default to the owning asset.
                        json_format["faults"].append(current_fault)
                        bit_names.append(r[current_name])
                        bit_sources.append(str(assets_name[metric["remoteDevId"]]))
                        bit_formulas.append(current_formula)
                json_request["format"] = json.dumps(json_format)
                json_request["tags"] = {}
                json_request["tags"]["modbus_tcp_master"] = {}
                json_request["tags"]["modbus_tcp_master"][assets_name[metric["remoteDevId"]]] = [metric["metric_name"]]
                json_request_dump = json.dumps(json_request)
                # Splice the jq expressions into the double-encoded format
                # string. Quoted placeholders are replaced INCLUDING their
                # escaped quotes so the jq code ends up unquoted; names are
                # replaced inside the quotes so they stay strings.
                json_request_dump = json_request_dump.replace("\\\"PLACEHOLDER_EMISSIONDATE\\\"", STATIC_JQ_FILTER_EMISSIONDATE)
                for bit in range(len(bit_names)):
                    json_request_dump = json_request_dump.replace("PLACEHOLDER_NAME", bit_names[bit], 1)
                    json_request_dump = json_request_dump.replace("PLACEHOLDER_SOURCE", bit_sources[bit], 1)
                    json_request_dump = json_request_dump.replace("\\\"PLACEHOLDER_FORMULA\\\"", bit_formulas[bit], 1)
                    json_request_dump = json_request_dump.replace("\\\"PLACEHOLDER_TIMESTAMP\\\"", STATIC_JQ_FILTER_TIMESTAMP, 1)
                dsh.write("# [STEP 5] Creating " + metric['metric_name'] + " bitfield specific Azure telemetry topic\n\n")
                logging.debug(json_request_dump)
                dsh.write(
                    inspect.cleandoc(
                        """sudo curl -X POST https://127.0.0.1:8443/api/v1/azure-iotedge/messages \\
-H "Content-Type: application/json" \\
-H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
-d """) + "'" + json_request_dump + "'" + " -k | jq\n\n"
                )
                dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
                dsh.write("\n\n")
def slave_script(slave_no, row_asset, dsh, dsh_global, row_device):
    """Emit STEP 1: create one Modbus slave (asset) on the Moxa.

    Sanitizes the asset name, records it in module-level ``assets_name``
    under its remoteDevId, writes the creation curl commands to both
    scripts, then delegates STEP 2 to excel_parser().
    """
    # Keep only whitelisted characters in the asset name.
    slave_device = "".join(ch for ch in row_asset['asset_name'] if ch in allowed_name_characters)
    slave_ip = row_asset['ip_address']
    slave_port = row_asset['port']
    slave_id = row_asset['slave_id']
    assets_name[slave_no] = slave_device
    step1_dict = {
        "masterTcpIfaceId": 1,
        "name": slave_device,
        "enable": 1,
        "slaveIpaddr": slave_ip,
        "slaveTcpPort": slave_port,
        "slaveId": slave_id,
        "remoteDevId": slave_no,
    }
    json_object = json.dumps(step1_dict)
    dsh.write("# [STEP 1] Creating asset " + slave_device + "\n\n")
    dsh.write(
        inspect.cleandoc(
            """sudo curl -X POST https://127.0.0.1:8443/api/v1/modbusmaster/config/remote-devs \\
-H "Content-Type: application/json" \\
-H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
-d """) + "'" + str(json_object) + "'" + " -k | jq\n\n"
    )
    dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
    dsh.write("\n\n")
    # Same request in the global script, authenticated via ${token}.
    dsh_global.writelines([
        "### Creating " + str(row_asset['asset_name']) + " for " + str(row_device['device_name']) + "\n",
        "curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/modbusmaster/config/remote-devs \\\n",
        '\t-H "Content-Type: application/json" \\\n',
        '\t-H "mx-api-token: ${token}" \\\n',
        "\t-d '" + str(json_object) + "' >> global_config.log\n\n",
        'echo -e "\\n" >> global_config.log\n\n',
    ])
    # Hand over to STEP 2: register this asset's Modbus commands.
    sheet_det = row_asset['asset_type']
    logging.debug("Asset " + slave_device + " created!")
    excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh, dsh_global, row_device)
def check_string_format(input_string):
    """Return True when *input_string* is an optional bitfield column name,
    i.e. 'bit_name_N' or 'bit_source_N' with N between 1 and 16."""
    bit_column_pattern = r'^bit_(name|source)_(1[0-6]|[1-9])$'
    return re.match(bit_column_pattern, input_string) is not None
def auth_moxa(dsh_global, row_device):
    """Write the authentication step for one Moxa to the global script:
    POST the default credentials and capture the API token into ${token}.

    Credentials come from the module-level default_user/default_password
    loaded from the .env file at import time.
    """
    device_name = str(row_device['device_name'])
    credentials = "{\"acceptEULA\": true, \"name\": \"" + default_user + "\", \"password\": \"" + default_password + "\"}"
    dsh_global.writelines([
        "### Authenticating " + device_name + "\n",
        'echo "Starting work on ' + device_name + '"\n',
        "token=$(curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/auth \\\n",
        '\t-H "Content-Type: application/json" \\\n',
        "\t-d '" + credentials + "' | jq -r \".data.token\") >> global_config.log\n\n",
        'echo -e "\\n" >> global_config.log\n\n',
    ])
def clean_modbus(dsh_global, row_device):
    """Write commands to the global script that delete every Modbus remote
    device (asset) on the target Moxa before reconfiguring it."""
    device_name = str(row_device['device_name'])
    dsh_global.writelines([
        f"### Cleaning Modbus {device_name}\n",
        f"echo \"CLEANING MODBUS ON {device_name}\"\n",
        "curl -s -X DELETE -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/modbusmaster/config/remote-devs \\\n",
        '\t-H "Content-Type: application/json" \\\n',
        '\t-H "mx-api-token: ${token}" \\\n',
        "\t-d '' >> global_config.log\n\n",
        'echo -e "\\n" >> global_config.log\n\n',
    ])
def clean_azure(dsh_global, row_device):
    """Write commands to the global script that list every Azure IoT Edge
    message definition on the target Moxa and delete them one by one."""
    device_name = str(row_device['device_name'])
    device_ip = str(row_device['device_ip_address_http'])
    messages_url = "https://" + device_ip + ":8443/api/v1/azure-iotedge/messages"
    # Loud banner in the log, repeated five times.
    banner = '"\\n' + "CLEANING AZURE\\n" * 5 + '"'
    dsh_global.write("echo -e " + banner + " >> global_config.log\n\n")
    dsh_global.write("### Cleaning Azure telemetry " + device_name + "\n")
    dsh_global.write('echo "CLEANING AZURE TELEMETRY ON ' + device_name + " " + device_ip + '"\n')
    # GET all message definitions; keep the JSON for the loop below.
    dsh_global.write("azure_topics=$(curl -s -X GET -k " + messages_url + " \\\n")
    dsh_global.write('\t-H "Content-Type: application/json" \\\n')
    dsh_global.write('\t-H "mx-api-token: ${token}" \\\n')
    dsh_global.write("\t-d '{}') >> global_config.log\n")
    # Iterate the returned ids and DELETE each message definition.
    dsh_global.write("echo \"${azure_topics}\" | jq -r '.data[].id' | while read -r id; do\n")
    dsh_global.write("\tcurl -s -X DELETE -k " + messages_url + "/${id}\\\n")
    dsh_global.write('\t\t-H "Content-Type: application/json" \\\n')
    dsh_global.write('\t\t-H "mx-api-token: ${token}" \\\n')
    dsh_global.write("\t\t-d '{}' >> global_config.log\n\n")
    dsh_global.write("done\n")
    dsh_global.write('echo -e "\\n" >> global_config.log\n\n')
def main():
    """Drive the whole generation: for every device on the DEVICES sheet,
    emit a per-device configuration script and append to the global script.
    """
    # The per-device script handle is a module-level global because
    # bitfield_jq_filter() writes to it without taking it as a parameter.
    global dsh
    # Unique (ip_address, slave_id, port) combos created so far.
    # NOTE(review): this local shadows the module-level 'assets' list.
    assets = []
    # Read the DEVICES spreadsheet from the input datamodel workbook.
    df_slave = pd.read_excel(input_datamodel, sheet_name='DEVICES', header = 0)
    # The below loop formats the column names properly by replacing the \n with space
    df_slave_columns = list(df_slave.columns)
    for k in range(len(df_slave_columns)):
        if '\n' in df_slave_columns[k]:
            df_slave_columns[k] = df_slave_columns[k].replace("\n", " ")
        df_slave = df_slave.rename(columns = {df_slave.columns[k]:df_slave_columns[k]})
    # Log (but do not fail on) DEVICES rows missing a name and/or IP address.
    null_elements_1 = list(df_slave['device_name'].notnull())
    null_elements_2 = list(df_slave['device_ip_address_http'].notnull())
    for index in range(len(null_elements_1)):
        if null_elements_1[index] == False:
            if null_elements_2[index] == False:
                logging.debug(f"Missing IP address and name for device at line ${index}.")
            else:
                logging.debug(f"Missing name for device at line ${index} ({df_slave.at[index, 'device_ip_address_http']})")
        else:
            if null_elements_2[index] == False:
                logging.debug(f"Missing IP address for device at line ${index} ({df_slave.at[index, 'device_name']})")
    # try:
    # if null_elements_1[index] == False:
    # if null_elements_2[index] == False:
    # logging.debug("The slave device %s at index %d is not considered due to missing data model and missing IP address \n",df_slave.at[index,'Equipment Designation'],index+2)
    # else:
    # logging.debug("The slave device %s at index %d is not considered due to missing data model \n",df_slave.at[index,'Equipment Designation'],index+2)
    # else:
    # if null_elements_2[index] == False:
    # logging.debug("The slave device %s at index %d is not considered due to missing IP address \n",df_slave.at[index,'Equipment Designation'],index+2)
    # except Exception as e:
    # print(e)
    # Only devices with both a name and an IP address are processed.
    filtered_df_slave = df_slave[df_slave['device_name'].notnull()]
    filtered_df_slave = filtered_df_slave[filtered_df_slave['device_ip_address_http'].notnull()]
    slave_no = 1  # remoteDevId counter, unique across ALL devices
    current_device = ""
    # NOTE(review): dsh_global is never explicitly closed — relies on
    # interpreter exit to flush; confirm acceptable.
    dsh_global = open (global_shell_script_name, 'w')
    for index, row_device in filtered_df_slave.iterrows():
        # Global script: authenticate, then wipe existing Modbus/Azure config.
        auth_moxa(dsh_global, row_device)
        clean_modbus(dsh_global, row_device)
        clean_azure(dsh_global, row_device)
        current_device = str(row_device['device_name'])
        logging.debug("Defining parameters for " + current_device + " Moxa device...")
        # One generated shell script per device.
        dsh = open (shell_script_name + current_device + '.sh', 'w')
        # ASSETS sheet: keep rows with all mandatory fields, for this device only.
        df_assets = pd.read_excel(input_datamodel, sheet_name='ASSETS', header = 0)
        df_assets_columns = ["asset_name", "asset_type", "ip_address", "port", "device"]
        filtered_df_assets = df_assets
        for k in range(len(df_assets_columns)):
            filtered_df_assets = filtered_df_assets[filtered_df_assets[df_assets_columns[k]].notnull()]
        filtered_df_assets = filtered_df_assets[filtered_df_assets['device'] == row_device['device_name']]
        filtered_df_assets.drop_duplicates(subset=['asset_type', 'ip_address', 'slave_id', 'port', 'device'], inplace=True)
        for index, row_asset in filtered_df_assets.iterrows():
            current_asset = {"ip_address": row_asset['ip_address'], "slave_id": row_asset['slave_id'], "port": row_asset['port']}
            exists = False
            existing = 0
            # Reuse an already-created asset when ip/slave_id/port all match.
            for a in range(len(assets)):
                if current_asset == assets[a]:
                    exists = True
                    existing = a + 1  # remoteDevIds start at 1
                    logging.debug("Asset " + row_asset['asset_name'] + " not created because it already exists. Processing it under the existing asset.")
                    print(f"Excel parsing for {row_asset['asset_type']}")
                    excel_parser(row_asset['asset_type'], existing, row_asset["asset_name"], allowed_name_characters, dsh, dsh_global, row_device)
                    break
            if exists != True:
                assets.append(current_asset)
                logging.debug("Creating new asset...")
                slave_script(slave_no, row_asset, dsh, dsh_global, row_device)
                slave_no = slave_no + 1
        # STEP 3: apply the staged Modbus configuration.
        common_code(dsh, dsh_global, row_device)
        # Collect the distinct jq filter names used by this device's metrics.
        # NOTE(review): jq_filter_set is never cleared between devices —
        # confirm filters are meant to accumulate across devices.
        for metric in range(len(all_metrics)):
            jq_filter_set.add(all_metrics[metric]['jqfilter'])
        # STEP 4: Azure telemetry topics; STEP 5: bitfield-specific topics.
        jq_filter(current_device, dsh, dsh_global, row_device)
        logging.debug("BITFIELDS STARTED")
        bitfield_jq_filter(current_device)
        logging.debug("BITDIELDS ENDED")
        logging.debug("Parameters for " + current_device + " Moxa device completed!")
        dsh_global.write("\n\n\n\n\n")
        # Metrics are per-device; reset before processing the next device.
        all_metrics.clear()
        #bitfields(dsh, filtered_df_assets, current_device, assets)
        dsh.write("# [STEP 6] Ending configuration.")
        dsh.close()
if __name__ == "__main__":
    logging.debug("Process started...")
    main()
    # Flush and close the log handlers before moving the log file into the
    # generated-files folder (moving an open log would lose buffered records).
    logging.shutdown()
    shutil.move("data_config_debug.log", dir_name + "/data_config_debug.log")