# ess-moxa-configuration-tools/Moulinette/data_path_config.py
import json
import pandas as pd
import inspect
import logging
import string
import os
import shutil
import datetime
import sys
import re
import warnings
import jq
import math
warnings.simplefilter(action='ignore', category=UserWarning)
# This list collects tpfunc tags for the corresponding slave devices, to be included in step 4
tags_of_tpfunc = []
# Variables required for TP function
expose_tags = []
subscribe_tags = []
assets = []
assets_name = {}
all_metrics = []
jq_filter_set = set()
allowed_name_characters = list(string.ascii_letters + string.digits)
allowed_name_characters.append('_')
allowed_name_characters.append('-')
allowed_name_characters.append('~')
allowed_name_characters.append('.')
logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', filename='data_config_debug.log', filemode='w', level=logging.DEBUG, datefmt='%Y%m%d%H%M%S')
dir_name = 'I-Sight_Generated_Files'
input_datamodel = 'DATAMODEL_1.0.6_MCO2.xlsx'
shell_script_name = dir_name + '/I-Sight_Configuration_'
if os.path.isdir(dir_name):
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    backup_dir_name = f"{dir_name}_backup_{timestamp}"
    shutil.move(dir_name, backup_dir_name)
    logging.debug("Folder " + dir_name + " already exists, backup made: " + backup_dir_name)
os.mkdir(dir_name)
if os.path.isfile(input_datamodel):
    logging.debug("Input datamodel " + str(input_datamodel) + " found, archiving it.")
    shutil.copy(input_datamodel, dir_name)
else:
    print(f"{input_datamodel} not found.")
    sys.exit(1)
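# excel_parser reads the per-asset-type sheet from the datamodel, registers one
# Modbus command per complete row (plus any BITFIELDS rows matching the asset
# type), and writes the corresponding [STEP 2] curl call into the shell script.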
def excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh):
    global tags_of_tpfunc
    global expose_tags
    global subscribe_tags
    df_prop = pd.read_excel(input_datamodel, sheet_name=sheet_det, header=0)
    df_prop_initial_size = len(df_prop)
    df_prop_columns = ["metric_name", "modbus_function", "modbus_address", "type"]
    # Normalize the first four column headers of the sheet to the canonical names
    # above, stripping any embedded line breaks along the way.
    for k in range(len(df_prop_columns)):
        if '\n' in df_prop_columns[k]:
            df_prop_columns[k] = df_prop_columns[k].replace("\n", " ")
        df_prop = df_prop.rename(columns={df_prop.columns[k]: df_prop_columns[k]})
    dsh.write("# [STEP 2] Assigning " + str(sheet_det) + " Modbus commands to asset " + str(assets_name[slave_no]) + "\n\n")
    slave_prop_list = []
    allowed_data_types = ['raw', 'boolean', 'int16', 'int32', 'int64', 'uint16', 'uint32', 'uint64', 'float', 'double', 'string']
    allowed_data_sizes = [1, 1, 1, 2, 4, 1, 2, 4, 2, 4, 1]
    total_considered_commands = 0
    # Keep only the rows where all four mandatory columns are populated.
    filtered_df_prop = df_prop
    for k in range(len(df_prop_columns)):
        filtered_df_prop = filtered_df_prop[filtered_df_prop[df_prop_columns[k]].notnull()]
    logging.debug("Starting registration of Modbus commands for " + str(assets_name[slave_no]) + ".")
    for index, row in filtered_df_prop.iterrows():
        step2_data = {}
        logging.debug("Registering command " + row['metric_name'] + "...")
        type_index = allowed_data_types.index(row['type'])
        default_quantity = allowed_data_sizes[type_index]
        # A missing quantity (NaN) would crash int(), so fall back to the default size.
        if pd.isna(row['modbus_quantity']) or (default_quantity != 1 and int(row['modbus_quantity']) % default_quantity != 0):
            logging.debug("Missing or wrong quantity (" + str(row['modbus_quantity']) + ") for data type " + str(row['type']) + ", using default size " + str(default_quantity) + ".")
            step2_data["readQuantity"] = default_quantity
        else:
            step2_data["readQuantity"] = int(row['modbus_quantity'])
        step2_data["remoteDevId"] = slave_no
        step2_data["name"] = row['metric_name']
        step2_data["mode"] = 0
        step2_data["func"] = row['modbus_function']
        step2_data["readAddress"] = int(row['modbus_address'])
        if not math.isnan(row['poll_interval']):
            step2_data["pollInterval"] = int(row['poll_interval'])
        else:
            logging.debug("Poll interval undefined, using 1000ms as default.")
            step2_data["pollInterval"] = 1000
        # NaN is truthy in Python, so test for a missing value explicitly.
        if not pd.isna(row['endian_swap']) and row['endian_swap']:
            step2_data["swap"] = int(row['endian_swap'])
        else:
            logging.debug("Endian swap undefined, not using swap as default.")
            step2_data["swap"] = 0
        step2_data["dataType"] = row['type']
        if not math.isnan(row['scaling_factor']):
            step2_data['scalingFunc'] = 1
            step2_data['interceptSlope'] = row['scaling_factor']
        else:
            logging.debug("No scaling factor provided, using 1 (no scaling) as default.")
        logging.debug(row['metric_name'])
        total_considered_commands = total_considered_commands + 1
        logging.debug(step2_data)
        slave_prop_list.append(step2_data)
        all_metrics.append({"remoteDevId": step2_data["remoteDevId"], "metric_name": step2_data["name"], "jqfilter": row['jq_filter_name']})
    logging.debug(str(total_considered_commands) + "/" + str(df_prop_initial_size) + " commands registered.")
    if total_considered_commands < df_prop_initial_size:
        logging.debug("A command may not be registered if its row is incomplete.")
    # Append the BITFIELDS rows that belong to this asset type. The optional
    # bit_name_*/bit_source_* columns (matched by check_string_format) may be empty.
    df_bitfield = pd.read_excel(input_datamodel, sheet_name='BITFIELDS', header=0)
    df_bitfield_columns = ["metric_name", "modbus_function", "modbus_address"]
    filtered_df_bitfield = df_bitfield
    for column in df_bitfield_columns:
        if not check_string_format(column):
            filtered_df_bitfield = filtered_df_bitfield[filtered_df_bitfield[column].notnull()]
    for index, row in filtered_df_bitfield.iterrows():
        if row['asset_type'] == sheet_det:
            logging.debug("Bitfield " + row['metric_name'] + " assigned to " + str(assets_name[slave_no]) + ".")
            step2_data = {}
            step2_data["remoteDevId"] = slave_no
            step2_data["name"] = row['metric_name']
            step2_data["mode"] = 0
            step2_data["func"] = row['modbus_function']
            step2_data["readAddress"] = int(row['modbus_address'])
            if not pd.isna(row['modbus_quantity']) and row['modbus_quantity']:
                step2_data["readQuantity"] = int(row['modbus_quantity'])
            else:
                step2_data["readQuantity"] = 1
            if not pd.isna(row['poll_interval']) and row['poll_interval']:
                step2_data["pollInterval"] = int(row['poll_interval'])
            else:
                step2_data["pollInterval"] = 1000
            if not pd.isna(row['endian_swap']) and row['endian_swap']:
                step2_data["swap"] = int(row['endian_swap'])
            else:
                step2_data["swap"] = 0
            step2_data["dataType"] = 'uint16'
            slave_prop_list.append(step2_data)
            all_metrics.append({"remoteDevId": step2_data["remoteDevId"], "metric_name": step2_data["name"], "jqfilter": row['jq_filter_name']})
    slave_prop_list = json.dumps(slave_prop_list)
    dsh.write(
        inspect.cleandoc(
            """sudo curl -X POST https://127.0.0.1:8443/api/v1/modbusmaster/config/mcmds?autoCreate=tags \\
            -H "Content-Type: application/json" \\
            -H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
            -d """) + "'" + str(slave_prop_list) + "'" + """ -k | jq
"""
    )
    dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
    dsh.write("\n\n")
def common_code(dsh):
    dsh.write("# [STEP 3] Applying Modbus configuration\n\n")
    dsh.write(
        inspect.cleandoc(
            """sudo curl -X PUT https://127.0.0.1:8443/api/v1/modbusmaster/control/config/apply \\
            -H "Content-Type: application/json" \\
            -H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
            -d """) + "'" + "'" + """ -k | jq
\n"""
    )
    dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
    dsh.write("\n\n")
def jq_filter(current_device, dsh):
    df_validation = pd.read_excel(input_datamodel, sheet_name='VALIDATION', header=0)
    filtered_df_validation = df_validation[df_validation['jq_filter_name'].notnull()]
    filtered_df_validation = filtered_df_validation[filtered_df_validation['jq_filter'].notnull()]
    for index, row in filtered_df_validation.iterrows():
        filter_name = row['jq_filter_name']
        if filter_name in jq_filter_set and filter_name != "bitfield":
            logging.debug("Creating standard telemetry topic " + filter_name + ".")
            jq_data = {}
            jq_data["enable"] = True
            jq_data["properties"] = [{"key": "deviceType", "value": "AC_GATEWAY"}, {"key": "cdid", "value": current_device}]
            jq_data["outputTopic"] = filter_name
            # NaN is truthy in Python, so check for missing values explicitly before int().
            if not pd.isna(row['message_polling_interval']) and row['message_polling_interval']:
                jq_data["pollingInterval"] = int(row['message_polling_interval'])
            else:
                jq_data["pollingInterval"] = 1
            has_time_limit = not pd.isna(row['message_time_limit']) and bool(row['message_time_limit'])
            has_size_limit = not pd.isna(row['message_size_limit']) and bool(row['message_size_limit'])
            if has_time_limit and has_size_limit:
                jq_data["sendOutThreshold"] = {"size": int(row['message_size_limit']), "time": int(row['message_time_limit'])}
            elif not has_time_limit and has_size_limit:
                jq_data["sendOutThreshold"] = {"size": int(row['message_size_limit']), "time": 30}
            elif has_time_limit and not has_size_limit:
                jq_data["sendOutThreshold"] = {"size": 256000, "time": int(row['message_time_limit'])}
            else:
                jq_data["sendOutThreshold"] = {"size": 256000, "time": 30}
            final_filter = row["jq_filter"].replace("***device_name***", current_device)
            # Group the metric names that use this filter by asset name.
            cmd_list = {}
            for metric in all_metrics:
                if metric["jqfilter"] == filter_name:
                    if assets_name[metric["remoteDevId"]] not in cmd_list:
                        cmd_list[assets_name[metric["remoteDevId"]]] = []
                    cmd_list[assets_name[metric["remoteDevId"]]].append(metric["metric_name"])
            jq_data["format"] = final_filter
            jq_data["tags"] = {"modbus_tcp_master": cmd_list}
            json_object = json.dumps(jq_data)
            dsh.write("# [STEP 4] Creating " + filter_name + " standard Azure telemetry topic\n\n")
            dsh.write(
                inspect.cleandoc(
                    """sudo curl -X POST https://127.0.0.1:8443/api/v1/azure-iotedge/messages \\
                    -H "Content-Type: application/json" \\
                    -H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
                    -d """) + "'" + str(json_object) + "'" + """ -k | jq
\n"""
            )
            dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
            dsh.write("\n\n")
STATIC_JQ_FILTER_EMISSIONDATE = "(now|todateiso8601)"
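# bitfield_jq_filter creates one telemetry topic per bitfield metric: it builds a
# jq format string with up to 16 fault entries (one per bit) and writes the
# corresponding [STEP 5] curl call into the shell script.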
def bitfield_jq_filter(current_device, dsh):
    STATIC_JQ_FILTER_EMISSIONDATE = "(now|todateiso8601)"
    STATIC_JQ_FILTER_TIMESTAMP = "((.ts/1000000)|todateiso8601)"
    df_bitfield = pd.read_excel(input_datamodel, sheet_name='BITFIELDS', header=0)
    for i, r in df_bitfield.iterrows():
        for metric in all_metrics:
            if metric["metric_name"] == r["metric_name"]:
                logging.debug("Creating bitfield specific telemetry topic " + metric['metric_name'] + " for " + str(assets_name[metric["remoteDevId"]]) + ".")
                # Reset the per-bit lists for every metric so the placeholder
                # substitution below only uses this metric's own bits.
                STATIC_JQ_FILTER_BITNAME = []
                STATIC_JQ_FILTER_BITSOURCE = []
                STATIC_JQ_FILTER_BITFORMULA = []
                json_request = {}
                json_request["enable"] = True
                json_request["properties"] = [{"key": "deviceType", "value": "AC_GATEWAY"}, {"key": "cdid", "value": current_device}]
                json_request["outputTopic"] = metric["metric_name"]
                json_request["pollingInterval"] = 60
                json_request["sendOutThreshold"] = {"size": 256000, "time": 180}
                json_format = {}
                json_format["version"] = 3
                json_format["deviceId"] = current_device
                json_format["emissionDate"] = "PLACEHOLDER_EMISSIONDATE"
                json_format["deviceType"] = "AC_GATEWAY"
                json_format["faults"] = []
                for bit_no in range(1, 17):
                    current_power = pow(2, bit_no - 1)
                    current_name = "bit_name_" + str(bit_no)
                    current_source = "bit_source_" + str(bit_no)
                    current_formula = "((.dataValue)/" + str(current_power) + "%2)"
                    current_fault = {"name": "PLACEHOLDER_NAME", "idAsset": "PLACEHOLDER_SOURCE", "active": "PLACEHOLDER_FORMULA", "timestamp": "PLACEHOLDER_TIMESTAMP"}
                    if not pd.isna(r[current_name]) and not pd.isna(r[current_source]):
                        json_format["faults"].append(current_fault)
                        STATIC_JQ_FILTER_BITNAME.append(r[current_name])
                        STATIC_JQ_FILTER_BITSOURCE.append(r[current_source])
                        STATIC_JQ_FILTER_BITFORMULA.append(current_formula)
                    elif not pd.isna(r[current_name]) and pd.isna(r[current_source]):
                        # No explicit source: fall back to the owning asset's name.
                        json_format["faults"].append(current_fault)
                        STATIC_JQ_FILTER_BITNAME.append(r[current_name])
                        STATIC_JQ_FILTER_BITSOURCE.append(str(assets_name[metric["remoteDevId"]]))
                        STATIC_JQ_FILTER_BITFORMULA.append(current_formula)
                json_request["format"] = json.dumps(json_format)
                json_request["tags"] = {}
                json_request["tags"]["modbus_tcp_master"] = {}
                json_request["tags"]["modbus_tcp_master"][assets_name[metric["remoteDevId"]]] = [metric["metric_name"]]
                json_request_dump = json.dumps(json_request)
                # The format string is double-encoded, so quoted placeholders appear as \"...\".
                json_request_dump = json_request_dump.replace("\\\"PLACEHOLDER_EMISSIONDATE\\\"", STATIC_JQ_FILTER_EMISSIONDATE)
                for bit in range(len(STATIC_JQ_FILTER_BITNAME)):
                    json_request_dump = json_request_dump.replace("PLACEHOLDER_NAME", STATIC_JQ_FILTER_BITNAME[bit], 1)
                    json_request_dump = json_request_dump.replace("PLACEHOLDER_SOURCE", STATIC_JQ_FILTER_BITSOURCE[bit], 1)
                    json_request_dump = json_request_dump.replace("\\\"PLACEHOLDER_FORMULA\\\"", STATIC_JQ_FILTER_BITFORMULA[bit], 1)
                    json_request_dump = json_request_dump.replace("\\\"PLACEHOLDER_TIMESTAMP\\\"", STATIC_JQ_FILTER_TIMESTAMP, 1)
                dsh.write("# [STEP 5] Creating " + metric['metric_name'] + " bitfield specific Azure telemetry topic\n\n")
                logging.debug(json_request_dump)
                dsh.write(
                    inspect.cleandoc(
                        """sudo curl -X POST https://127.0.0.1:8443/api/v1/azure-iotedge/messages \\
                        -H "Content-Type: application/json" \\
                        -H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
                        -d """) + "'" + json_request_dump + "'" + """ -k | jq
\n"""
                )
                dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
                dsh.write("\n\n")
def slave_script(slave_no, row, dsh):
    # Keep only the allowed characters in the asset name.
    slave_device = ""
    for k in row['asset_name']:
        if k in allowed_name_characters:
            slave_device = slave_device + k
    slave_ip = row['ip_address']
    slave_port = row['port']
    slave_id = row['slave_id']
    assets_name[slave_no] = slave_device
    step1_dict = {"masterTcpIfaceId": 1, "name": slave_device, "enable": 1, "slaveIpaddr": slave_ip, "slaveTcpPort": slave_port, "slaveId": slave_id, "remoteDevId": slave_no}
    json_object = json.dumps(step1_dict)
    dsh.write("# [STEP 1] Creating asset " + slave_device + "\n\n")
    dsh.write(
        inspect.cleandoc(
            """sudo curl -X POST https://127.0.0.1:8443/api/v1/modbusmaster/config/remote-devs \\
            -H "Content-Type: application/json" \\
            -H "mx-api-token:$(sudo cat /var/thingspro/data/mx-api-token)" \\
            -d """) + "'" + str(json_object) + "'" + """ -k | jq
\n"""
    )
    dsh.write('printf "\\n \\n" >> data_shell_script.log\n')
    dsh.write("\n\n")
    # Add the contents related to each slave device.
    sheet_det = row['asset_type']
    logging.debug("Asset " + slave_device + " created!")
    # The function below registers the data required for step 2 of the shell script.
    excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh)
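# tpfunc_gen appends shell code that deploys a ThingsPro Edge function
# (demoDataFunc) publishing subscribed tag values on change.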
def tpfunc_gen():
    global expose_tags
    global subscribe_tags
    package = {}
    package["name"] = "demoDataFunc"
    package["enabled"] = True
    package["trigger"] = {"driven": "timeDriven", "timeDriven": {"mode": "boot", "intervalSec": 2, "cronJob": ""}}
    package["expose"] = {"tags": expose_tags}
    package["executable"] = {"language": "python"}
    package["params"] = {"setting": {"tpe_publish_interval": 1, "test_mode": False}, "subscribeTags": subscribe_tags}
    try:
        package = json.dumps(package)
    except Exception as e:
        print(f"The exception here is {e}")
    # NOTE: shell_script_name_final is not defined in this module; it must be set
    # before tpfunc_gen() is called (main() currently never calls this function).
    dsh = open(shell_script_name_final, 'a')
    dsh.write("\n# Shell Code to create TP Function: \n\n")
    cmd1 = """echo '#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nfrom thingspro.edge.func_v1 import package\nfrom thingspro.edge.tag_v1 import tag as tpeTAG\nfrom collections import deque\nimport time\nimport logging\n\n__author__ = "Moxa Europe"\n__license__ = "MIT"\n__version__ = "0.0.1"\n__status__ = "beta"\n\nlog_format = "%(asctime)s: %(levelname)s - %(name)s - %(message)s"\nlogging.basicConfig(level=logging.INFO, datefmt="[%Y-%m-%d %H:%M:%S]", format=log_format)\nlogger = logging.getLogger(__name__)\n\n\n\ncache = {}\n\nclass TpeSaftContext():\n def __init__(self):\n \n # initialize app default settings \n self._tpe_publish_interval = 1\n self._test_mode = False\n \n self._publisher = None\n self._subscriber = None \n \n self._vtag_tags = 0 \n self._vtag_publish_data = deque(maxlen=100)\n self._tagList = [] \n \n\n # initialize virtual tags \n self.tagValueInputVoltage = 0\n self.tagValueInputCurrent = 0\n \n # create config instance to read parameters from package.json file \n config = package.Configuration()\n self._params = config.parameters()\n \n # create subscriber client instance\n self._subscriber = tpeTAG.Subscriber()\n self._subscriber.subscribe_callback(self.callback)\n \n def parse_configuration(self):\n print("*********** Parse Configuration ***********")\n # create config instance to read parameters from package.json file \n \n if "setting" in self._params:\n if "tpe_publish_interval" in self._params["setting"]:\n self._tpe_publish_interval = self._params["setting"]["tpe_publish_interval"]\n print("tpe_publish_interval : " + str(self._tpe_publish_interval))\n \n if "test_mode" in self._params["setting"]:\n self._test_mode = self._params["setting"]["test_mode"]\n print("test_mode : " + str(self._test_mode))\n \n if "subscribeTags" in self._params:\n self._tagList = self._params["subscribeTags"]\n print("subscribeTags: ", self._params["subscribeTags"])\n \n print("**** Parse Configuration Successful! ****")\n return\n \n \n def _callback(self, data={}):\n #print("************** callback function is called *****************")\n \n # Get tag names from package.json file\n for tagDict in self._tagList:\n print(tagDict)\n if "tagName" in tagDict:\n tagName = tagDict["tagName"]\n print(tagName)\n \n # Compare tagHub tagName with tagName comes from package.json file.\n if data["tagName"] == tagName:\n self.put_to_publish_queue("virtual", data["srcName"]+"_onChange", data["tagName"], data["dataValue"], "double", data["ts"]) \n\n return\n \n \n def callback(self, data={}):\n #print("************** callback function is called *****************")\n \n # Get tag names from package.json file\n for tagDict in self._tagList:\n if "tagName" in tagDict:\n tagName = tagDict["tagName"]\n \n # Compare tagHub tagName with tagName comes from package.json file.\n if data["tagName"] == tagName:\n global cache \n # The following stores distinct tagName in cache\n if tagName not in cache.keys():\n cache[tagName] = data["dataValue"] \n print("CACHE:", cache) \n self.put_to_publish_queue("virtual", data["srcName"]+"_onChange", data["tagName"], data["dataValue"], "double", data["ts"]) \n print("Cache value of virtual/onChange/" + str(tagName) + " :" + str(cache[tagName])) \n else:\n if cache[tagName] == data["dataValue"]:\n print("No Changes in " + tagName + ": ", data["dataValue"])\n else:\n print("Updated virtual/onChange/" + str(tagName) + " by new value: " + str(data["dataValue"])) \n self.put_to_publish_queue("virtual", data["srcName"]+"_onChange", data["tagName"], data["dataValue"], "double", data["ts"]) \n # update cache tagValue with new value\n cache[tagName] = data["dataValue"]\n print("CACHE:", cache)\n \n return\n \n def register_tpe_callback(self):\n # create subscriber client instance\n self._subscriber = tpeTAG.Subscriber()\n self._subscriber.subscribe_callback(self.callback)\n \n def subscribe_tag(self):\n #print("**************** subscribe_tag function is called ***************")\n \n if "subscribeTags" in self._params:\n tags = self._params["subscribeTags"]\n #print("[Subscribe]:", tags)\n for tag in tags:\n try:\n self._subscriber.subscribe(tag["prvdName"], tag["srcName"], [tag["tagName"]])\n except ValueError:\n pass\n if self._test_mode :\n self._subscriber.subscribe("system", "status", ["cpuUsage"])\n return\n \n \n def put_to_publish_queue(self, prvdName, srcName, tagName, dataValue, dataType, timestamp):\n #print("****************** put_to_publish_queue function is called *********************")\n tag = {\n "prvdName": prvdName,\n "srcName": srcName,\n "tagName": tagName, \n "dataValue": dataValue,\n "dataType" : dataType,\n "ts": timestamp\n } \n self._vtag_publish_data.append(tag)\n return True\n \n \n def tpe_publish(self): \n #print("************** tpe_publish function is called *****************")\n \n #self.subscribe_tag()\n \n # print("Length _vtag_publish_data", len(self._vtag_publish_data))\n while len(self._vtag_publish_data)>0:\n tag = self._vtag_publish_data.popleft()\n self._publisher.publish(tag)\n print("[Publish]:", tag)\n #print("publish: " + tag["tagName"] + ":" + str(tag["dataValue"]) )\n return\n \n \n \nif __name__ == "__main__":\n\n my_app = TpeSaftContext()\n \n # initial configuration from package.json file\n my_app.parse_configuration()\n\n # subscribe Tags\n my_app.subscribe_tag()\n \n # create publisher client instance\n my_app._publisher = tpeTAG.Publisher()\n \n # create direct access instance\n # my_app._accesser = tpeTAG.Access()\n \n \n # infinite loop\n while True: \n my_app.tpe_publish()\n print("sleep " + str(my_app._tpe_publish_interval))\n time.sleep(my_app._tpe_publish_interval)' > demoDataFunc/index.py"""
    cmd1 = cmd1.replace("\n", "\\n")
    dsh.write(inspect.cleandoc("""mkdir demoDataFunc"""))
    dsh.write('\n')
    dsh.write(cmd1)
    dsh.write('\n')
    dsh.write("""echo '""" + str(package) + "'" + " | jq '.'" + " > demoDataFunc/package.json")
    dsh.write('\n')
    dsh.write(inspect.cleandoc("""tpfunc add demoDataFunc"""))
    dsh.close()
    # Append the per-device script to the final script, then remove the original.
    with open(shell_script_name, 'r') as firstfile, open(shell_script_name_final, 'a') as secondfile:
        for line in firstfile:
            secondfile.write(line)
    os.remove(shell_script_name)
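# check_string_format returns True when the given column name is one of the
# optional bit_name_1..16 / bit_source_1..16 BITFIELDS columns.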
def check_string_format(input_string):
    # Pattern matches bit_name_1..bit_name_16 and bit_source_1..bit_source_16.
    pattern = r'^bit_(name|source)_(1[0-6]|[1-9])$'
    match = re.match(pattern, input_string)
    # Return True if there is a match, False otherwise.
    return bool(match)
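# main iterates over the DEVICES sheet and, for every Moxa device, generates a
# configuration shell script covering steps 1 through 6.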
def main():
    # Create the shell script to write content.
    global dsh
    assets = []
    # Read the DEVICES sheet from the input datamodel.
    df_slave = pd.read_excel(input_datamodel, sheet_name='DEVICES', header=0)
    logging.debug(df_slave)
    # The loop below formats the column names properly by replacing '\n' with a space.
    df_slave_columns = list(df_slave.columns)
    for k in range(len(df_slave_columns)):
        if '\n' in df_slave_columns[k]:
            df_slave_columns[k] = df_slave_columns[k].replace("\n", " ")
        df_slave = df_slave.rename(columns={df_slave.columns[k]: df_slave_columns[k]})
    # Log every device row that is skipped because of missing data.
    null_elements_1 = list(df_slave['device_name'].notnull())
    null_elements_2 = list(df_slave['device_ip_address_http'].notnull())
    for index in range(len(null_elements_1)):
        try:
            if not null_elements_1[index]:
                if not null_elements_2[index]:
                    logging.debug("The slave device %s at index %d is not considered due to missing data model and missing IP address \n", df_slave.at[index, 'Equipment Designation'], index + 2)
                else:
                    logging.debug("The slave device %s at index %d is not considered due to missing data model \n", df_slave.at[index, 'Equipment Designation'], index + 2)
            else:
                if not null_elements_2[index]:
                    logging.debug("The slave device %s at index %d is not considered due to missing IP address \n", df_slave.at[index, 'Equipment Designation'], index + 2)
        except Exception as e:
            print(e)
    filtered_df_slave = df_slave[df_slave['device_name'].notnull()]
    filtered_df_slave = filtered_df_slave[filtered_df_slave['device_ip_address_http'].notnull()]
    slave_no = 1
    current_device = ""
    for index, row in filtered_df_slave.iterrows():
        current_device = str(row['device_name'])
        logging.debug("Defining parameters for " + current_device + " Moxa device...")
        dsh = open(shell_script_name + current_device + '.sh', 'w')
        df_assets = pd.read_excel(input_datamodel, sheet_name='ASSETS', header=0)
        df_assets_columns = ["asset_name", "asset_type", "ip_address", "port", "device"]
        filtered_df_assets = df_assets
        for k in range(len(df_assets_columns)):
            filtered_df_assets = filtered_df_assets[filtered_df_assets[df_assets_columns[k]].notnull()]
        filtered_df_assets = filtered_df_assets[filtered_df_assets['device'] == row['device_name']]
        filtered_df_assets.drop_duplicates(subset=['asset_type', 'ip_address', 'slave_id', 'port', 'device'], inplace=True)
        # Distinct loop variables so the outer device row is not shadowed.
        for asset_index, asset_row in filtered_df_assets.iterrows():
            current_asset = {"ip_address": asset_row['ip_address'], "slave_id": asset_row['slave_id'], "port": asset_row['port']}
            exists = False
            existing = 0
            for a in range(len(assets)):
                if current_asset == assets[a]:
                    exists = True
                    existing = a + 1
                    logging.debug("Asset " + asset_row['asset_name'] + " not created because it already exists. Processing it under the existing asset.")
                    excel_parser(asset_row['asset_type'], existing, asset_row["asset_name"], allowed_name_characters, dsh)
                    break
            if not exists:
                assets.append(current_asset)
                logging.debug("Creating new asset...")
                slave_script(slave_no, asset_row, dsh)
                slave_no = slave_no + 1
        common_code(dsh)
        for metric in all_metrics:
            jq_filter_set.add(metric['jqfilter'])
        jq_filter(current_device, dsh)
        logging.debug("BITFIELDS STARTED")
        bitfield_jq_filter(current_device, dsh)
        logging.debug("BITFIELDS ENDED")
        logging.debug("Parameters for " + current_device + " Moxa device completed!")
        all_metrics.clear()
        #bitfields(dsh, filtered_df_assets, current_device, assets)
        dsh.write("# [STEP 6] Ending configuration.")
        dsh.close()


if __name__ == "__main__":
    logging.debug("Process started...")
    main()
    logging.shutdown()
    shutil.move("data_config_debug.log", dir_name + "/data_config_debug.log")