From 034507434deedb73745bde1f98b2f0be5261725e Mon Sep 17 00:00:00 2001 From: Quentin WEPHRE Date: Thu, 21 Nov 2024 16:07:03 +0100 Subject: [PATCH] Secrets & Global Site Configuration --- .gitignore | 5 +- Moulinette/data_path_config.py | 135 ++++++++++++++++----------------- Python/azure_iot_hub_1.py | 19 ++++- Python/danish_D1_api.py | 11 ++- Python/danish_batch_api.py | 10 ++- Python/danish_batch_scp.py | 11 ++- Python/stat_json_buffer.py | 2 +- 7 files changed, 115 insertions(+), 78 deletions(-) diff --git a/.gitignore b/.gitignore index 6caaf75..fc0bff6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,5 @@ I-Sight_Generated_Files* -DATAMODEL_* \ No newline at end of file +DATAMODEL_* +.env +DEV_L_MYRTLE.xlsx +global_config.log diff --git a/Moulinette/data_path_config.py b/Moulinette/data_path_config.py index 888d0f4..4081ccf 100644 --- a/Moulinette/data_path_config.py +++ b/Moulinette/data_path_config.py @@ -9,11 +9,16 @@ import datetime import sys import re import warnings -import jq import math +from dotenv import load_dotenv warnings.simplefilter(action='ignore', category=UserWarning) +load_dotenv() + +default_user = str(os.getenv("DEFAULT_MOXA_USER")) +default_password = str(os.getenv("DEFAULT_MOXA_PASSWORD")) + # This dictionary will have tpfunc tags for corresponding slave device to be included in step 4 tags_of_tpfunc = [] @@ -35,8 +40,9 @@ allowed_name_characters.append('.') logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', filename='data_config_debug.log', filemode='w', level=logging.DEBUG, datefmt='%Y%m%d%H%M%S') dir_name = 'I-Sight_Generated_Files' -input_datamodel = 'DATAMODEL_1.0.6_SASK.xlsx' +input_datamodel = 'DATAMODEL_1.0.6_DANISH_TEST.xlsx' shell_script_name = dir_name + '/I-Sight_Configuration_' +global_shell_script_name = dir_name + '/I-Sight_Global_Configuration.sh' if (os.path.isdir(dir_name)): timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S") @@ -50,10 +56,10 @@ if os.path.isfile(input_datamodel): 
logging.debug("Input datamodel " + str(input_datamodel) + " found, archiving it.") shutil.copy(input_datamodel, dir_name) else: - print(f"{input_datamodel} not found.") + print(f"{input_datamodel} not found. Exiting!") sys.exit(1) -def excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh): +def excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh, dsh_global, row_device): global tags_of_tpfunc global expose_tags global subscribe_tags @@ -76,13 +82,11 @@ def excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh total_considered_commands = 0 for index, row in df_prop.iterrows(): - #print(str(index) + " " + str(row)) filtered_df_prop = df_prop for k in range(len(df_prop_columns)): filtered_df_prop = filtered_df_prop[filtered_df_prop[df_prop_columns[k]].notnull()] logging.debug("Starting registration of Modbus commands for " + str(assets_name[slave_no]) + ".") for index, row in filtered_df_prop.iterrows(): - #print(str(index) + " " + str(row['metric_name'])) step2_data = {} logging.debug("Registering command " + row['metric_name'] + "...") if allowed_data_sizes[allowed_data_types.index(row['type'])] != 1 and int(row['modbus_quantity']) % allowed_data_sizes[allowed_data_types.index(row['type'])] != 0: @@ -106,7 +110,6 @@ def excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh logging.debug("Endian Swap undefined, not using swap as default.") step2_data["swap"] = 0 step2_data["dataType"] = row['type'] - print(row['scaling_factor']) if not math.isnan(row['scaling_factor']): step2_data['scalingFunc'] = 1 step2_data['interceptSlope'] = row['scaling_factor'] @@ -114,7 +117,6 @@ def excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh logging.debug("No scaling factor provided, using 1 (no scaling) as default.") logging.debug(row['metric_name']) total_considered_commands = total_considered_commands + 1 - print(step2_data) slave_prop_list.append(step2_data) 
all_metrics.append({"remoteDevId": step2_data["remoteDevId"], "metric_name": step2_data["name"], "jqfilter": row['jq_filter_name']}) logging.debug(str(total_considered_commands) + "/" + str(df_prop_initial_size) + " commands registered.") @@ -165,8 +167,14 @@ def excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh dsh.write('printf "\\n \\n" >> data_shell_script.log\n') dsh.write("\n\n") + dsh_global.write("### Creating Modbus commands " + "" + " for " + str(row_device['device_name']) + "\n") + dsh_global.write("curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/modbusmaster/config/mcmds?autoCreate=tags \\\n") + dsh_global.write("\t-H \"Content-Type: application/json\" \\\n") + dsh_global.write("\t-H \"mx-api-token: ${token}\" \\\n") + dsh_global.write("\t-d '" + json.dumps(slave_prop_list) + "' >> global_config.log\n\n") -def common_code(dsh): + +def common_code(dsh, dsh_global, row_device): dsh.write("# [STEP 3] Applying Modbus configuration\n\n") dsh.write( inspect.cleandoc( @@ -179,7 +187,12 @@ def common_code(dsh): dsh.write('printf "\\n \\n" >> data_shell_script.log\n') dsh.write("\n\n") -def jq_filter(current_device, dsh): + dsh_global.write("### Applying Modbus configuration " + "" + " for " + str(row_device['device_name']) + "\n") + dsh_global.write("curl -s -X PUT -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/modbusmaster/control/config/apply \\\n") + dsh_global.write("\t-H \"Content-Type: application/json\" \\\n") + dsh_global.write("\t-H \"mx-api-token: ${token}\" >> global_config.log\n\n") + +def jq_filter(current_device, dsh, dsh_global, row_device): df_validation = pd.read_excel(input_datamodel, sheet_name='VALIDATION', header = 0) filtered_df_validation = df_validation[df_validation['jq_filter_name'].notnull()] @@ -222,6 +235,12 @@ def jq_filter(current_device, dsh): dsh.write('printf "\\n \\n" >> data_shell_script.log\n') dsh.write("\n\n") + dsh_global.write("### Creating 
Azure messages " + "" + " for " + str(row_device['device_name']) + "\n") + dsh_global.write("curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/azure-iotedge/messages \\\n") + dsh_global.write("\t-H \"Content-Type: application/json\" \\\n") + dsh_global.write("\t-H \"mx-api-token: ${token}\" \\\n") + dsh_global.write("\t-d '" + str(json_object) + "' >> global_config.log\n\n") + STATIC_JQ_FILTER_EMISSIONDATE = "(now|todateiso8601)" def bitfield_jq_filter(current_device): @@ -292,16 +311,16 @@ def bitfield_jq_filter(current_device): -def slave_script(slave_no, row, dsh): +def slave_script(slave_no, row_asset, dsh, dsh_global, row_device): slave_device = "" - for k in row['asset_name']: + for k in row_asset['asset_name']: if k in allowed_name_characters: slave_device = slave_device +k - slave_ip = row['ip_address'] - slave_port = row['port'] - slave_id = row['slave_id'] + slave_ip = row_asset['ip_address'] + slave_port = row_asset['port'] + slave_id = row_asset['slave_id'] assets_name[slave_no] = slave_device step1_dict = {"masterTcpIfaceId": 1,"name": slave_device,"enable": 1,"slaveIpaddr": slave_ip,"slaveTcpPort": slave_port, "slaveId": slave_id, "remoteDevId": slave_no} json_object = json.dumps(step1_dict) @@ -317,49 +336,18 @@ def slave_script(slave_no, row, dsh): dsh.write('printf "\\n \\n" >> data_shell_script.log\n') dsh.write("\n\n") + dsh_global.write("### Creating " + str(row_asset['asset_name']) + " for " + str(row_device['device_name']) + "\n") + dsh_global.write("curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/modbusmaster/config/remote-devs \\\n") + dsh_global.write("\t-H \"Content-Type: application/json\" \\\n") + dsh_global.write("\t-H \"mx-api-token: ${token}\" \\\n") + dsh_global.write("\t-d '" + str(json_object) + "' >> global_config.log\n\n") + # Add the contents related to each slave device - sheet_det = row['asset_type'] + sheet_det = row_asset['asset_type'] logging.debug("Asset 
" + slave_device + " created!") # The below function retrieves the data required for step 2 of shell script - excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh) - -def tpfunc_gen(): - global expose_tags - global subscribe_tags - - package = {} - package["name"] = "demoDataFunc" - package["enabled"] = False - package["trigger"] = {"driven":"timeDriven","timeDriven":{"mode":"boot","intervalSec":2,"cronJob":""}} - package["expose"] = {"tags": expose_tags} - package["executable"] = {"language":"python"} - package["params"] = {"setting":{"tpe_publish_interval":1,"test_mode":False},"subscribeTags":subscribe_tags} - try: - package = json.dumps(package) - except Exception as e: - print(f"The exception here is {e}") - - dsh = open(shell_script_name_final,'a') - dsh.write("\n# Shell Code to create TP Function: \n\n") - cmd1 = """echo '#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nfrom thingspro.edge.func_v1 import package\nfrom thingspro.edge.tag_v1 import tag as tpeTAG\nfrom collections import deque\nimport time\nimport logging\n\n__author__ = "Moxa Europe"\n__license__ = "MIT"\n__version__ = "0.0.1"\n__status__ = "beta"\n\nlog_format = "%(asctime)s: %(levelname)s - %(name)s - %(message)s"\nlogging.basicConfig(level=logging.INFO, datefmt="[%Y-%m-%d %H:%M:%S]", format=log_format)\nlogger = logging.getLogger(__name__)\n\n\n\ncache = {}\n\nclass TpeSaftContext():\n def __init__(self):\n \n # initialize app default settings \n self._tpe_publish_interval = 1\n self._test_mode = False\n \n self._publisher = None\n self._subscriber = None \n \n self._vtag_tags = 0 \n self._vtag_publish_data = deque(maxlen=100)\n self._tagList = [] \n \n\n # initialize virtual tags \n self.tagValueInputVoltage = 0\n self.tagValueInputCurrent = 0\n \n # create config instance to read parameters from package.json file \n config = package.Configuration()\n self._params = config.parameters()\n \n # create subscriber client instance\n self._subscriber = tpeTAG.Subscriber()\n 
self._subscriber.subscribe_callback(self.callback)\n \n def parse_configuration(self):\n print("*********** Parse Configuration ***********")\n # create config instance to read parameters from package.json file \n \n if "setting" in self._params:\n if "tpe_publish_interval" in self._params["setting"]:\n self._tpe_publish_interval = self._params["setting"]["tpe_publish_interval"]\n print("tpe_publish_interval : " + str(self._tpe_publish_interval))\n \n if "test_mode" in self._params["setting"]:\n self._test_mode = self._params["setting"]["test_mode"]\n print("test_mode : " + str(self._test_mode))\n \n if "subscribeTags" in self._params:\n self._tagList = self._params["subscribeTags"]\n print("subscribeTags: ", self._params["subscribeTags"])\n \n print("**** Parse Configuration Successfull! ****")\n return\n \n \n def _callback(self, data={}):\n #print("************** callback function is called *****************")\n \n # Get tag names from package.json file\n for tagDict in self._tagList:\n print(tagDict)\n if "tagName" in tagDict:\n tagName = tagDict["tagName"]\n print(tagName)\n \n # Compare tagHub tagName with tagName comes from package.json file.\n if data["tagName"] == tagName:\n self.put_to_publish_queue("virtual", data["srcName"]+"_onChange", data["tagName"], data["dataValue"], "double", data["ts"]) \n\n return\n \n \n def callback(self, data={}):\n #print("************** callback function is called *****************")\n \n # Get tag names from package.json file\n for tagDict in self._tagList:\n if "tagName" in tagDict:\n tagName = tagDict["tagName"]\n \n # Compare tagHub tagName with tagName comes from package.json file.\n if data["tagName"] == tagName:\n global cache \n # The following store distint tagName in cache\n if tagName not in cache.keys():\n cache[tagName] = data["dataValue"] \n print("CACHE:", cache) \n self.put_to_publish_queue("virtual", data["srcName"]+"_onChange", data["tagName"], data["dataValue"], "double", data["ts"]) \n print("Cache value 
of virtual/onChange/" + str(tagName) + " :" + str(cache[tagName])) \n else:\n if cache[tagName] == data["dataValue"]:\n print("No Changes in " + tagName + ": ", data["dataValue"])\n else:\n print("Updated virtual/onChange/" + str(tagName) + " by new vlaue: " + str(data["dataValue"])) \n self.put_to_publish_queue("virtual", data["srcName"]+"_onChange", data["tagName"], data["dataValue"], "double", data["ts"]) \n # update cache tagValue with new value\n cache[tagName] = data["dataValue"]\n print("CACHE:", cache)\n \n return\n \n def register_tpe_callback(self):\n # create subscriber client instance\n self._subscriber = tpeTAG.Subscriber()\n self._subscriber.subscribe_callback(self.callback)\n \n def subscribe_tag(self):\n #print("**************** subscribe_tag function is called ***************")\n \n if "subscribeTags" in self._params:\n tags = self._params["subscribeTags"]\n #print("[Subscribe]:", tags)\n for tag in tags:\n try:\n self._subscriber.subscribe(tag["prvdName"], tag["srcName"], [tag["tagName"]])\n except ValueError:\n pass\n if self._test_mode :\n self._subscriber.subscribe("system", "status", ["cpuUsage"])\n return\n \n \n def put_to_publish_queue(self, prvdName, srcName, tagName, dataValue, dataType, timestamp):\n #print("****************** put_to_publish_queue function is called *********************")\n tag = {\n "prvdName": prvdName,\n "srcName": srcName,\n "tagName": tagName, \n "dataValue": dataValue,\n "dataType" : dataType,\n "ts": timestamp\n } \n self._vtag_publish_data.append(tag)\n return True\n \n \n def tpe_publish(self): \n #print("************** tpe_publish function is called *****************")\n \n #self.subscribe_tag()\n \n # print("Length _vtag_publish_data", len(self._vtag_publish_data))\n while len(self._vtag_publish_data)>0:\n tag = self._vtag_publish_data.popleft()\n self._publisher.publish(tag)\n print("[Publish]:", tag)\n #print("publish: " + tag["tagName"] + ":" + str(tag["dataValue"]) )\n return\n \n \n \nif __name__ == 
"__main__":\n\n my_app = TpeSaftContext()\n \n # initial configuration from package.json file\n my_app.parse_configuration()\n\n # subscribe Tags\n my_app.subscribe_tag()\n \n # create publisher client instance\n my_app._publisher = tpeTAG.Publisher()\n \n # create direct access instance\n # my_app._accesser = tpeTAG.Access()\n \n \n # infinite loop\n while True: \n my_app.tpe_publish()\n print("sleep " + str(my_app._tpe_publish_interval))\n time.sleep(my_app._tpe_publish_interval)' > demoDataFunc/index.py""" - cmd1 = cmd1.replace("\n","\\n") - dsh.write(inspect.cleandoc("""mkdir demoDataFunc""")) - dsh.write('\n') - dsh.write(cmd1) - dsh.write('\n') - dsh.write("""echo '""" + str(package) + "'" + " | jq '.'" + " > demoDataFunc/package.json") - dsh.write('\n') - dsh.write(inspect.cleandoc("""tpfunc add demoDataFunc""")) - dsh.close() - - # open both files - with open(shell_script_name,'r') as firstfile, open(shell_script_name_final,'a') as secondfile: - # read content from first file - for line in firstfile: - # append content to second file - secondfile.write(line) - os.remove(shell_script_name) + excel_parser(sheet_det, slave_no, slave_device, allowed_name_characters, dsh, dsh_global, row_device) def check_string_format(input_string): # define the pattern using regular expression @@ -369,6 +357,14 @@ def check_string_format(input_string): # return True if there is a match, False otherwise return bool(match) +def auth_moxa(dsh_global, row_device): + json_object = "{\"acceptEULA\": true, \"name\": \"" + default_user + "\", \"password\": \"" + default_password + "\"}" + dsh_global.write("### Authenticating " + "" + "" + str(row_device['device_name']) + "\n") + dsh_global.write("token=$(curl -s -X POST -k https://" + row_device['device_ip_address_http'] + ":8443/api/v1/auth \\\n") + dsh_global.write("\t-H \"Content-Type: application/json\" \\\n") + dsh_global.write("\t-d '" + str(json_object) + "' | jq -r \".data.token\") >> global_config.log\n\n") + + def main(): # 
Create the shell script to write content global dsh @@ -376,17 +372,14 @@ def main(): # Read the VLAN_Modbus spreadsheet from the "I-Sight_Project_Communication_Network_Config.xlsx" file df_slave = pd.read_excel(input_datamodel, sheet_name='DEVICES', header = 0) - print(df_slave) # # The below loop formats the column names properly by replacing the \n with space df_slave_columns = list(df_slave.columns) for k in range(len(df_slave_columns)): - if '\n' in df_slave_columns[k]: + if '\n' in df_slave_columns[k]: df_slave_columns[k] = df_slave_columns[k].replace("\n", " ") df_slave = df_slave.rename(columns = {df_slave.columns[k]:df_slave_columns[k]}) - - null_elements_1 = list(df_slave['device_name'].notnull()) null_elements_2 = list(df_slave['device_ip_address_http'].notnull()) for index in range(len(null_elements_1)): @@ -409,8 +402,12 @@ def main(): current_device = "" - for index, row in filtered_df_slave.iterrows(): - current_device = str(row['device_name']) + dsh_global = open (global_shell_script_name, 'w') + + for index, row_device in filtered_df_slave.iterrows(): + + auth_moxa(dsh_global, row_device) + current_device = str(row_device['device_name']) logging.debug("Defining parameters for " + current_device + " Moxa device...") dsh = open (shell_script_name + current_device + '.sh', 'w') df_assets = pd.read_excel(input_datamodel, sheet_name='ASSETS', header = 0) @@ -419,37 +416,39 @@ def main(): for k in range(len(df_assets_columns)): filtered_df_assets = filtered_df_assets[filtered_df_assets[df_assets_columns[k]].notnull()] - filtered_df_assets = filtered_df_assets[filtered_df_assets['device'] == row['device_name']] + filtered_df_assets = filtered_df_assets[filtered_df_assets['device'] == row_device['device_name']] filtered_df_assets.drop_duplicates(subset=['asset_type', 'ip_address', 'slave_id', 'port', 'device'], inplace=True) - for index, row in filtered_df_assets.iterrows(): - current_asset = {"ip_address": row['ip_address'], "slave_id": row['slave_id'], 
"port": row['port']} + for index, row_asset in filtered_df_assets.iterrows(): + current_asset = {"ip_address": row_asset['ip_address'], "slave_id": row_asset['slave_id'], "port": row_asset['port']} exists = False existing = 0 for a in range(len(assets)): if current_asset == assets[a]: exists = True existing = a + 1 - logging.debug("Asset " + row['asset_name'] + " not created because it already exists. Processing it under the existing asset.") - excel_parser(row['asset_type'], existing, row["asset_name"], allowed_name_characters, dsh) + logging.debug("Asset " + row_asset['asset_name'] + " not created because it already exists. Processing it under the existing asset.") + excel_parser(row_asset['asset_type'], existing, row_asset["asset_name"], allowed_name_characters, dsh, dsh_global, row_device) break if exists != True: assets.append(current_asset) logging.debug("Creating new asset...") - slave_script(slave_no, row, dsh) + slave_script(slave_no, row_asset, dsh, dsh_global, row_device) slave_no = slave_no + 1 - common_code(dsh) + common_code(dsh, dsh_global, row_device) for metric in range(len(all_metrics)): jq_filter_set.add(all_metrics[metric]['jqfilter']) - jq_filter(current_device, dsh) + jq_filter(current_device, dsh, dsh_global, row_device) logging.debug("BITFIELDS STARTED") bitfield_jq_filter(current_device) logging.debug("BITDIELDS ENDED") logging.debug("Parameters for " + current_device + " Moxa device completed!") + dsh_global.write("\n\n\n\n\n") + all_metrics.clear() #bitfields(dsh, filtered_df_assets, current_device, assets) diff --git a/Python/azure_iot_hub_1.py b/Python/azure_iot_hub_1.py index bdbca55..4f0fc74 100644 --- a/Python/azure_iot_hub_1.py +++ b/Python/azure_iot_hub_1.py @@ -1,8 +1,12 @@ from azure.iot.hub import IoTHubRegistryManager from azure.iot.hub.protocol.models import QuerySpecification from azure.iot.hub.models import CloudToDeviceMethod, CloudToDeviceMethodResult +from dotenv import load_dotenv import json +import os + +load_dotenv() module_id = 
"thingspro-agent" method_name = "thingspro-api-v1" @@ -12,18 +16,29 @@ payload = '{"method":"GET", "path":"/device/general"}' # pip install azure-iot-hub # Authenticate to your Azure account -CONNECTION_STRING = "HostName=IotHub-CUBE-PROD.azure-devices.net;SharedAccessKeyName=iothubowner;SharedAccessKey=...=" +CONNECTION_STRING = str(os.getenv("CONNECTION_STRING_SAFT_PROD")) +# CONNECTION_STRING = str(os.getenv("CONNECTION_STRING_INOX_PROD")) if CONNECTION_STRING == "": print("Provide a connection string for the Iot Hub before running the script!") exit(13) registry_manager = IoTHubRegistryManager.from_connection_string(CONNECTION_STRING) -query_spec = QuerySpecification(query="SELECT * FROM devices WHERE tags.site = 'MYRTLE' AND tags.number != '0' AND capabilities.iotEdge = true") +query_spec = QuerySpecification(query="SELECT * FROM devices WHERE IS_DEFINED(tags.site) AND capabilities.iotEdge = true") query_result = registry_manager.query_iot_hub(query_spec) devices = [] for item in query_result.items: + number = int(-1) + deviceId = "null" + site = "null" + if item.tags['number']: + number = int(item.tags['number']) + if item.tags['deviceId']: + deviceId = item.tags['deviceId'] + if item.tags['site']: + site = item.tags['site'] + devices.append([int(item.tags['number']), item.tags['deviceId'], item.tags['site']]) ordered_devices = sorted(devices, key = lambda x: (x[2], x[0])) diff --git a/Python/danish_D1_api.py b/Python/danish_D1_api.py index d3d1451..ed5587f 100644 --- a/Python/danish_D1_api.py +++ b/Python/danish_D1_api.py @@ -5,6 +5,10 @@ import requests from urllib3.exceptions import InsecureRequestWarning import jq import json +import os +from dotenv import load_dotenv + +load_dotenv() # Function to authenticate and get token def authenticate(device_ip, payload): @@ -106,10 +110,13 @@ def get_API_1(device_ip, token): requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) +default_user = str(os.getenv("DEFAULT_MOXA_USER")) 
+default_password = str(os.getenv("DEFAULT_MOXA_PASSWORD")) + payload_auth = { "acceptEULA": True, - "name": "", - "password": "" + "name": default_user, + "password": default_password } if payload_auth["name"] == "" or payload_auth["password"] == "": print("Provide the credentials before running the script!") diff --git a/Python/danish_batch_api.py b/Python/danish_batch_api.py index 9dc1f63..291c417 100644 --- a/Python/danish_batch_api.py +++ b/Python/danish_batch_api.py @@ -3,6 +3,10 @@ import requests from urllib3.exceptions import InsecureRequestWarning import jq import json +import os +from dotenv import load_dotenv + +load_dotenv() # Function to authenticate and get token def authenticate(device_ip, payload): @@ -275,13 +279,15 @@ requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) # get_API(device_ip_address_https, token) # print("\n") +default_user = str(os.getenv("DEFAULT_MOXA_USER")) +default_password = str(os.getenv("DEFAULT_MOXA_PASSWORD")) for i in range(193, 222): upgrade_url = "https://files.thingsprocloud.com/package/Upgrade_AIG-301_2.4.0-4020_IMG_1.4_to_1.5.deb.yaml" payload_auth = { "acceptEULA": True, - "name": "admin", - "password": "admin@123" + "name": default_user, + "password": default_password } device_ip_address = str("10.84.171." 
+ str(i)) diff --git a/Python/danish_batch_scp.py b/Python/danish_batch_scp.py index 035a45b..e4fb47b 100644 --- a/Python/danish_batch_scp.py +++ b/Python/danish_batch_scp.py @@ -4,6 +4,10 @@ from urllib3.exceptions import InsecureRequestWarning import json import scp import paramiko +import os +from dotenv import load_dotenv + +load_dotenv() def scp_file(local_path, remote_path, hostname, username, password): try: @@ -78,13 +82,16 @@ def ssh_execute_command_with_password(hostname, username, password, command): requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) +default_user = str(os.getenv("DEFAULT_MOXA_SSH_USER")) +default_password = str(os.getenv("DEFAULT_MOXA_SSH_PASSWORD")) + local_file_path = "AIG-301_1.5.2-20240625_saft1_armhf.deb" if local_file_path == "": print("Provide upgrade file path before running the script!") exit(12) remote_file_path = "./." -username = "moxa" -password = "moxa" +username = default_user +password = default_password if username == "" or password == "": print("Provide credentials before running the script!") exit(10) diff --git a/Python/stat_json_buffer.py b/Python/stat_json_buffer.py index d74ee00..c051699 100644 --- a/Python/stat_json_buffer.py +++ b/Python/stat_json_buffer.py @@ -73,5 +73,5 @@ def main(folder_path): if __name__ == "__main__": - folder_path = '/mnt/c/Users/QWPHR/Downloads/JSON_BUFFER_7' + folder_path = '' main(folder_path)