From 917967abb9a7d0e0fc33e3379e0f3e0a929d4e05 Mon Sep 17 00:00:00 2001
From: ASRAF KHAN NAZAR
Date: Tue, 23 Dec 2025 12:09:38 +0000
Subject: [PATCH 1/2] Added Preupgrade-validation script with switch_ssd_diag_test_check - CSCws27722

---
 aci-preupgrade-validation-script.py      | 151 +++++++++
 docs/docs/validations.md                 |  45 ++-
 .../fabricNode_no_switches.json          |  32 ++
 .../fabricNode_single_switch.json        |  32 ++
 .../fabricNode_single_switch2.json       |  32 ++
 .../fabricNode_with_switches.json        |  47 +++
 .../test_switch_ssd_diag_test_check.py   | 308 ++++++++++++++++++
 7 files changed, 646 insertions(+), 1 deletion(-)
 create mode 100644 tests/checks/switch_ssd_diag_test_check/fabricNode_no_switches.json
 create mode 100644 tests/checks/switch_ssd_diag_test_check/fabricNode_single_switch.json
 create mode 100644 tests/checks/switch_ssd_diag_test_check/fabricNode_single_switch2.json
 create mode 100644 tests/checks/switch_ssd_diag_test_check/fabricNode_with_switches.json
 create mode 100644 tests/checks/switch_ssd_diag_test_check/test_switch_ssd_diag_test_check.py

diff --git a/aci-preupgrade-validation-script.py b/aci-preupgrade-validation-script.py
index ebe0477..c9c59e4 100644
--- a/aci-preupgrade-validation-script.py
+++ b/aci-preupgrade-validation-script.py
@@ -6007,6 +6007,156 @@ def apic_vmm_inventory_sync_faults_check(**kwargs):
                   recommended_action=recommended_action, doc_url=doc_url)
 
 
+@check_wrapper(check_title="Switch SSD Diagnostic Test Validation")
+def switch_ssd_diag_test_check(username, password, fabric_nodes, **kwargs):
+    result = PASS
+    recommended_action = "Contact Cisco TAC to investigate SSD diagnostic test failures."
+    doc_url = "https://datacenter.github.io/ACI-Pre-Upgrade-Validation-Script/validations/#switch-ssd-diag-test-check"
+
+    headers = ["Node Name", "Error Code", "Total Failures", "Fault Code"]
+    data = []
+
+    # Get the APIC's own IP address (bind source for SSH)
+    try:
+        apic_hostname = run_cmd("bash -c \"hostname\"", splitlines=True)[0].strip()
+        if not apic_hostname:
+            return Result(result=ERROR, msg="Could not determine APIC hostname")
+
+        apic_ip = next(
+            (node["fabricNode"]["attributes"].get("address")
+             for node in fabric_nodes
+             if node["fabricNode"]["attributes"]["name"] == apic_hostname),
+            None
+        )
+    except Exception as e:
+        return Result(result=ERROR, msg="Failed to get APIC IP: {}".format(e))
+
+    if not apic_ip:
+        return Result(result=ERROR, msg="Could not determine APIC IP address from fabric nodes")
+
+    # Filter active switches only (exclude controllers/APICs)
+    switches = [
+        node for node in fabric_nodes
+        if node["fabricNode"]["attributes"].get("role") != "controller"
+        and node["fabricNode"]["attributes"].get("fabricSt") == "active"
+    ]
+
+    if not switches:
+        return Result(result=NA, msg="No active switches found in fabric")
+
+    # Check fault F2421 (SSD diagnostics failure) per node
+    fault_per_node = {}
+    try:
+        F2421_faults = icurl('class', 'faultInst.json?query-target-filter=eq(faultInst.code,"F2421")')
+        for fault in F2421_faults:
+            fault_dn = fault["faultInst"]["attributes"]["dn"]
+            node_match = re.search(node_regex, fault_dn)
+            if node_match:
+                node_id = node_match.group("node")
+                fault_per_node[node_id] = True
+    except Exception as e:
+        return Result(result=ERROR, msg="Failed to retrieve F2421 faults: {}".format(e))
+
+    # SSH to each switch and run the diagnostic test command
+    for switch in switches:
+        attr = switch["fabricNode"]["attributes"]
+        node_id = attr.get("id")
+        node_name = attr.get("name")
+
+        try:
+            # Create SSH connection with APIC IP binding
+            c = Connection(node_name)
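+            # Connection is this script's pexpect-based SSH helper; bind_ip
+            # (introduced in PATCH 2/2) is appended to the ssh command as
+            # "-b <ip>" so the session is sourced from the APIC's own address.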
+            c.username = username
+            c.password = password
+            c.bind_ip = apic_ip  # Route traffic through the APIC inband IP
+            c.connect()
+
+            # Execute the diagnostic test command
+            c.cmd("show diagnostic result module 1 test 24 detail", timeout=60)
+            output = c.output
+            c.close()
+
+            # Parse the output for the Error code line, e.g.
+            # "Error code ------------------> DIAG TEST SUCCESS" or "... DIAG TEST FAIL"
+            error_code = None
+            total_failures = None
+            total_run_count = None
+
+            # Extract Error code
+            error_match = re.search(r'Error\s+code\s+[-]+>\s+(.+)', output, re.IGNORECASE)
+            if error_match:
+                error_code = error_match.group(1).strip()
+
+            # Extract Total run count
+            run_match = re.search(r'Total\s+run\s+count\s+[-]+>\s+(\d+)', output, re.IGNORECASE)
+            if run_match:
+                total_run_count = int(run_match.group(1).strip())
+
+            # Extract Total failure count
+            failure_match = re.search(r'Total\s+failure\s+count\s+[-]+>\s+(\d+)', output, re.IGNORECASE)
+            if failure_match:
+                total_failures = int(failure_match.group(1).strip())
+
+            # Flag the node when the Error code is anything other than SUCCESS
+            if error_code:
+                if "FAIL" in error_code.upper() or "SUCCESS" not in error_code.upper():
+                    result = FAIL_O
+
+                    # Check if fault F2421 exists for this node
+                    fault_code = "F2421" if node_id in fault_per_node else "N/A"
+
+                    data.append([
+                        node_name,
+                        error_code,
+                        total_failures if total_failures is not None else "N/A",
+                        fault_code
+                    ])
+                elif total_run_count and total_failures == total_run_count:
+                    # Even if the current status is SUCCESS, flag a 100% historical failure rate
+                    result = FAIL_O
+                    fault_code = "F2421" if node_id in fault_per_node else "N/A"
+
+                    data.append([
+                        node_name,
+                        error_code,
+                        total_failures,
+                        fault_code
+                    ])
+            else:
+                # Could not get test results or parse the output
+                data.append([node_name, "SSD Diag Test results are not available", "N/A", "N/A"])
+                result = ERROR
+
+        except pexpect.TIMEOUT:
+            data.append([node_name, "SSH Timeout", "N/A", "N/A"])
+            result = ERROR
+        except pexpect.EOF:
+            data.append([node_name, "SSH Connection Closed", "N/A", "N/A"])
+            result = ERROR
+        except Exception as e:
+            data.append([node_name, "Error: {}".format(str(e)), "N/A", "N/A"])
+            result = ERROR
+
+    if result == PASS:
+        msg = "All switches passed SSD diagnostic test 24"
+        recommended_action = ""
+    elif result == FAIL_O:
+        msg = "SSD diagnostic test failures detected on {} switch(es)".format(len(data))
+    else:
+        msg = "Errors occurred while checking switches"
+        recommended_action = "Review the errors and retry the check if necessary"
+
+    return Result(
+        result=result,
+        msg=msg,
+        headers=headers,
+        data=data,
+        recommended_action=recommended_action,
+        doc_url=doc_url
+    )
+
 
 # ---- Script Execution ----
@@ -6115,6 +6265,7 @@ class CheckManager:
         fabric_port_down_check,
         equipment_disk_limits_exceeded,
         apic_vmm_inventory_sync_faults_check,
+        switch_ssd_diag_test_check,
 
         # Configurations
         vpc_paired_switches_check,
diff --git a/docs/docs/validations.md b/docs/docs/validations.md
index fa1fc0e..8152fc2 100644
--- a/docs/docs/validations.md
+++ b/docs/docs/validations.md
@@ -80,7 +80,7 @@ Items | Faults | This Script
 [Fabric Port Status][f19] | F1394: ethpm-if-port-down-fabric | :white_check_mark: | :no_entry_sign:
 [Equipment Disk Limits][f20] | F1820: 80% -minor<br>F1821: -major<br>F1822: -critical | :white_check_mark: | :no_entry_sign:
 [VMM Inventory Partially Synced][f21] | F0132: comp-ctrlr-operational-issues | :white_check_mark: | :no_entry_sign:
-
+[Switch SSD Read-Only][f22] | F2421: switch-ssd-read-only-issue | :white_check_mark: | :no_entry_sign:
 
 [f1]: #apic-disk-space-usage
 [f2]: #standby-apic-disk-space-usage
@@ -103,6 +103,7 @@ Items | Faults | This Script
 [f19]: #fabric-port-status
 [f20]: #equipment-disk-limits
 [f21]: #vmm-inventory-partially-synced
+[f22]: #switch-ssd-diag-test-check
 
 ### Configuration Checks
@@ -1516,6 +1517,48 @@ EPGs using the `pre-provision` resolution immediacy do not rely on the VMM inven
 
 This check returns a `MANUAL` result as there are many reasons for a partial inventory sync to be reported. The goal is to ensure that the VMM inventory sync has fully completed before triggering the APIC upgrade to reduce any chance for unexpected inventory changes to occur.
 
+### Switch SSD Diag Test Check
+
+This check connects to each active switch, runs the SSD diagnostic test (module 1, test 24), and also looks for fault `F2421` on the APIC. This fault is raised when a switch SSD becomes read-only.
+
+!!! example "Fault Example F2421"
+    From the APIC CLI:
+    ```
+    apic1# moquery -c faultInst -f 'fault.Inst.code=="F2421"'
+    Total Objects shown: 1
+
+    # fault.Inst
+    code             : F2421
+    ack              : no
+    alert            : no
+    annotation       :
+    cause            : equipment-diags-failed
+    changeSet        : firstExecFailTs (New: 2025-11-27T10:26:33.000+00:00), lastExecFailQual (New: Failed to verify contents written to file), lastExecFailTs (New: 2025-11-27T10:26:33.000+00:00), lastExecTs (New: 2025-11-27T10:26:33.000+00:00), nextExecTs (New: 2025-11-27T10:26:33.000+00:00), numExec (New: 3), numExecFail (New: 1), operSt (New: fail), operStQual (New: Failed to verify contents written to file)
+    childAction      :
+    created          : 2025-11-27T10:26:33.081+00:00
+    delegated        : no
+    descr            : Diagnostics test failed. reason:Failed to verify contents written to file
+    dn               : topology/pod-1/node-102/sys/diag/rule-ssd-acc-trig-forever/subj-[topology/pod-1/node-102/sys/ch/supslot-1/sup]
+    domain           : infra
+    extMngdBy        : undefined
+    highestSeverity  : critical
+    lastTransition   : 2025-11-27T10:26:33.081+00:00
+    lc               : soaking
+    modTs            : never
+    occur            : 1
+    origSeverity     : critical
+    prevSeverity     : critical
+    rn               : fault-F2421
+    rule             : eqptdiag-subj-oper-st-failed
+    severity         : critical
+    status           :
+    subject          : oper-state-failed
+    title            :
+    type             : operational
+    uid              :
+    userdom          : all
+    ```
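+
+!!! tip "Manual Verification"
+    The same data can be read directly from a switch CLI. The exact output format can vary by release; the check only relies on the `Error code`, `Total run count` and `Total failure count` fields. The sample below is illustrative:
+    ```
+    leaf101# show diagnostic result module 1 test 24 detail
+
+    Diagnostic module 1, test 24:
+
+    Error code ---------> DIAG TEST SUCCESS
+    Total run count ---------> 2915
+    Total failure count ---------> 0
+    ```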
+
 ## Configuration Check Details
 
 ### VPC-paired Leaf switches
diff --git a/tests/checks/switch_ssd_diag_test_check/fabricNode_no_switches.json b/tests/checks/switch_ssd_diag_test_check/fabricNode_no_switches.json
new file mode 100644
index 0000000..1180c9c
--- /dev/null
+++ b/tests/checks/switch_ssd_diag_test_check/fabricNode_no_switches.json
@@ -0,0 +1,32 @@
+[
+    {
+        "fabricNode": {
+            "attributes": {
+                "address": "10.0.0.1",
+                "dn": "topology/pod-1/node-1",
+                "fabricSt": "commissioned",
+                "id": "1",
+                "model": "APIC-SERVER-L2",
+                "monPolDn": "uni/fabric/monfab-default",
+                "name": "apic1",
+                "nodeType": "unspecified",
+                "role": "controller"
+            }
+        }
+    },
+    {
+        "fabricNode": {
+            "attributes": {
+                "address": "10.0.0.2",
+                "dn": "topology/pod-1/node-2",
+                "fabricSt": "commissioned",
+                "id": "2",
+                "model": "APIC-SERVER-L2",
+                "monPolDn": "uni/fabric/monfab-default",
+                "name": "apic2",
+                "nodeType": "unspecified",
+                "role": "controller"
+            }
+        }
+    }
+]
\ No newline at end of file
diff --git a/tests/checks/switch_ssd_diag_test_check/fabricNode_single_switch.json b/tests/checks/switch_ssd_diag_test_check/fabricNode_single_switch.json
new file mode 100644
index 0000000..b50d8b8
--- /dev/null
+++ b/tests/checks/switch_ssd_diag_test_check/fabricNode_single_switch.json
@@ -0,0 +1,32 @@
+[
+    {
+        "fabricNode": {
+            "attributes": {
+                "address": "10.0.0.1",
+                "dn": "topology/pod-1/node-1",
+                "fabricSt": "commissioned",
+                "id": "1",
+                "model": "APIC-SERVER-L2",
+                "monPolDn": "uni/fabric/monfab-default",
+                "name": "apic1",
+                "nodeType": "unspecified",
+                "role": "controller"
+            }
+        }
+    },
+    {
+        "fabricNode": {
+            "attributes": {
+                "address": "10.0.0.101",
+                "dn": "topology/pod-1/node-101",
+                "fabricSt": "active",
+                "id": "101",
+                "model": "N9K-C93180YC-EX",
+                "monPolDn": "uni/fabric/monfab-default",
+                "name": "leaf101",
+                "nodeType": "unspecified",
+                "role": "leaf"
+            }
+        }
+    }
+]
\ No newline at end of file
diff --git a/tests/checks/switch_ssd_diag_test_check/fabricNode_single_switch2.json b/tests/checks/switch_ssd_diag_test_check/fabricNode_single_switch2.json
new file mode 100644
index 0000000..96b7a8f
--- /dev/null
+++ b/tests/checks/switch_ssd_diag_test_check/fabricNode_single_switch2.json
@@ -0,0 +1,32 @@
+[
+    {
+        "fabricNode": {
+            "attributes": {
+                "address": "10.0.0.1",
+                "dn": "topology/pod-1/node-1",
+                "fabricSt": "commissioned",
+                "id": "1",
+                "model": "APIC-SERVER-L2",
+                "monPolDn": "uni/fabric/monfab-default",
+                "name": "apic1",
+                "nodeType": "unspecified",
+                "role": "controller"
+            }
+        }
+    },
+    {
+        "fabricNode": {
+            "attributes": {
+                "address": "10.0.0.102",
+                "dn": "topology/pod-1/node-102",
+                "fabricSt": "active",
+                "id": "102",
+                "model": "N9K-C93180YC-FX",
+                "monPolDn": "uni/fabric/monfab-default",
+                "name": "leaf102",
+                "nodeType": "unspecified",
+                "role": "leaf"
+            }
+        }
+    }
+]
\ No newline at end of file
diff --git a/tests/checks/switch_ssd_diag_test_check/fabricNode_with_switches.json b/tests/checks/switch_ssd_diag_test_check/fabricNode_with_switches.json
new file mode 100644
index 0000000..d729954
--- /dev/null
+++ b/tests/checks/switch_ssd_diag_test_check/fabricNode_with_switches.json
@@ -0,0 +1,47 @@
+[
+    {
+        "fabricNode": {
+            "attributes": {
+                "address": "10.0.0.1",
+                "dn": "topology/pod-1/node-1",
+                "fabricSt": "commissioned",
+                "id": "1",
+                "model": "APIC-SERVER-L2",
+                "monPolDn": "uni/fabric/monfab-default",
+                "name": "apic1",
+                "nodeType": "unspecified",
+                "role": "controller"
+            }
+        }
+    },
+    {
+        "fabricNode": {
+            "attributes": {
+                "address": "10.0.0.101",
+                "dn": "topology/pod-1/node-101",
+                "fabricSt": "active",
+                "id": "101",
+                "model": "N9K-C93180YC-EX",
+                "monPolDn": "uni/fabric/monfab-default",
+                "name": "leaf101",
+                "nodeType": "unspecified",
+                "role": "leaf"
+            }
+        }
+    },
+    {
+        "fabricNode": {
+            "attributes": {
+                "address": "10.0.0.102",
+                "dn": "topology/pod-1/node-102",
+                "fabricSt": "active",
+                "id": "102",
+                "model": "N9K-C93180YC-FX",
+                "monPolDn": "uni/fabric/monfab-default",
+                "name": "leaf102",
+                "nodeType": "unspecified",
+                "role": "leaf"
+            }
+        }
+    }
+]
\ No newline at end of file
diff --git a/tests/checks/switch_ssd_diag_test_check/test_switch_ssd_diag_test_check.py b/tests/checks/switch_ssd_diag_test_check/test_switch_ssd_diag_test_check.py
new file mode 100644
index 0000000..05eeac5
--- /dev/null
+++ b/tests/checks/switch_ssd_diag_test_check/test_switch_ssd_diag_test_check.py
@@ -0,0 +1,308 @@
+import os
+import pytest
+import logging
+import importlib
+from helpers.utils import read_data
+import pexpect
+
+script = importlib.import_module("aci-preupgrade-validation-script")
+
+log = logging.getLogger(__name__)
+dir = os.path.dirname(os.path.abspath(__file__))
+
+test_function = "switch_ssd_diag_test_check"
+
+# ---- Test Command & Query Constants ----
+diag_cmd = "show diagnostic result module 1 test 24 detail"
+fault_query = 'faultInst.json?query-target-filter=eq(faultInst.code,"F2421")'
+hostname_cmd = "bash -c \"hostname\""
+
+# ---- Test Data Outputs ----
+output_success_leaf101 = "show diagnostic result module 1 test 24 detail\n\nDiagnostic module 1, test 24:\n\nError code ---------> DIAG TEST SUCCESS\nTotal run count ---------> 2915\nTotal failure count ---------> 0\nLast test execution time ---------> 2023-11-15 10:30:45\n\nleaf101#"
+
+output_success_leaf102 = "show diagnostic result module 1 test 24 detail\n\nDiagnostic module 1, test 24:\n\nError code ---------> DIAG TEST SUCCESS\nTotal run count ---------> 3100\nTotal failure count ---------> 0\nLast test execution time ---------> 2023-11-15 10:31:12\n\nleaf102#"
+
+output_fail_leaf101 = "show diagnostic result module 1 test 24 detail\n\nDiagnostic module 1, test 24:\n\nError code ---------> DIAG TEST FAIL\nTotal run count ---------> 1500\nTotal failure count ---------> 1500\nLast test execution time ---------> 2023-11-15 09:15:22\n\nleaf101#"
+
+output_fail_leaf102 = "show diagnostic result module 1 test 24 detail\n\nDiagnostic module 1, test 24:\n\nError code ---------> DIAG TEST FAIL\nTotal run count ---------> 500\nTotal failure count ---------> 500\nLast test execution time ---------> 2023-11-15 10:25:33\n\nleaf102#"
+
+output_100pct_failure_leaf102 = "show diagnostic result module 1 test 24 detail\n\nDiagnostic module 1, test 24:\n\nError code ---------> DIAG TEST SUCCESS\nTotal run count ---------> 850\nTotal failure count ---------> 850\nLast test execution time ---------> 2023-11-15 08:45:33\n\nleaf102#"
+
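+# NOTE: a partial failure history with a SUCCESS error code (150 of 1000 runs
+# below) is not flagged by the check; only an explicit FAIL error code or a
+# 100% failure rate is. This fixture is defined for completeness and is not
+# referenced by any of the parametrized cases below.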
+output_high_failure_leaf101 = "show diagnostic result module 1 test 24 detail\n\nDiagnostic module 1, test 24:\n\nError code ---------> DIAG TEST SUCCESS\nTotal run count ---------> 1000\nTotal failure count ---------> 150\nLast test execution time ---------> 2023-11-15 07:20:11\n\nleaf101#"
+
+output_invalid_format = "show diagnostic result module 1 test 24 detail\n\nDiagnostic module 1, test 24:\n\nInvalid output format\nNo error code or counts\n\nleaf101#"
+
+# ---- Mock Fault Data ----
+no_faults = []
+f2421_fault_leaf101 = [
+    {
+        "faultInst": {
+            "attributes": {
+                "dn": "topology/pod-1/node-101/faultInst",
+                "code": "F2421",
+                "severity": "critical",
+            }
+        }
+    }
+]
+
+# ---- Mock cmd_outputs (hostname) ----
+cmd_outputs_apic1 = {
+    hostname_cmd: {
+        "splitlines": True,
+        "output": "apic1",
+    }
+}
+
+# ---- Mock icurl_outputs (faults) ----
+icurl_no_faults = {fault_query: no_faults}
+icurl_with_f2421 = {fault_query: f2421_fault_leaf101}
+
+# ---- Mock conn_cmds (SSH commands) ----
+conn_leaf101_success = {
+    "leaf101": [
+        {
+            "cmd": diag_cmd,
+            "output": output_success_leaf101,
+            "exception": None,
+        }
+    ]
+}
+
+conn_leaf102_success = {
+    "leaf102": [
+        {
+            "cmd": diag_cmd,
+            "output": output_success_leaf102,
+            "exception": None,
+        }
+    ]
+}
+
+conn_leaf101_fail = {
+    "leaf101": [
+        {
+            "cmd": diag_cmd,
+            "output": output_fail_leaf101,
+            "exception": None,
+        }
+    ]
+}
+
+conn_leaf102_fail = {
+    "leaf102": [
+        {
+            "cmd": diag_cmd,
+            "output": output_fail_leaf102,
+            "exception": None,
+        }
+    ]
+}
+
+conn_leaf102_100pct_failure = {
+    "leaf102": [
+        {
+            "cmd": diag_cmd,
+            "output": output_100pct_failure_leaf102,
+            "exception": None,
+        }
+    ]
+}
+
+conn_leaf101_timeout = {
+    "leaf101": [
+        {
+            "cmd": diag_cmd,
+            "output": "",
+            "exception": pexpect.TIMEOUT("SSH connection timeout"),
+        }
+    ]
+}
+
+conn_leaf101_eof = {
+    "leaf101": [
+        {
+            "cmd": diag_cmd,
+            "output": "",
+            "exception": pexpect.EOF("Connection closed"),
+        }
+    ]
+}
+
+conn_leaf101_invalid_output = {
+    "leaf101": [
+        {
+            "cmd": diag_cmd,
+            "output": output_invalid_format,
+            "exception": None,
+        }
+    ]
+}
+
+conn_both_switches_success = {
+    "leaf101": [
+        {
+            "cmd": diag_cmd,
+            "output": output_success_leaf101,
+            "exception": None,
+        }
+    ],
+    "leaf102": [
+        {
+            "cmd": diag_cmd,
+            "output": output_success_leaf102,
+            "exception": None,
+        }
+    ],
+}
+
+conn_mixed_results = {
+    "leaf101": [
+        {
+            "cmd": diag_cmd,
+            "output": output_success_leaf101,
+            "exception": None,
+        }
+    ],
+    "leaf102": [
+        {
+            "cmd": diag_cmd,
+            "output": output_fail_leaf102,
+            "exception": None,
+        }
+    ],
+}
+
+# ---- Fabric Node Data ----
+fabric_with_switches = read_data(dir, "fabricNode_with_switches.json")
+fabric_single_leaf101 = read_data(dir, "fabricNode_single_switch.json")
+fabric_single_leaf102 = read_data(dir, "fabricNode_single_switch2.json")
+fabric_no_switches = read_data(dir, "fabricNode_no_switches.json")
+
+# ---- Expected Results Data ----
+data_pass_all = []
+
+data_fail_leaf101_explicit = [["leaf101", "DIAG TEST FAIL", 1500, "N/A"]]
+
+data_fail_leaf102_100pct = [["leaf102", "DIAG TEST SUCCESS", 850, "N/A"]]
+
+data_error_conn_failure = [["leaf101", "Error: Simulated exception at connect()", "N/A", "N/A"]]
+
+data_error_timeout = [["leaf101", "SSH Timeout", "N/A", "N/A"]]
+
+data_error_eof = [["leaf101", "SSH Connection Closed", "N/A", "N/A"]]
+
+data_error_invalid_output = [["leaf101", "SSD Diag Test results are not available", "N/A", "N/A"]]
+
+data_fail_leaf102_mixed = [["leaf102", "DIAG TEST FAIL", 500, "N/A"]]
+
+
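+# Each parametrized case supplies, in order: the mocked icurl responses, a flag
+# telling mock_conn to raise at connect(), the per-switch SSH command outputs,
+# the mocked run_cmd outputs (hostname lookup), the fabricNode topology, and
+# the expected Result fields.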
+@pytest.mark.parametrize(
+    "icurl_outputs, conn_failure, conn_cmds, cmd_outputs, fabric_nodes, expected_result, expected_data",
+    [
+        # Test 1: PASS - All switches have SUCCESS status and no failures
+        (
+            icurl_no_faults,
+            False,
+            conn_both_switches_success,
+            cmd_outputs_apic1,
+            fabric_with_switches,
+            script.PASS,
+            data_pass_all,
+        ),
+        # Test 2: FAIL_O - Error code shows DIAG TEST FAIL
+        (
+            icurl_no_faults,
+            False,
+            conn_leaf101_fail,
+            cmd_outputs_apic1,
+            fabric_single_leaf101,
+            script.FAIL_O,
+            data_fail_leaf101_explicit,
+        ),
+        # Test 3: FAIL_O - 100% failure rate (run_count == failure_count)
+        (
+            icurl_no_faults,
+            False,
+            conn_leaf102_100pct_failure,
+            cmd_outputs_apic1,
+            fabric_single_leaf102,
+            script.FAIL_O,
+            data_fail_leaf102_100pct,
+        ),
+        # Test 4: ERROR - SSH connection failure
+        (
+            icurl_no_faults,
+            True,
+            {},
+            cmd_outputs_apic1,
+            fabric_single_leaf101,
+            script.ERROR,
+            data_error_conn_failure,
+        ),
+        # Test 5: ERROR - SSH timeout
+        (
+            icurl_no_faults,
+            False,
+            conn_leaf101_timeout,
+            cmd_outputs_apic1,
+            fabric_single_leaf101,
+            script.ERROR,
+            data_error_timeout,
+        ),
+        # Test 6: ERROR - SSH EOF (connection closed)
+        (
+            icurl_no_faults,
+            False,
+            conn_leaf101_eof,
+            cmd_outputs_apic1,
+            fabric_single_leaf101,
+            script.ERROR,
+            data_error_eof,
+        ),
+        # Test 7: NA - No active switches in fabric
+        (
+            icurl_no_faults,
+            False,
+            {},
+            cmd_outputs_apic1,
+            fabric_no_switches,
+            script.NA,
+            [],
+        ),
+        # Test 8: FAIL_O - Mixed results (some switches pass, some fail)
+        (
+            icurl_no_faults,
+            False,
+            conn_mixed_results,
+            cmd_outputs_apic1,
+            fabric_with_switches,
+            script.FAIL_O,
+            data_fail_leaf102_mixed,
+        ),
+        # Test 9: ERROR - Cannot parse diagnostic output (missing fields)
+        (
+            icurl_no_faults,
+            False,
+            conn_leaf101_invalid_output,
+            cmd_outputs_apic1,
+            fabric_single_leaf101,
+            script.ERROR,
+            data_error_invalid_output,
+        ),
+    ],
+)
+def test_logic(run_check, mock_icurl, mock_conn, mock_run_cmd, fabric_nodes, expected_result, expected_data):
+    result = run_check(username="test_user", password="test_pass", fabric_nodes=fabric_nodes)
+    assert result.result == expected_result
+    assert result.data == expected_data
\ No newline at end of file

From 81aa26e867a79b1132b44d49d946d69c47bf008f Mon Sep 17 00:00:00 2001
From: ASRAF KHAN NAZAR
Date: Mon, 5 Jan 2026 04:41:35 +0000
Subject: [PATCH 2/2] Added Bind IP related changes

---
 aci-preupgrade-validation-script.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/aci-preupgrade-validation-script.py b/aci-preupgrade-validation-script.py
index c9c59e4..c2eb421 100644
--- a/aci-preupgrade-validation-script.py
+++ b/aci-preupgrade-validation-script.py
@@ -151,6 +151,7 @@ def __init__(self, hostname):
         self._term_len = 0  # terminal length for cisco devices
         self._login = False  # set to true at first successful login
         self._log = None  # private variable for tracking logfile state
+        self.bind_ip = None  # optional source IP to bind for SSH
 
     def __connected(self):
         # determine if a connection is already open
@@ -207,6 +208,8 @@ def connect(self):
                 "spawning new pexpect connection: ssh %s@%s -p %d" % (self.username, self.hostname, self.port))
             no_verify = " -o StrictHostKeyChecking=no -o LogLevel=ERROR -o UserKnownHostsFile=/dev/null"
             if self.verify: no_verify = ""
+            if self.bind_ip:
+                no_verify += " -b %s" % self.bind_ip
             self.child = pexpect.spawn("ssh %s %s@%s -p %d" % (no_verify, self.username, self.hostname, self.port), searchwindowsize=self.searchwindowsize)
         elif self.protocol.lower() == "telnet":