diff --git a/aci-preupgrade-validation-script.py b/aci-preupgrade-validation-script.py index ebe0477..94c1541 100644 --- a/aci-preupgrade-validation-script.py +++ b/aci-preupgrade-validation-script.py @@ -6007,6 +6007,42 @@ def apic_vmm_inventory_sync_faults_check(**kwargs): recommended_action=recommended_action, doc_url=doc_url) + +@check_wrapper(check_title='NTP sync issue in Leaf as NTP server') +def leaf_ntp_sync_check(cversion, tversion, **kwargs): + result = PASS + headers = ['policy dn', 'pod group name', 'policy name'] + data = [] + recommended_action = 'NTP wont sync between leaf as NTP server and host. makesure to use in-band ip for NTP server in leaf or checkout the bug CSCwp92030 for fixed version details' + doc_url = 'https://datacenter.github.io/ACI-Pre-Upgrade-Validation-Script/validations/#ntp-sync-issue-in-leaf-as-ntp-server' + + fabricRsTimePol_api = 'fabricRsTimePol.json' + + if not tversion: + return Result(result=MANUAL, msg=TVER_MISSING) + + if tversion.newer_than('6.1(4.28)'): + return Result(result=NA, msg=VER_NOT_AFFECTED) + + if (cversion.newer_than('1.1(1a)') and cversion.older_than('6.1(4.20)')) or (tversion.newer_than('1.1(1a)') and tversion.older_than('6.1(4.20)')): + fabricRsTimePol = icurl('class', fabricRsTimePol_api) + for rstimepol in fabricRsTimePol: + rstimepol_attr = rstimepol['fabricRsTimePol']['attributes'] + pol_dn = rstimepol_attr['tDn'] + pol_name = rstimepol_attr['tnDatetimePolName'] + match = re.search(r'podpgrp-([^/]+)', rstimepol_attr['dn']) + pod_group = match.group(1) if match else None + pol_res = icurl('mo', pol_dn + '.json') + pol_attr = pol_res[0]['datetimePol']['attributes'] + if pol_attr['serverState'] == 'enabled' and pol_attr['masterMode'] == 'enabled': + data.append([pol_attr['dn'], pod_group, pol_name]) + + if data: + result = FAIL_O + + return Result(result=result, headers=headers, data=data, recommended_action=recommended_action, doc_url=doc_url) + + # ---- Script Execution ---- @@ -6168,6 +6204,7 @@ 
class CheckManager: standby_sup_sync_check, isis_database_byte_check, configpush_shard_check, + leaf_ntp_sync_check, ] ssh_checks = [ diff --git a/docs/docs/validations.md b/docs/docs/validations.md index fa1fc0e..4790930 100644 --- a/docs/docs/validations.md +++ b/docs/docs/validations.md @@ -191,6 +191,7 @@ Items | Defect | This Script [Stale pconsRA Object][d26] | CSCwp22212 | :warning:{title="Deprecated"} | :no_entry_sign: [ISIS DTEPs Byte Size][d27] | CSCwp15375 | :white_check_mark: | :no_entry_sign: [Policydist configpushShardCont Crash][d28] | CSCwp95515 | :white_check_mark: | +[NTP sync issue in Leaf as NTP server][d29] | CSCwq28721 | :white_check_mark: | [d1]: #ep-announce-compatibility [d2]: #eventmgr-db-size-defect-susceptibility @@ -220,7 +221,7 @@ Items | Defect | This Script [d26]: #stale-pconsra-object [d27]: #isis-dteps-byte-size [d28]: #policydist-configpushshardcont-crash - +[d29]: #ntp-sync-issue-in-leaf-as-ntp-server ## General Check Details @@ -2614,6 +2615,18 @@ Due to [CSCwp95515][59], upgrading to an affected version while having any `conf If any instances of `configpushShardCont` are flagged by this script, Cisco TAC must be contacted to identify and resolve the underlying issue before performing the upgrade. +### NTP sync issue in Leaf as NTP server + +RCA: +After the ACI fabric is upgraded to an affected version, in a setup that uses a leaf switch as the NTP server, the destination IP of an NTP request coming from a host (NTP client) is not stored and reused as the source IP when the leaf switch replies. +Detailed information: sendpkt in the NTP (3rd-party) code supports only the immediate source interface IP. A mechanism to store the original source IP address must be present so that packets can be sent back to that original source IP address. + +IMPACT: +After the upgrade, NTP stopped working correctly between the endpoints and the master node (leaf switches).
The NTP request is sent with the BD SVI IP as expected, but the leaf switch responds with a different BD IP in the same VRF, leading to the NTP response being rejected by the endpoints. + +Suggestion: +Use an IP address from a VRF that has only one IP address on the switch; for example, the in-band VRF (in-band IP) usually has only one IP address. Alternatively, move to a fixed version; refer to [CSCwp92030][62] for details. + [0]: https://github.com/datacenter/ACI-Pre-Upgrade-Validation-Script [1]: https://www.cisco.com/c/dam/en/us/td/docs/Website/datacenter/apicmatrix/index.html [2]: https://www.cisco.com/c/en/us/support/switches/nexus-9000-series-switches/products-release-notes-list.html @@ -2676,3 +2689,4 @@ If any instances of `configpushShardCont` are flagged by this script, Cisco TAC [59]: https://bst.cloudapps.cisco.com/bugsearch/bug/CSCwp95515 [60]: https://www.cisco.com/c/en/us/solutions/collateral/data-center-virtualization/application-centric-infrastructure/white-paper-c11-743951.html#Inter [61]: https://www.cisco.com/c/en/us/solutions/collateral/data-center-virtualization/application-centric-infrastructure/white-paper-c11-743951.html#EnablePolicyCompression +[62]: https://bst.cloudapps.cisco.com/bugsearch/bug/CSCwp92030 \ No newline at end of file diff --git a/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_issue.json b/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_issue.json new file mode 100644 index 0000000..e2b8a82 --- /dev/null +++ b/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_issue.json @@ -0,0 +1,29 @@ +[ + { + "datetimePol": { + "attributes": { + "StratumValue": "8", + "adminSt": "enabled", + "annotation": "", + "authSt": "disabled", + "childAction": "", + "configIssues": "", + "descr": "", + "dn": "uni/fabric/time-default", + "extMngdBy": "", + "lcOwn": "local", + "masterMode": "enabled", + "modTs": "2024-12-20T07:45:21.917+00:00", + "monPolDn": "", + "name": "default", + "nameAlias": "", + "ownerKey": "", + "ownerTag": "", + "serverState":
"enabled", + "status": "", + "uid": "0", + "userdom": "all" + } + } + } + ] \ No newline at end of file diff --git a/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_issue_2.json b/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_issue_2.json new file mode 100644 index 0000000..fe1ac1a --- /dev/null +++ b/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_issue_2.json @@ -0,0 +1,29 @@ +[ + { + "datetimePol": { + "attributes": { + "StratumValue": "8", + "adminSt": "enabled", + "annotation": "", + "authSt": "disabled", + "childAction": "", + "configIssues": "", + "descr": "", + "dn": "uni/fabric/time-NEW1", + "extMngdBy": "", + "lcOwn": "local", + "masterMode": "enabled", + "modTs": "2024-12-20T07:45:21.917+00:00", + "monPolDn": "", + "name": "NEW1", + "nameAlias": "", + "ownerKey": "", + "ownerTag": "", + "serverState": "enabled", + "status": "", + "uid": "15374", + "userdom": ":all:" + } + } + } + ] \ No newline at end of file diff --git a/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_no_issue.json b/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_no_issue.json new file mode 100644 index 0000000..c808568 --- /dev/null +++ b/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_no_issue.json @@ -0,0 +1,29 @@ +[ + { + "datetimePol": { + "attributes": { + "StratumValue": "8", + "adminSt": "enabled", + "annotation": "", + "authSt": "disabled", + "childAction": "", + "configIssues": "", + "descr": "", + "dn": "uni/fabric/time-default", + "extMngdBy": "", + "lcOwn": "local", + "masterMode": "disabled", + "modTs": "2024-12-20T07:45:21.917+00:00", + "monPolDn": "", + "name": "default", + "nameAlias": "", + "ownerKey": "", + "ownerTag": "", + "serverState": "disabled", + "status": "", + "uid": "0", + "userdom": "all" + } + } + } + ] \ No newline at end of file diff --git a/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_no_issue_2.json b/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_no_issue_2.json new file mode 100644 index 
0000000..383582c --- /dev/null +++ b/tests/checks/leaf_ntp_sync_check/datetimePol_ntp_sync_no_issue_2.json @@ -0,0 +1,29 @@ +[ + { + "datetimePol": { + "attributes": { + "StratumValue": "8", + "adminSt": "enabled", + "annotation": "", + "authSt": "disabled", + "childAction": "", + "configIssues": "", + "descr": "", + "dn": "uni/fabric/time-NEW1", + "extMngdBy": "", + "lcOwn": "local", + "masterMode": "disabled", + "modTs": "2024-12-20T07:45:21.917+00:00", + "monPolDn": "", + "name": "NEW1", + "nameAlias": "", + "ownerKey": "", + "ownerTag": "", + "serverState": "disabled", + "status": "", + "uid": "15374", + "userdom": ":all:" + } + } + } + ] \ No newline at end of file diff --git a/tests/checks/leaf_ntp_sync_check/fabricRsTimePol_no_podgroup.json b/tests/checks/leaf_ntp_sync_check/fabricRsTimePol_no_podgroup.json new file mode 100644 index 0000000..0637a08 --- /dev/null +++ b/tests/checks/leaf_ntp_sync_check/fabricRsTimePol_no_podgroup.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/tests/checks/leaf_ntp_sync_check/fabricRsTimePol_ntp_sync_1pod.json b/tests/checks/leaf_ntp_sync_check/fabricRsTimePol_ntp_sync_1pod.json new file mode 100644 index 0000000..302532f --- /dev/null +++ b/tests/checks/leaf_ntp_sync_check/fabricRsTimePol_ntp_sync_1pod.json @@ -0,0 +1,28 @@ +[ + { + "fabricRsTimePol": { + "attributes": { + "annotation": "", + "childAction": "", + "dn": "uni/fabric/funcprof/podpgrp-default/rsTimePol", + "extMngdBy": "", + "forceResolve": "yes", + "lcOwn": "local", + "modTs": "2025-12-23T07:43:09.740+00:00", + "monPolDn": "", + "rType": "mo", + "state": "formed", + "stateQual": "none", + "status": "", + "tCl": "datetimePol", + "tContextDn": "", + "tDn": "uni/fabric/time-default", + "tRn": "time-default", + "tType": "name", + "tnDatetimePolName": "default", + "uid": "0", + "userdom": "all" + } + } + } + ] \ No newline at end of file diff --git a/tests/checks/leaf_ntp_sync_check/fabricRsTimePol_ntp_sync_2pod.json 
b/tests/checks/leaf_ntp_sync_check/fabricRsTimePol_ntp_sync_2pod.json new file mode 100644 index 0000000..0a8ab87 --- /dev/null +++ b/tests/checks/leaf_ntp_sync_check/fabricRsTimePol_ntp_sync_2pod.json @@ -0,0 +1,54 @@ +[ + { + "fabricRsTimePol": { + "attributes": { + "annotation": "", + "childAction": "", + "dn": "uni/fabric/funcprof/podpgrp-default/rsTimePol", + "extMngdBy": "", + "forceResolve": "yes", + "lcOwn": "local", + "modTs": "2025-12-23T07:43:09.740+00:00", + "monPolDn": "", + "rType": "mo", + "state": "formed", + "stateQual": "none", + "status": "", + "tCl": "datetimePol", + "tContextDn": "", + "tDn": "uni/fabric/time-default", + "tRn": "time-default", + "tType": "name", + "tnDatetimePolName": "default", + "uid": "0", + "userdom": "all" + } + } + }, + { + "fabricRsTimePol": { + "attributes": { + "annotation": "", + "childAction": "", + "dn": "uni/fabric/funcprof/podpgrp-TESt2/rsTimePol", + "extMngdBy": "", + "forceResolve": "yes", + "lcOwn": "local", + "modTs": "2025-12-23T07:41:09.236+00:00", + "monPolDn": "", + "rType": "mo", + "state": "formed", + "stateQual": "none", + "status": "", + "tCl": "datetimePol", + "tContextDn": "", + "tDn": "uni/fabric/time-NEW1", + "tRn": "time-NEW1", + "tType": "name", + "tnDatetimePolName": "NEW1", + "uid": "0", + "userdom": ":all:" + } + } + } + ] \ No newline at end of file diff --git a/tests/checks/leaf_ntp_sync_check/test_leaf_ntp_sync_check.py b/tests/checks/leaf_ntp_sync_check/test_leaf_ntp_sync_check.py new file mode 100644 index 0000000..afaeb00 --- /dev/null +++ b/tests/checks/leaf_ntp_sync_check/test_leaf_ntp_sync_check.py @@ -0,0 +1,94 @@ +import os +import pytest +import logging +import importlib +from helpers.utils import read_data + +script = importlib.import_module("aci-preupgrade-validation-script") + +log = logging.getLogger(__name__) +dir = os.path.dirname(os.path.abspath(__file__)) + +test_function = "leaf_ntp_sync_check" + +fabricRsTimePol_api = "fabricRsTimePol.json" +datetimePol_mo1 = 
"uni/fabric/time-default.json" +datetimePol_mo2 = "uni/fabric/time-NEW1.json" + +@pytest.mark.parametrize( + "icurl_outputs, cversion, tversion, expected_result", + [ + # no pod group scenario + ( { fabricRsTimePol_api: read_data(dir, "fabricRsTimePol_no_podgroup.json"), + datetimePol_mo1: read_data(dir, "datetimePol_ntp_sync_issue.json")}, + "6.1(4.10)", + "6.1(4.15)", + script.PASS, + ), + # single pod scenario + # Version not affected + ( { fabricRsTimePol_api: read_data(dir, "fabricRsTimePol_ntp_sync_1pod.json"), + datetimePol_mo1: read_data(dir, "datetimePol_ntp_sync_issue.json")}, + "6.1(4.28)", + "6.1(4.30)", + script.NA, + ), + # Affected version, no NTP sync issue + ( { fabricRsTimePol_api: read_data(dir, "fabricRsTimePol_ntp_sync_1pod.json"), + datetimePol_mo1: read_data(dir, "datetimePol_ntp_sync_no_issue.json")}, + "6.1(4.10)", + "6.1(4.15)", + script.PASS, + ), + # Affected version, NTP sync issue + ( { fabricRsTimePol_api: read_data(dir, "fabricRsTimePol_ntp_sync_1pod.json"), + datetimePol_mo1: read_data(dir, "datetimePol_ntp_sync_issue.json")}, + "6.1(4.10)", + "6.1(4.15)", + script.FAIL_O, + ), + # multi pod scenario + # Version not affected + ( + { fabricRsTimePol_api: read_data(dir, "fabricRsTimePol_ntp_sync_2pod.json"), + datetimePol_mo1: read_data(dir, "datetimePol_ntp_sync_issue.json"), + datetimePol_mo2: read_data(dir, "datetimePol_ntp_sync_issue_2.json")}, + "6.1(4.28)", + "6.1(4.30)", + script.NA, + ), + # Affected version, no NTP sync issue + ( + { fabricRsTimePol_api: read_data(dir, "fabricRsTimePol_ntp_sync_2pod.json"), + datetimePol_mo1: read_data(dir, "datetimePol_ntp_sync_no_issue.json"), + datetimePol_mo2: read_data(dir, "datetimePol_ntp_sync_no_issue_2.json")}, + "6.1(4.10)", + "6.1(4.15)", + script.PASS, + ), + # Affected version, one NTP sync issue + ( + { fabricRsTimePol_api: read_data(dir, "fabricRsTimePol_ntp_sync_2pod.json"), + datetimePol_mo1: read_data(dir, "datetimePol_ntp_sync_issue.json"), + datetimePol_mo2: read_data(dir, 
"datetimePol_ntp_sync_no_issue_2.json")}, + "6.1(4.10)", + "6.1(4.15)", + script.FAIL_O, + ), + # Affected version, multiple NTP sync issues + ( + { fabricRsTimePol_api: read_data(dir, "fabricRsTimePol_ntp_sync_2pod.json"), + datetimePol_mo1: read_data(dir, "datetimePol_ntp_sync_issue.json"), + datetimePol_mo2: read_data(dir, "datetimePol_ntp_sync_issue_2.json")}, + "6.1(4.10)", + "6.1(4.15)", + script.FAIL_O, + ), + ], +) +def test_leaf_ntp_sync_check(run_check, mock_icurl, cversion, tversion, expected_result): + result = run_check( + cversion=script.AciVersion(cversion) if cversion else None, + tversion=script.AciVersion(tversion) if tversion else None, + ) + assert result.result == expected_result \ No newline at end of file