From 393affc0a086b119b853276fcb645500efbabf58 Mon Sep 17 00:00:00 2001 From: PriyansheeSharma Date: Fri, 11 Jul 2025 14:14:01 +0530 Subject: [PATCH 1/7] Create json_converter.py --- MFT-Dashboard/json_converter.py | 320 ++++++++++++++++++++++++++++++++ 1 file changed, 320 insertions(+) create mode 100644 MFT-Dashboard/json_converter.py diff --git a/MFT-Dashboard/json_converter.py b/MFT-Dashboard/json_converter.py new file mode 100644 index 0000000..79d127e --- /dev/null +++ b/MFT-Dashboard/json_converter.py @@ -0,0 +1,320 @@ +import json +import re +from datetime import datetime + +def parse_log_line(line): + """ + Parse a single log line into a dictionary based on event type. + + Args: + line (str): A single line from the log file. + + Returns: + dict or None: Parsed event as a dictionary, or None if the line is malformed or unsupported. + """ + + parts = line.strip().split(";") + if len(parts) < 4: + print(f"Skipping malformed line: {line.strip()}") + return None + + match = re.match(r"\s*\[([A-Z]+)\]", parts[2]) + if match: + event_type = match.group(1) + else: + event_type = parts[2].strip() + if event_type.startswith("[") and event_type.endswith("]"): + event_type = event_type[1:-1].strip() + + try: + + # Common fields for all events + common = { + "timestamp": datetime.strptime(parts[0], "%Y-%m-%dT%H:%M:%S").isoformat(), + "reference_id": parts[1].strip(), + "type": event_type + } + + if event_type == "MACT": + common.update({ + "monitor_name": parts[4], + "agent": parts[5], + "qmgr": parts[6], + "action": parts[7] + }) + + elif event_type == "MCRT": + common.update({ + "monitor_name": parts[3], + "agent": parts[4], + "qmgr": parts[5], + "action": parts[6] + }) + + elif event_type == "MFIR": + common.update({ + "monitor_name": parts[4], + "agent": parts[5], + "qmgr": parts[6], + "result_code": parts[3], + "action": parts[7] + }) + + elif event_type == "AUTH": + common.update({ + "id": parts[1].strip(), + "result_code": parts[3], + "action": parts[4], + "authority": parts[5], + "user_id": parts[6], + "mqmd_user_id": parts[7] + }) + + elif event_type == "SDEL" : + common.update({ + "id": parts[1].strip(), + "result_code": parts[3], + "agent": parts[4], + "action": parts[5], + "user_id": parts[6], + + }) + elif event_type == "SEXP": + common.update({ + "id": parts[1].strip(), + "result_code": parts[3], + "agent": parts[4], + "action": "expire", + "user_id": parts[6] + }) + elif event_type == "SSKP": + common.update({ + "id": parts[1].strip(), + "result_code": parts[3], + "agent": parts[4], + "action": parts[5], + "user_id": parts[6] + }) + + elif event_type == "SSIN": + common.update({ + "id": parts[1].strip(), + "result_code": parts[3], + "agent": parts[4], + "action": parts[5], + "user_id": parts[6], + "timezone": parts[8], + + "expire_count": parts[9], + + }) + + elif event_type == "SSTR": + common.update({ + "id": parts[1].strip(), + "source_agent": parts[3], + "source_qmgr": parts[4], + "destination_agent": parts[5], + "destination_qmgr": parts[6] + }) + + elif event_type == "SSTS": + + while len(parts) < 11: + parts.append("") + common.update({ + "id": parts[1].strip(), + "source_file": parts[3].strip(), + "source_queue": parts[4].strip(), + "destination_file": parts[6].strip(), + "destination_type": parts[7].strip(), + + }) + + elif event_type == "TSTR": + common.update({ + "transfer_id": parts[1].strip(), + "source_agent": parts[4], + "source_qmgr": parts[5], + "destination_agent": parts[7], + "destination_qmgr": parts[8], + "user_id": parts[9], + "job_name": parts[10] + }) + + 
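+        # Completed (TCOM), cancelled (TCAN) and deleted (TDEL) transfers share the same positional field layout below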
elif event_type == "TCOM": + common.update({ + "transfer_id": parts[1].strip(), + "result_code": parts[3], + "source_agent": parts[4], + "source_qmgr": parts[5], + "destination_agent": parts[7], + "destination_qmgr": parts[8], + "user_id": parts[10] + + }) + + elif event_type == "TCAN" or event_type == "TDEL": + common.update({ + "transfer_id": parts[1].strip(), + "result_code": parts[3], + "source_agent": parts[4], + "source_qmgr": parts[5], + "destination_agent": parts[7], + "destination_qmgr": parts[8], + "user_id": parts[10] + }) + + elif event_type == "TPRO": + common.update({ + "transfer_id": parts[1].strip(), + "source_file": parts[4], + "source_type": parts[6], + "destination_file": parts[13], + "destination_type":parts[15], + "result_code": parts[3] + }) + + else: + return None + + return common + + except Exception as e: + print(f"Error parsing line: {line.strip()}") + print(f"Exception: {e}") + return None + + +def process_log_file(file_path): + """ + Read the log file and parse each line into a structured event dictionary. + + Args: + file_path (str): Path to the log file. + + Returns: + list: List of parsed event dictionaries. + """ + events = [] + + with open(file_path, "r") as f: + for line in f: + if line.strip(): + document = parse_log_line(line) + if document: + events.append(document) + + return events + +log_file_path = "sample.log" +parsed_data = process_log_file(log_file_path) + + + +def categorize_entities(parsed_data): + """ + Categorize entities found in the parsed data. + + Args: + parsed_data (list): List of parsed event dictionaries. + + Returns: + dict: Categorized lists and counts of monitors, agents, queue managers, and transfer events. + """ + + monitors = set() + agents = set() + source_qmgrs = set() + destination_qmgrs = set() + transfer_event_count = 0 + + for doc in parsed_data: + if doc["type"] in {"MACT", "MCRT", "MFIR"}: + if "monitor_name" in doc: + monitors.add(doc["monitor_name"]) + if "agent" in doc: + agents.add(doc["agent"]) + if "source_agent" in doc: + agents.add(doc["source_agent"]) + if "destination_agent" in doc: + agents.add(doc["destination_agent"]) + if "source_qmgr" in doc: + source_qmgrs.add(doc["source_qmgr"]) + if "destination_qmgr" in doc: + destination_qmgrs.add(doc["destination_qmgr"]) + if doc["type"] in {"TSTR", "TCOM", "TPRO", "TCAN", "TDEL"}: + transfer_event_count += 1 + + return { + "monitors": sorted(monitors), + "agents": sorted(agents), + "source_qmgrs": sorted(source_qmgrs), + "destination_qmgrs": sorted(destination_qmgrs), + "transfer_event_count": transfer_event_count + } + + +def build_scheduled_transfers(parsed_data): + """ + Build a list of scheduled transfers by grouping SSIN, SSTR, SSTS, and SEXP events + by their reference_id. Mark as expired if SEXP event is present. + + Args: + parsed_data (list): List of parsed event dictionaries. + + Returns: + list: List of scheduled transfer dictionaries. 
+    """
+
+    scheduled = {}
+    for event in parsed_data:
+        ref_id = event.get("reference_id") or event.get("id")
+        if not ref_id:
+            continue
+        event_type = event.get("type")
+        if event_type in {"SSIN", "SSTR", "SSTS", "SEXP"}:
+            if ref_id not in scheduled:
+                scheduled[ref_id] = {
+                    "reference_id": ref_id,
+                    "ssin": None,
+                    "sstr": None,
+                    "ssts": None,
+                    "sexp": None,
+                    "is_expired": False
+                }
+            if event_type == "SSIN":
+                scheduled[ref_id]["ssin"] = event
+            elif event_type == "SSTR":
+                scheduled[ref_id]["sstr"] = event
+            elif event_type == "SSTS":
+                scheduled[ref_id]["ssts"] = event
+            elif event_type == "SEXP":
+                scheduled[ref_id]["sexp"] = event
+                scheduled[ref_id]["is_expired"] = True
+
+    result = []
+    for sched in scheduled.values():
+        if sched["ssin"] and sched["sstr"] and sched["ssts"]:
+            result.append(sched)
+    return result
+
+# === Main Execution ===
+
+log_file_path = "sample.log"
+parsed_data = process_log_file(log_file_path)
+
+with open("merged_transfers.json", "w") as f:
+    json.dump(parsed_data, f, indent=4)
+
+scheduled_transfers = build_scheduled_transfers(parsed_data)
+with open("scheduled_transfers.json", "w") as f:
+    json.dump(scheduled_transfers, f, indent=4)
+print("Scheduled transfers saved to 'scheduled_transfers.json'.")
+
+categorized_data = categorize_entities(parsed_data)
+with open("categorized_summary.json", "w") as f:
+    json.dump(categorized_data, f, indent=4)
+
+print("Categorized entity summary saved to 'categorized_summary.json'.")

From b4bc80ada4297906be0e4be2f7854504e131aac7 Mon Sep 17 00:00:00 2001
From: PriyansheeSharma
Date: Fri, 11 Jul 2025 14:14:32 +0530
Subject: [PATCH 2/7] Create mft_dashboard.py

---
 MFT-Dashboard/mft_dashboard.py | 424 +++++++++++++++++++++++++++++++++
 1 file changed, 424 insertions(+)
 create mode 100644 MFT-Dashboard/mft_dashboard.py

diff --git a/MFT-Dashboard/mft_dashboard.py b/MFT-Dashboard/mft_dashboard.py
new file mode 100644
index 0000000..e44d921
--- /dev/null
+++ b/MFT-Dashboard/mft_dashboard.py
@@ -0,0 +1,424 @@
+import streamlit as st
+import pandas as pd
+import plotly.graph_objects as go
+from pymongo import MongoClient
+import json
+import os
+import subprocess
+
+# --- Streamlit page config ---
+st.set_page_config(page_title="MFT Dashboard", layout="wide")
+
+# --- Step 1: Use sample.log from the local directory ---
+log_path = "sample.log"
+if not os.path.exists(log_path):
+    st.error("sample.log not found in the current directory.")
+    st.stop()
+
+# --- Step 2: Run json_converter.py to generate merged_transfers.json and scheduled_transfers.json ---
+merged_json_path = "merged_transfers.json"
+scheduled_json_path = "scheduled_transfers.json"
+
+for f in [merged_json_path, scheduled_json_path]:
+    if os.path.exists(f):
+        os.remove(f)
+
+subprocess.run([
+    "python3", "json_converter.py",
+    "--input", log_path,
+    "--merged_output", merged_json_path,
+    "--scheduled_output", scheduled_json_path
+], check=True)
+
+# --- Step 3: Upload JSONs to MongoDB ---
+MONGO_URI = ""
+DB_NAME = "mft_dashboard"
+MERGED_COLLECTION = "merged_transfers"
+SCHEDULED_COLLECTION = "scheduled_transfers"
+
+client = MongoClient(MONGO_URI)
+db = client[DB_NAME]
+
+with open(merged_json_path) as f:
+    merged_data = json.load(f)
+with open(scheduled_json_path) as f:
+    scheduled_data = json.load(f)
+if isinstance(merged_data, list) and merged_data:
+    db[MERGED_COLLECTION].insert_many(merged_data)
+if isinstance(scheduled_data, list) and scheduled_data:
+    db[SCHEDULED_COLLECTION].insert_many(scheduled_data)
+
+# --- Step 4: Pull data from MongoDB ---
+merged_df = pd.DataFrame(list(db[MERGED_COLLECTION].find({}, {"_id": 0}))) +scheduled_df = pd.DataFrame(list(db[SCHEDULED_COLLECTION].find({}, {"_id": 0}))) + +# --- Step 5: Dashboard Components --- + +# Download buttons +col_dl1, col_dl2 = st.columns(2) +with col_dl1: + st.download_button( + label="Download All Transfers (CSV)", + data=merged_df.to_csv(index=False), + file_name="merged_transfers.csv", + mime="text/csv" + ) +with col_dl2: + st.download_button( + label="Download Scheduled Transfers (CSV)", + data=scheduled_df.to_csv(index=False), + file_name="scheduled_transfers.csv", + mime="text/csv" + ) + +# Result code mapping +MFT_RETURN_CODES = { + "0": ("Success", "The command was successful"), + "1": ("Command unsuccessful", "The command ended unsuccessfully."), + "2": ("Command timed out", "The agent did not reply with the status of the command within a specified timeout."), + "3": ("Acknowledgement timed out", "The agent did not acknowledge receipt of the command within a specified timeout."), + "4": ("Wrong agent", "The command was sent to the wrong agent."), + "20": ("Transfer partially successful", "The transfer completed with partial success and some files were transferred."), + "21": ("Transfer stopped", "The transfer was stopped by one of the user exits."), + "22": ("Cancel transfer timed out", "The agent received a request to cancel a transfer but the cancellation could not be completed within 30 seconds."), + "26": ("Cancel ID not found", "The agent received a request to cancel a transfer but the transfer cannot be found."), + "27": ("Cancel in progress", "The agent received a request to cancel a transfer, but the transfer is already in the process of being canceled."), + "40": ("Failed", "The transfer failed and none of the files specified were transferred."), + "41": ("Cancelled", "The transfer was canceled."), + "42": ("Trigger failed", "The transfer did not take place because the transfer was conditional and the required condition was not met."), + "43": ("Malformed XML", "An XML message was malformed."), + "44": ("Source agent capacity exceeded", "The source agent did not have sufficient capacity to carry out the transfer."), + "45": ("Destination agent capacity exceeded", "The destination agent did not have sufficient capacity to carry out the transfer."), + "46": ("Source agent maximum number of files exceeded", "The number of files being transferred exceeded the limit of the source agent."), + "47": ("Destination agent maximum number of files exceeded", "The number of files transferred exceeded the limit of the destination agent."), + "48": ("Invalid log message attributes", "A log message is malformed. 
This error is an internal error."),
+    "49": ("Destination unreachable", "The source agent is unable to send a message to the destination agent due to an IBM MQ problem."),
+    "50": ("Trial version violation", "An attempt was made by a trial version agent to communicate with an agent that is not a trial version agent."),
+    "51": ("Source transfer not permitted", "The maxSourceTransfers agent property has been set to 0."),
+    "52": ("Destination transfer not permitted", "The maxDestinationTransfers agent property has been set to 0."),
+    "53": ("Not authorized", "The user is not authorized to perform the operation."),
+    "54": ("Authority levels do not match", "The authorityChecking agent property value of the source agent and destination agent do not match."),
+    "55": ("Trigger not supported", "An attempt has been made to create a transfer with a trigger on a protocol bridge agent."),
+    "56": ("Destination file to message not supported", "The destination agent does not support writing the file to a destination queue."),
+    "57": ("File space not supported", "The destination agent does not support file spaces."),
+    "58": ("File space rejected", "The file space transfer was rejected by the destination agent."),
+    "59": ("Destination message to file not supported", "The destination agent does not support message-to-file transfers."),
+    "64": ("Both queues disallowed", "The source and destination of a transfer are both queues."),
+    "65": ("General data queue error", "An error occurred when the Managed File Transfer Agent data queue was accessed."),
+    "66": ("Data queue put authorization error", "An error occurred when the Managed File Transfer Agent data queue was accessed. Advanced Message Security is not enabled."),
+    "67": ("Data queue put AMS error", "An authorization error occurred when the Managed File Transfer Agent data queue was accessed.
Advanced Message Security is enabled."), + "69": ("Transfer Recovery Timed out", "Recovery of a transfer timed out after the specified transferRecoveryTimeout value."), + "70": ("Agent has ended abnormally", "Application has had an unrecoverable problem and is forcibly terminating."), + "75": ("Queue manager is unavailable", "The application cannot continue because the queue manager for the application is unavailable."), + "78": ("Problem with the startup configuration", "The application cannot continue because there is a problem with the startup configuration data."), + "85": ("Problem with the database server", "The application cannot continue because there is a problem with the database (typically only returned by a logger)"), + "100": ("Monitor substitution not valid", "The format of a variable substitution within a monitor task XML script was malformed."), + "101": ("Monitor resource incorrect", "The number of monitor resource definitions was not valid."), + "102": ("Monitor trigger incorrect", "The number of monitor trigger definitions was not valid."), + "103": ("Monitor task incorrect", "The number of monitor task definitions was not valid."), + "104": ("Monitor missing", "The requested monitor is not present."), + "105": ("Monitor already present", "The requested monitor is already present."), + "106": ("Monitor user exit error", "A monitor user exit has generated an error during a resource monitor poll."), + "107": ("Monitor user exit canceled", "A monitor user exit has requested a transaction to be canceled."), + "108": ("Monitor task failed", "A monitor task has failed to complete due to error in processing the task."), + "109": ("Monitor resource failed", "A monitor resource definition cannot be applied to the given resource."), + "110": ("Monitor task variable substitution failed", "A variable has been specified in a monitor task but no matching name has been found in the metadata."), + "111": ("Monitor task source agent not valid", "The source agent of the monitor transfer task does not match the agent of the resource monitor."), + "112": ("Monitor task source queue manager not valid", "The source agent queue manager of the monitor transfer task does not match the agent queue manager of the resource monitor."), + "113": ("Monitor not supported", "An attempt has been made to create or delete a resource monitor on a protocol bridge agent."), + "114": ("Monitor resource denied", "The directory that is scanned by the monitor resource is denied access."), + "115": ("Monitor resource queue in use", "The monitor resource queue is already open, and is not compatible for input with shared access."), + "116": ("Monitor resource queue unknown", "The monitor resource queue does not exist on the associated queue manager of the monitor."), + "118": ("Monitor resource expression invalid", "An error occurred evaluating the XPath expression."), + "119": ("Monitor task source agent queue manager missing", "The source agent name or source agent queue manager name is missing from the monitor task definition."), + "120": ("Monitor queue not enabled", "The monitor resource queue is not enabled."), + "121": ("Unexpected error when accessing monitor queue", "An unexpected error occurred when accessing the monitor resource queue."), + "122": ("Monitor command queue not enabled for context id", "The monitor agent command queue is not enabled for set context identification."), +} + +def get_result_message(code): + """ + Given a result code, return a message describing the result. 
+
+    Args:
+        code (str): The result code to look up.
+
+    Returns:
+        str: A string containing the description of the result code,
+             or "Unknown code" if the code is not found.
+    """
+
+    code_str = str(code).strip()
+
+    if code_str in MFT_RETURN_CODES:
+        short, desc = MFT_RETURN_CODES[code_str]
+        return f"{short}: {desc}"
+    else:
+        return "Unknown code"
+
+
+tpro_map = {}
+tstr_map = {}
+ssts_map = {}
+
+for _, row in merged_df.iterrows():
+    ref_id = row.get("reference_id")
+    if row.get("type") == "TPRO":
+        tpro_map[ref_id] = row
+    elif row.get("type") == "TSTR":
+        tstr_map[ref_id] = row
+    elif row.get("type") == "SSTS":
+        ssts_map[ref_id] = row
+
+# Monitor Status Overview
+st.subheader("Monitor Status Overview")
+monitor_df = merged_df[merged_df['type'] == 'MACT'] if 'type' in merged_df.columns else pd.DataFrame()
+monitors_started = monitor_df[monitor_df['action'] == 'start'] if not monitor_df.empty else pd.DataFrame()
+monitors_stopped = monitor_df[monitor_df['action'] == 'stop'] if not monitor_df.empty else pd.DataFrame()
+
+if not monitor_df.empty and 'monitor_name' in monitor_df.columns:
+    stopped_names = set(monitors_stopped['monitor_name'])
+    monitors_started = monitors_started[~monitors_started['monitor_name'].isin(stopped_names)]
+
+active_count = len(monitors_started)
+stopped_count = len(monitors_stopped)
+
+if 'show_active' not in st.session_state:
+    st.session_state['show_active'] = False
+if 'show_stopped' not in st.session_state:
+    st.session_state['show_stopped'] = False
+
+col1, col2 = st.columns(2)
+with col1:
+    if st.button(f"🟢 Active Monitors: {active_count}", key="active_monitor_button"):
+        st.session_state['show_active'] = not st.session_state['show_active']
+    if st.session_state['show_active'] and not monitors_started.empty:
+        # The parser stores the monitor's agent under the "agent" key
+        cols = [c for c in ['monitor_name', 'qmgr', 'agent'] if c in monitors_started.columns]
+        st.dataframe(monitors_started[cols].reset_index(drop=True))
+
+with col2:
+    if st.button(f"🔴 Stopped Monitors: {stopped_count}", key="stopped_monitor_button"):
+        st.session_state['show_stopped'] = not st.session_state['show_stopped']
+    if st.session_state['show_stopped'] and not monitors_stopped.empty:
+        cols = [c for c in ['monitor_name', 'qmgr', 'agent'] if c in monitors_stopped.columns]
+        st.dataframe(monitors_stopped[cols].reset_index(drop=True))
+
+# Donut Chart
+st.subheader("Transfer Statistics Donut Chart")
+if not merged_df.empty and 'result_code' in merged_df.columns:
+    merged_df['result_code'] = merged_df['result_code'].astype(str).str.strip()
+    total_transfers = merged_df.shape[0]
+    successful_transfers = merged_df[merged_df['result_code'].isin(['0', '0.0'])].shape[0]
+    failed_transfers = merged_df[~merged_df['result_code'].isin(['0', '0.0']) & merged_df['result_code'].notna()].shape[0]
+else:
+    total_transfers = successful_transfers = failed_transfers = 0
+
+if "is_expired" not in scheduled_df.columns:
+    scheduled_df["is_expired"] = False
+scheduled_count = len(scheduled_df[scheduled_df["is_expired"] == False])
+expired_count = len(scheduled_df[scheduled_df["is_expired"] == True])
+
+labels = [
+    "Successful Transfers",
+    "Failed Transfers",
+    "Scheduled Transfers",
+    "Expired Scheduled Transfers",
+    "Other Transfers"
+]
+values = [
+    successful_transfers,
+    failed_transfers,
+    scheduled_count,
+    expired_count,
+    total_transfers - (successful_transfers + failed_transfers + scheduled_count + expired_count)
+]
+colors = ['#43d13a', '#e53935', '#FFA500', '#808080', '#BDBDBD']
+
+values = [max(0, v) for v in values]
+
+fig =
go.Figure(data=[go.Pie( + labels=labels, + values=values, + hole=0.5, + marker=dict(colors=colors), + textinfo='label+percent', +)]) + +fig.update_layout( + title_text="Transfer Statistics", + annotations=[dict(text='Transfers', x=0.5, y=0.5, font_size=20, showarrow=False)] +) + +st.plotly_chart(fig, use_container_width=True) + +# Successful vs Failed Transfers per Agent +st.subheader("Successful vs Failed Transfers per Agent") + +if not merged_df.empty and 'result_code' in merged_df.columns and 'source_agent' in merged_df.columns: + agent_success = merged_df[merged_df['result_code'].isin(['0', '0.0'])].groupby('source_agent').size() + agent_failed = merged_df[~merged_df['result_code'].isin(['0', '0.0']) & merged_df['result_code'].notna()].groupby('source_agent').size() + agent_transfer_table = pd.DataFrame({ + 'Successful': agent_success, + 'Failed': agent_failed + }).fillna(0).astype(int) + bar_colors = ['#43d13a', '#e53935'] + + fig_col = go.Figure() + fig_col.add_bar( + name='Successful', + x=agent_transfer_table.index, + y=agent_transfer_table['Successful'], + marker_color=bar_colors[0] + ) + fig_col.add_bar( + name='Failed', + x=agent_transfer_table.index, + y=agent_transfer_table['Failed'], + marker_color=bar_colors[1] + ) + fig_col.update_layout( + barmode='stack', + xaxis_title='Source Agent', + yaxis_title='Number of Transfers', + legend_title='Transfer Status', + title='Successful vs Failed Transfers per Agent' + ) + st.plotly_chart(fig_col, use_container_width=True) + +# Table 1: Last 5 Transfers +st.subheader("Last 5 Transfers Overview") + +if not merged_df.empty and "type" in merged_df.columns: + completed_df = merged_df[merged_df["type"] == "TCOM"].copy() + completed_df["status"] = completed_df["result_code"].apply( + lambda x: "Successful" if str(x).strip() in ["0", "0.0"] else "Failed" + ) + completed_df["result_message"] = completed_df["result_code"].apply(get_result_message) + + + def get_source_file(row): + """ + Retrieve the source file for a transfer, searching related transfer maps if necessary. + + Args: + row (pd.Series): A row from the completed transfers DataFrame. + + Returns: + str: The source file path. + """ + + ref_id = row.get("reference_id") + if ref_id in tpro_map and pd.notnull(tpro_map[ref_id].get("source_file")) and tpro_map[ref_id].get("source_file") != "": + return tpro_map[ref_id].get("source_file") + elif ref_id in tstr_map and pd.notnull(tstr_map[ref_id].get("source_file")) and tstr_map[ref_id].get("source_file") != "": + return tstr_map[ref_id].get("source_file") + elif ref_id in ssts_map and pd.notnull(ssts_map[ref_id].get("source_file")) and ssts_map[ref_id].get("source_file") != "": + return ssts_map[ref_id].get("source_file") + else: + return row.get("source_file", "") + + def get_destination_file(row): + """ + Retrieve the destination file for a transfer, searching related transfer maps if necessary. + + Args: + row (pd.Series): A row from the completed transfers DataFrame. 
+ + Returns: + str: The destination file path + """ + + ref_id = row.get("reference_id") + if ref_id in tpro_map and pd.notnull(tpro_map[ref_id].get("destination_file")) and tpro_map[ref_id].get("destination_file") != "": + return tpro_map[ref_id].get("destination_file") + elif ref_id in tstr_map and pd.notnull(tstr_map[ref_id].get("destination_file")) and tstr_map[ref_id].get("destination_file") != "": + return tstr_map[ref_id].get("destination_file") + elif ref_id in ssts_map and pd.notnull(ssts_map[ref_id].get("destination_file")) and ssts_map[ref_id].get("destination_file") != "": + return ssts_map[ref_id].get("destination_file") + else: + return row.get("destination_file", "") + + completed_df["source_file"] = completed_df.apply(get_source_file, axis=1) + completed_df["destination_file"] = completed_df.apply(get_destination_file, axis=1) + + if "timestamp" in completed_df.columns: + completed_df["timestamp_dt"] = pd.to_datetime(completed_df["timestamp"], errors="coerce") + completed_df = completed_df.sort_values(by="timestamp_dt", ascending=False) + display_cols = [ + "timestamp", "reference_id", "source_agent", "source_qmgr", + "destination_agent", "destination_qmgr", "file_count", "source_file", "destination_file", + "result_code", "status", "result_message" + ] + display_cols = [col for col in display_cols if col in completed_df.columns] + + last5_all = completed_df.head(5)[display_cols].reset_index(drop=True) + last5_success = completed_df[completed_df["result_code"].astype(str).str.strip().isin(["0", "0.0"])].head(5)[display_cols].reset_index(drop=True) + last5_failed = completed_df[~completed_df["result_code"].astype(str).str.strip().isin(["0", "0.0"])].head(5)[display_cols].reset_index(drop=True) + + table_option = st.radio( + "Show table:", + ("Last 5 Transfers", "Last 5 Successful Transfers", "Last 5 Failed Transfers"), + horizontal=True, + key="last5_table_radio" + ) + + if table_option == "Last 5 Transfers": + st.dataframe(last5_all) + elif table_option == "Last 5 Successful Transfers": + st.dataframe(last5_success) + elif table_option == "Last 5 Failed Transfers": + st.dataframe(last5_failed) +else: + st.info("No completed transfer data found.") + +# Table 2 & 3: Scheduled Transfers +def extract_sched_info(row): + """ + Extract key information from a scheduled transfer row for display. + + Args: + row (dict): A dictionary representing a scheduled transfer. + + Returns: + dict: A dictionary with selected fields for display in the dashboard. 
+ """ + ssin = row.get("ssin", {}) or {} + sstr = row.get("sstr", {}) or {} + ssts = row.get("ssts", {}) or {} + return { + "reference_id": row.get("reference_id"), + "scheduled_by": ssin.get("user_id"), + "agent": ssin.get("agent"), + "timezone": ssin.get("timezone"), + "source_agent": sstr.get("source_agent"), + "destination_agent": sstr.get("destination_agent"), + "source_file": ssts.get("source_file"), + "destination_file": ssts.get("destination_file"), + } + +if "is_expired" not in scheduled_df.columns: + scheduled_df["is_expired"] = False + +scheduled_active = scheduled_df[scheduled_df["is_expired"] == False] +scheduled_expired = scheduled_df[scheduled_df["is_expired"] == True] + +col1, col2 = st.columns(2) + +with col1: + st.subheader("Next 5 Scheduled Transfers") + if not scheduled_active.empty: + sched_table = pd.DataFrame([extract_sched_info(row) for row in scheduled_active.to_dict(orient="records")]) + sched_table = sched_table.sort_values(by="timezone").head(5) + st.dataframe(sched_table.reset_index(drop=True)) + else: + st.info("No upcoming scheduled transfers found.") + +with col2: + st.subheader("Last 5 Expired Scheduled Transfers") + if not scheduled_expired.empty: + expired_table = pd.DataFrame([extract_sched_info(row) for row in scheduled_expired.to_dict(orient="records")]) + expired_table = expired_table.sort_values(by="timezone", ascending=False).head(5) + st.dataframe(expired_table.reset_index(drop=True)) + else: + st.info("No expired scheduled transfers found.") + From 92f325a08c05ba9f95e7549f13ee971af0ed10cc Mon Sep 17 00:00:00 2001 From: PriyansheeSharma Date: Fri, 11 Jul 2025 14:15:10 +0530 Subject: [PATCH 3/7] Create README.md --- MFT-Dashboard/README.md | 166 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 166 insertions(+) create mode 100644 MFT-Dashboard/README.md diff --git a/MFT-Dashboard/README.md b/MFT-Dashboard/README.md new file mode 100644 index 0000000..9e02ff7 --- /dev/null +++ b/MFT-Dashboard/README.md @@ -0,0 +1,166 @@ +# MFT-Dashboard + +**MFT-Dashboard** is a Streamlit-powered web dashboard for visualizing and monitoring IBM MQ Managed File Transfer (MFT) logs. It parses, structures, and visualizes transfer events, scheduled transfers, monitors, agents, and more—all with interactive charts and tables. + +--- + +## Features + +- **Log Parsing & Categorization:** + `json_converter.py` reads IBM MFT log files, parses each line, structures events, and categorizes entities (monitors, agents, queue managers, transfer events). +- **Data Transformation:** + Produces merged and scheduled transfer JSON summaries for further analysis or visualization. +- **MongoDB Integration:** + `mft_dashboard.py` loads parsed data into MongoDB, enabling persistent storage and fast retrieval. +- **Interactive Dashboard:** + Visualizes transfer statistics, monitor status, recent and scheduled transfers +- **Downloadable Data:** + Export all or scheduled transfers as CSV directly from the dashboard. + +--- + +## How It Works + +### 1. Parse and Structure Log Data + +- `json_converter.py` takes a raw MFT log file (e.g., `sample.log`), parses each line, and creates structured events. +- It creates three JSON outputs: + - `merged_transfers.json`: All parsed events. + - `scheduled_transfers.json`: Only scheduled/expired transfer events. + - `categorized_summary.json`: Counts and lists of detected monitors, agents, queue managers, and transfers. + +### 2. 
Load Data & Visualize + +- `mft_dashboard.py` runs the converter, loads the JSONs, uploads them to MongoDB, and then renders the dashboard. +- The dashboard shows: + - **Monitor Status Overview:** Active vs. stopped monitors. + - **Transfer Statistics:** Donut chart of successful, failed, scheduled, and expired transfers. + - **Successful vs. Failed Transfers per Agent:** Stacked bar chart by agent. + - **Recent Transfers:** Table of the last 5 overall, successful, and failed transfers. + - **Scheduled Transfers:** Upcoming and expired scheduled transfers. + +--- + +## Prerequisites +To follow this guide, ensure you have the following: +### 1. IBM MQ MFT Installed + - MFT must be installed with the Logger component included. + - Refer to the official [MQ Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=transfer-configuring-mft-first-use) + +### 2. Coordination Queue Manager Setup + - A running MQ Coordination Queue Manager is required. + +### 3. IBM MFT CLI Tools Available + - Commands such as fteCreateLogger, fteStartLogger, fteStopLogger, fteShowLoggerDetails and fteModifyLogger must be accessible. + - Refer to the official [MQ Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=transfer-configuring-mft-logger) + + +### 4. Python Environment + - Python 3.8+ with required libraries: + ```bash + pip install streamlit pandas plotly pymongo + ``` +(avoid using Python 3.13) + +### 5.MongoDB Instance + - Local or cloud [MongoDB](https://www.mongodb.com/docs/atlas/) + + +--- + +## Step-by-Step Setup Instructions + +### Step 1: Set Up the IBM MFT Stand-Alone File Logger + +1.1 **Create the Logger:** +```bash +fteCreateLogger -p -loggerType FILE -fileLoggerMode -fileSize -fileCount +``` +[IBM Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=reference-ftecreatelogger-create-mft-file-database-logger) + +1.2 **Create Required Queues:** +```bash +runmqsc < MQ_DATA_PATH/mqft/logs/config//loggers//_create.mqsc +``` + +1.3 **Optional Configuration:** +- Edit `logger.properties` as needed for customization. + +1.4 **Start the Logger:** +```bash +fteStartLogger -p +``` +Logs will be stored under: +`MQ_DATA_PATH/mqft/logs//loggers//logs` + +**Stop the logger with:** +```bash +fteStopLogger -p +``` + +--- + +### Step 2: Process Log Files Using the Dashboard + +- Ensure the log file (`sample.log`) is in the same directory as the Python scripts. +- The Streamlit app will automatically run `json_converter.py` to convert `sample.log` into: + - `merged_transfers.json` + - `scheduled_transfers.json` + - `categorized_summary.json` + +- `merged_transfers.json` and `scheduled_transfers.json` will be loaded into the MongoDB collections. + +--- + +### Step 3: Launch the Dashboard + +Run the dashboard using: +```bash +streamlit run mft_dashboard.py +``` + +The application will: +- Validate `sample.log` +- Parse logs into JSON +- Upload results to MongoDB +- Display interactive visualizations + +--- + + + +## File Overview + +- **json_converter.py** + - `parse_log_line(line)`: Parses an MFT log line to a structured dict. + - `categorize_entities(parsed_data)`: Summarizes entity counts and lists. + - `build_scheduled_transfers(parsed_data)`: Groups scheduled transfers and marks expired ones. + - Outputs: `merged_transfers.json`, `categorized_summary.json`, `scheduled_transfers.json`. +- **mft_dashboard.py** + - Runs `json_converter.py` automatically on dashboard launch. + - Loads JSON outputs, uploads to MongoDB. + - Visualizes the data in an interactive Streamlit dashboard. 
+ - Provides CSV download, result code explanations, and transfer/monitor statistics. + +--- + +## Customization + +- Edit `log_file_path` in both scripts if your log isn’t named `sample.log`. +- Update MongoDB connection string in `mft_dashboard.py` as needed. + +--- + +## Notes + +- Make sure MongoDB is accessible using the provided connection string. +- The dashboard expects log lines in the IBM MFT standard format. +--- +## Addition Resources + +- [IBM MQ MFT Overview](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=configuring-managed-file-transfer) +- [fteCreateLogger](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=reference-ftecreatelogger-create-mft-file-database-logger) +- [fteStartLogger](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=reference-ftestartlogger-start-mft-logger) +- [fteStopLogger](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=reference-ftestoplogger-stop-mft-logger) +- [fteModifyLogger](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=reference-ftemodifylogger-run-mft-logger-as-windows-service) +- [fteShowLoggerDetails](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=reference-fteshowloggerdetails-display-mft-logger-details) From ac944af36c5e5f9e806a1b58e71a917c8e37ca86 Mon Sep 17 00:00:00 2001 From: PriyansheeSharma Date: Fri, 11 Jul 2025 14:16:23 +0530 Subject: [PATCH 4/7] Update json_converter.py --- MFT-Dashboard/json_converter.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/MFT-Dashboard/json_converter.py b/MFT-Dashboard/json_converter.py index 79d127e..15a006b 100644 --- a/MFT-Dashboard/json_converter.py +++ b/MFT-Dashboard/json_converter.py @@ -1,3 +1,4 @@ + import json import re from datetime import datetime @@ -307,6 +308,7 @@ def build_scheduled_transfers(parsed_data): with open("merged_transfers.json", "w") as f: json.dump(parsed_data, f, indent=4) +print("Merged Transfers saved to 'merged_transfers.json'.") scheduled_transfers = build_scheduled_transfers(parsed_data) with open("scheduled_transfers.json", "w") as f: From 63aff70c552e767a50d47b8a54c8bfd611fb0a52 Mon Sep 17 00:00:00 2001 From: PriyansheeSharma Date: Sat, 12 Jul 2025 10:20:36 +0530 Subject: [PATCH 5/7] Update mft_dashboard.py --- MFT-Dashboard/mft_dashboard.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/MFT-Dashboard/mft_dashboard.py b/MFT-Dashboard/mft_dashboard.py index e44d921..3b6d1d1 100644 --- a/MFT-Dashboard/mft_dashboard.py +++ b/MFT-Dashboard/mft_dashboard.py @@ -1,3 +1,17 @@ +# (c) Copyright IBM Corporation 2025 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import streamlit as st import pandas as pd import plotly.graph_objects as go From 89fe04122080f057d7f7e145d16ed9518b78ed89 Mon Sep 17 00:00:00 2001 From: PriyansheeSharma Date: Sat, 12 Jul 2025 10:28:21 +0530 Subject: [PATCH 6/7] Update README.md --- MFT-Dashboard/README.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/MFT-Dashboard/README.md b/MFT-Dashboard/README.md index 9e02ff7..85edb4e 100644 --- a/MFT-Dashboard/README.md +++ b/MFT-Dashboard/README.md @@ -45,14 +45,16 @@ To follow this guide, ensure you have the following: ### 1. IBM MQ MFT Installed - MFT must be installed with the Logger component included. - - Refer to the official [MQ Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=transfer-configuring-mft-first-use) + - Refer to the official [IBM MQ Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=transfer-configuring-mft-first-use) ### 2. Coordination Queue Manager Setup - A running MQ Coordination Queue Manager is required. + - Refer to the official [IBM MQ Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=mcr-ftesetupcoordination-set-up-properties-files-directories-coordination-queue-manager) + ### 3. IBM MFT CLI Tools Available - Commands such as fteCreateLogger, fteStartLogger, fteStopLogger, fteShowLoggerDetails and fteModifyLogger must be accessible. - - Refer to the official [MQ Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=transfer-configuring-mft-logger) + - Refer to the official [IBM MQ Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=transfer-configuring-mft-logger) ### 4. Python Environment @@ -76,7 +78,7 @@ To follow this guide, ensure you have the following: ```bash fteCreateLogger -p -loggerType FILE -fileLoggerMode -fileSize -fileCount ``` -[IBM Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=reference-ftecreatelogger-create-mft-file-database-logger) +[IBM MQ Documentation](https://www.ibm.com/docs/en/ibm-mq/9.4.x?topic=reference-ftecreatelogger-create-mft-file-database-logger) 1.2 **Create Required Queues:** ```bash From 3f5265cd48a449fcbd54456b9918394e08a081c0 Mon Sep 17 00:00:00 2001 From: PriyansheeSharma Date: Sat, 12 Jul 2025 10:29:36 +0530 Subject: [PATCH 7/7] Update json_converter.py --- MFT-Dashboard/json_converter.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/MFT-Dashboard/json_converter.py b/MFT-Dashboard/json_converter.py index 15a006b..7d268f8 100644 --- a/MFT-Dashboard/json_converter.py +++ b/MFT-Dashboard/json_converter.py @@ -1,3 +1,16 @@ +# (c) Copyright IBM Corporation 2025 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. import json import re
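As an illustration of the record shape that `parse_log_line` in json_converter.py works with, the sketch below feeds it a made-up, semicolon-separated line laid out to match the TCOM branch of the parser. The line and every name in it (TRANSFER_0001, AGENT_SRC, QM_SRC, AGENT_DST, QM_DST, mftuser) are hypothetical; they only mirror the positions the parser indexes, not necessarily the exact layout of real IBM MFT logger records. Note that importing `json_converter` also executes its module-level code, which expects a `sample.log` in the working directory.

```python
from json_converter import parse_log_line  # the import also runs the script's main section

# Hypothetical TCOM record, shaped to match the parser's positional fields:
# 0=timestamp, 1=transfer id, 2=[TYPE], 3=result code, 4=source agent,
# 5=source qmgr, 7=destination agent, 8=destination qmgr, 10=user id.
sample = ("2025-07-11T10:15:30;TRANSFER_0001;[TCOM];0;"
          "AGENT_SRC;QM_SRC;;AGENT_DST;QM_DST;;mftuser")

print(parse_log_line(sample))
# {'timestamp': '2025-07-11T10:15:30', 'reference_id': 'TRANSFER_0001',
#  'type': 'TCOM', 'transfer_id': 'TRANSFER_0001', 'result_code': '0',
#  'source_agent': 'AGENT_SRC', 'source_qmgr': 'QM_SRC',
#  'destination_agent': 'AGENT_DST', 'destination_qmgr': 'QM_DST',
#  'user_id': 'mftuser'}
```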