diff --git a/.vscode/launch.json b/.vscode/launch.json index a514352..a2be49f 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -316,6 +316,14 @@ "module": "sysdiagnose.__main__", "args": "-c public parse swcutil", "cwd": "${workspaceFolder}/" + }, + { + "name": "Python Debugger: parse ioservice", + "type": "debugpy", + "request": "launch", + "module": "sysdiagnose.__main__", + "args": "-c public -l DEBUG parse ioservice", + "cwd": "${workspaceFolder}/" } ] } \ No newline at end of file diff --git a/src/sysdiagnose/parsers/ioacpiplane.py b/src/sysdiagnose/parsers/ioacpiplane.py new file mode 100644 index 0000000..18f6d75 --- /dev/null +++ b/src/sysdiagnose/parsers/ioacpiplane.py @@ -0,0 +1,31 @@ +#! /usr/bin/env python3 + +import os +from sysdiagnose.utils.base import BaseParserInterface, SysdiagnoseConfig, logger +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser + + +class IOACPIPlaneParser(BaseParserInterface): + description = "IOACPIPlane.txt file parser" + format = "json" + + def __init__(self, config: SysdiagnoseConfig, case_id: str): + super().__init__(__file__, config, case_id) + + def get_log_files(self) -> list: + log_file = "ioreg/IOACPIPlane.txt" + return [os.path.join(self.case_data_subfolder, log_file)] + + def execute(self) -> list | dict: + log_file = self.get_log_files()[0] + data_tree = {} + + try: + logger.info(f"Processing file {log_file}, new entry added", extra={'log_file': log_file}) + p = IORegStructParser() + data_tree = p.parse(log_file) + + except Exception: + logger.exception("IOACPIPlane parsing crashed") + + return data_tree diff --git a/src/sysdiagnose/parsers/iodevicetree.py b/src/sysdiagnose/parsers/iodevicetree.py new file mode 100644 index 0000000..b803adb --- /dev/null +++ b/src/sysdiagnose/parsers/iodevicetree.py @@ -0,0 +1,32 @@ +#! /usr/bin/env python3 + +import os +from sysdiagnose.utils.base import BaseParserInterface, SysdiagnoseConfig, logger +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser + + +class IODeviceTreeParser(BaseParserInterface): + description = "IODeviceTree.txt file parser" + format = "json" + + def __init__(self, config: SysdiagnoseConfig, case_id: str): + super().__init__(__file__, config, case_id) + + def get_log_files(self) -> list: + log_file = "ioreg/IODeviceTree.txt" + return [os.path.join(self.case_data_subfolder, log_file)] + + def execute(self) -> list | dict: + log_files = self.get_log_files() + data_tree = {} + + for log_file in log_files: + try: + logger.info(f"Processing file {log_file}, new entry added", extra={'log_file': log_file}) + p = IORegStructParser() + data_tree = p.parse(log_file) + + except Exception: + logger.exception("IODeviceTree parsing crashed") + + return data_tree diff --git a/src/sysdiagnose/parsers/iofirewire.py b/src/sysdiagnose/parsers/iofirewire.py new file mode 100644 index 0000000..639f2e2 --- /dev/null +++ b/src/sysdiagnose/parsers/iofirewire.py @@ -0,0 +1,32 @@ +#! 
/usr/bin/env python3 + +import os +from sysdiagnose.utils.base import BaseParserInterface, SysdiagnoseConfig, logger +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser + + +class IOFireWireParser(BaseParserInterface): + description = "IOFireWire.txt file parser" + format = "json" + + def __init__(self, config: SysdiagnoseConfig, case_id: str): + super().__init__(__file__, config, case_id) + + def get_log_files(self) -> list: + log_file = "ioreg/IOFireWire.txt" + return [os.path.join(self.case_data_subfolder, log_file)] + + def execute(self) -> list | dict: + log_files = self.get_log_files() + data_tree = {} + + for log_file in log_files: + try: + logger.info(f"Processing file {log_file}, new entry added", extra={'log_file': log_file}) + p = IORegStructParser() + data_tree = p.parse(log_file) + + except Exception: + logger.exception("IOFireWire parsing crashed") + + return data_tree diff --git a/src/sysdiagnose/parsers/iopower.py b/src/sysdiagnose/parsers/iopower.py new file mode 100644 index 0000000..08a9087 --- /dev/null +++ b/src/sysdiagnose/parsers/iopower.py @@ -0,0 +1,32 @@ +#! /usr/bin/env python3 + +import os +from sysdiagnose.utils.base import BaseParserInterface, SysdiagnoseConfig, logger +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser + + +class IOPowerParser(BaseParserInterface): + description = "IOPower.txt file parser" + format = "json" + + def __init__(self, config: SysdiagnoseConfig, case_id: str): + super().__init__(__file__, config, case_id) + + def get_log_files(self) -> list: + log_file = "ioreg/IOPower.txt" + return [os.path.join(self.case_data_subfolder, log_file)] + + def execute(self) -> list | dict: + log_files = self.get_log_files() + data_tree = {} + + for log_file in log_files: + try: + logger.info(f"Processing file {log_file}, new entry added", extra={'log_file': log_file}) + p = IORegStructParser() + data_tree = p.parse(log_file) + + except Exception: + logger.exception("IOPower parsing crashed") + + return data_tree diff --git a/src/sysdiagnose/parsers/ioservice.py b/src/sysdiagnose/parsers/ioservice.py new file mode 100644 index 0000000..26a84ef --- /dev/null +++ b/src/sysdiagnose/parsers/ioservice.py @@ -0,0 +1,44 @@ +#! 
/usr/bin/env python3 + +import os +from sysdiagnose.utils.base import BaseParserInterface, SysdiagnoseConfig, logger +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser + + +class IOServiceParser(BaseParserInterface): + description = "IOService.txt file parser" + format = "json" + + def __init__(self, config: SysdiagnoseConfig, case_id: str): + super().__init__(__file__, config, case_id) + + def get_log_files(self) -> list: + log_file = "ioreg/IOService.txt" + return [os.path.join(self.case_data_subfolder, log_file)] + + def execute(self) -> list | dict: + """ IOService file notes + + # Regex for +-o starting at start of file -> 1213 results + (\s|\|)*\+-o + + # Regex for ALL +-o - 1213 results + \+-o + + So we know that the data doesn't contain the node identifier ('+-o') + + """ # noqa: W605 + + log_files = self.get_log_files() + data_tree = {} + + for log_file in log_files: + try: + logger.info(f"Processing file {log_file}, new entry added", extra={'log_file': log_file}) + p = IORegStructParser() + data_tree = p.parse(log_file) + + except Exception: + logger.exception("IOService parsing crashed") + + return data_tree diff --git a/src/sysdiagnose/parsers/iousb.py b/src/sysdiagnose/parsers/iousb.py new file mode 100644 index 0000000..1b1739a --- /dev/null +++ b/src/sysdiagnose/parsers/iousb.py @@ -0,0 +1,32 @@ +#! /usr/bin/env python3 + +import os +from sysdiagnose.utils.base import BaseParserInterface, SysdiagnoseConfig, logger +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser + + +class IOUSBParser(BaseParserInterface): + description = "IOUSB.txt file parser" + format = "json" + + def __init__(self, config: SysdiagnoseConfig, case_id: str): + super().__init__(__file__, config, case_id) + + def get_log_files(self) -> list: + log_file = "ioreg/IOUSB.txt" + return [os.path.join(self.case_data_subfolder, log_file)] + + def execute(self) -> list | dict: + log_files = self.get_log_files() + data_tree = {} + + for log_file in log_files: + try: + logger.info(f"Processing file {log_file}, new entry added", extra={'log_file': log_file}) + p = IORegStructParser() + data_tree = p.parse(log_file) + + except Exception: + logger.exception("IOUSB parsing crashed") + + return data_tree diff --git a/src/sysdiagnose/utils/ioreg_parsers/structure_parser.py b/src/sysdiagnose/utils/ioreg_parsers/structure_parser.py new file mode 100644 index 0000000..a174de3 --- /dev/null +++ b/src/sysdiagnose/utils/ioreg_parsers/structure_parser.py @@ -0,0 +1,200 @@ +from sysdiagnose.utils.base import logger +from sysdiagnose.utils import string_parser +import re + +class IORegStructParser: + __rollback_addr = None + __curr_line = None + + def __init__(self): + pass + + def parse(self, file_path): + data_tree = {} + + with open(file_path, 'r', errors='backslashreplace') as f: + self.open_file = f + self.recursive_fun(data_tree) + + return data_tree + + def get_line(self): + self.__rollback_addr = self.open_file.tell() + self.__curr_line = self.open_file.readline() + self.__curr_line = self.__curr_line.replace('\n', '') + + def recursive_call(self, data_tree: dict): + self.open_file.seek(self.__rollback_addr) + self.recursive_fun(data_tree) + + def check_start_node(self): + if '+-o' not in self.__curr_line: + logger.error('This is not normal. 
Recursive function called on a random line.')
+            raise Exception("File has an invalid structure, '+-o' tag was not found in first line")
+
+    def check_key_uniqueness(self, dictio: dict, key: str):
+        if dictio.get(key):
+            logger.warning('Key is already in dictionary, data may be lost\n\tKey : ' + key)
+
+    def fetch_node_data(self, data_tree: dict) -> bool:
+        node_data = []  # list of raw lines, converted into a dict below
+        res = True
+
+        while '+-o' not in self.__curr_line:
+            if not self.__curr_line:  # end of file
+                res = False
+                break
+
+            node_data.append(self.__curr_line)
+            self.get_line()
+
+        data_dict = self.node_data_to_json(node_data)
+        self.parse_values(data_dict)
+        self.dict_update(data_tree, data_dict)
+
+        return res
+
+    def parse_values(self, data_dict: dict):
+        # try to turn each raw value string into a nested dict/list
+        for key in data_dict:
+            value = data_dict[key]
+            constructed = string_parser.parse(value)
+            if constructed:
+                data_dict[key] = constructed
+
+    def dict_update(self, main_dict: dict, data_dict: dict):
+        """ Variant of dict.update that handles key collisions by collecting the values in a list """
+
+        for key in data_dict:
+            if main_dict.get(key):
+                if isinstance(main_dict[key], list):
+                    main_dict[key].append(data_dict[key])
+                else:
+                    main_dict[key] = [main_dict[key], data_dict[key]]
+            else:
+                main_dict[key] = data_dict[key]
+
+    def parse_title(self) -> tuple:
+        if "+-o" not in self.__curr_line:
+            logger.warning("'non-title' line given to title parser, should not happen")
+            return "", ""
+
+        whole_title = self.__curr_line.split("+-o", 1)[1].strip()
+
+        if "<" not in whole_title:
+            logger.warning("Title is not in the name <class, ...> format, to investigate")
+            return whole_title, ""
+
+        # split the node name from the '<class, id, ...>' structure that follows it
+        name, data = whole_title.split('<', 1)
+        return name.strip(), '<' + data
+
+    def warn_if_no_struct(self, data):
+        if isinstance(data, str):
+            logger.warning("No structure could be built from the title data\n---> " + data)
+
+    def handle_anomalies(self, dictio: dict, data: str, key: str) -> bool:
+        """
+        Some values overflow onto the next few lines.
+        This condition assumes there is no '=' in the overflowing data
+        (which was the case in everything observed so far).
+
+        p.s. : if you wonder why cond4 is necessary, it is only for
+        the last leaf, which has no '|' symbols; without cond4,
+        these lines would be seen as anomalies
+        """
+        cond1 = not re.search(r'^\s*\|+', data)
+        cond2 = len(data.strip()) > 0
+        cond3 = data.strip() not in ('{', '}')
+        cond4 = '=' not in data
+
+        if cond1 and cond2 and cond3 and cond4:
+            dictio[key] += data.strip()
+            return True
+        return False
+
+    def node_data_to_json(self, data_array: list[str]) -> dict:
+        res = {}
+        key = None
+
+        for data in data_array:
+            self.handle_anomalies(res, data, key)
+
+            # remove spaces and pipes at start
+            clean_line = re.sub(r'^(\s|\|)*', '', data)
+
+            if '=' not in clean_line:
+                continue
+
+            # split at the first equal only
+            key, value = clean_line.split('=', 1)
+
+            # remove first and last " (in case the key has more quotes inside)
+            key = key.replace('"', '', 1)
+            key = key[::-1].replace('"', '', 1)[::-1]
+            key = key.strip()
+
+            self.check_key_uniqueness(res, key)
+            res[key] = value.strip()
+
+        return res
+
+    def iterate_children(self, depth: int, data_tree: dict):
+        while self.__curr_line and (self.__curr_line[depth] == '|' or self.__curr_line[depth: depth + 3] == '+-o'):
+            if self.__curr_line[depth: depth + 3] == '+-o':
+                name = self.parse_title()[0]
+                new_child = self.setup_new_child(data_tree, name)
+                self.recursive_call(new_child)
+
+            else:
+                self.get_line()
+
+    def setup_new_child(self, data_tree: dict, key: str) -> dict:
+        """ Helper dedicated to iterate_children; it handles the special case
+            where a node name is already present under the same parent """
+
+        if data_tree.get(key):
+            if isinstance(data_tree[key], list):
+                # case already a list of data nodes
+                data_tree[key].append({})
+            else:
+                # case currently a single data node
+                data_tree[key] = [data_tree[key], {}]
+            return data_tree[key][-1]
+
+        else:
+            # case new key
+            data_tree[key] = {}
+            return data_tree[key]
+
+    def recursive_fun(self, data_tree: dict):
+        is_leaf = False
+        self.get_line()
+
+        # check if we're at the start of a node
+        self.check_start_node()
+
+        # try to get a struct out of the title data
+        title_data = self.parse_title()[1]
+        additional_data = string_parser.parse(title_data) or title_data
+        self.warn_if_no_struct(additional_data)
+
+        self.dict_update(data_tree, additional_data)
+
+        depth = self.__curr_line.index('o')  # to identify the other nodes that have the same parent
+        self.get_line()
+
+        # check if it's a leaf
+        if self.__curr_line[depth] != '|':
+            is_leaf = True
+
+        # fetch the data of the node
+        if not self.fetch_node_data(data_tree):
+            return  # EOF
+
+        # stop if we're a leaf
+        if is_leaf:
+            self.open_file.seek(self.__rollback_addr)
+            return
+
+        # iterate over each child and recurse into it
+        self.iterate_children(depth, data_tree)
diff --git a/src/sysdiagnose/utils/string_parser.py b/src/sysdiagnose/utils/string_parser.py
new file mode 100644
index 0000000..1c484bd
--- /dev/null
+++ b/src/sysdiagnose/utils/string_parser.py
@@ -0,0 +1,369 @@
+import re
+from enum import Enum
+import sys
+from sysdiagnose.utils.base import logger
+import uuid
+
+class DataType(Enum):
+    XML_DICT = 1
+    CURLY_DICT = 2
+    LIST = 3
+    STRING = 4
+    UNKNOWN = 5
+
+class Detect:
+    _best_len = float('inf')
+    _best_type = DataType.UNKNOWN
+    _best_whole = ""  # whole match, for example : <data1, data2>
+    _best_content = ""  # content, for example : data1, data2
+    _found = False
+
+    def __init__(self, input_string: str):
+        self.detect_type(input_string)
+
+    def detect_type(self, input: str):
+        """ Note on the match types
+
+        XML_DICT : data inside <> with at least a comma or space between chars
+                   excluded : <> , < > , < >
+
+        CURLY_DICT : like XML_DICT but with {} instead of <>
+
+        LIST : data in parentheses ('[]', '()') or d-quotes with at least one comma
+               Note : most basic d-quotes have already been sanitized in prepare_line()
+
+        STRING : parentheses that don't contain any comma.
+                 example : I'm good at coding (not really)  <-- shouldn't be a list, simply text
+
+        """  # noqa: W605
+
+        # find xml-like dict ex : <key1 val1, key2 val2>
+        hit = self.find_smallest(r'<([^<>]*([,]|[^\s<>][\s]+[^\s<>])[^<>]*)>', input)
+        if hit and len(hit.group(0)) < self._best_len:
+            self.assign_best(hit, DataType.XML_DICT)
+
+        # find dict in {} ex : {key1=val1, k2=v2}
+        hit = self.find_smallest(r'{([^{}]*)}', input)
+        if hit and len(hit.group(0)) < self._best_len:
+            self.assign_best(hit, DataType.CURLY_DICT)
+
+        # find list in parentheses ex : (a, b, c)
+        hit = self.find_smallest(r'\(([^()]*,[^()]*)\)', input)
+        if hit and len(hit.group(0)) < self._best_len:
+            self.assign_best(hit, DataType.LIST)
+
+        # find simple string data in <> ex : <648a4c>
+        hit = re.search(r'(<[^,<>\s]*>)', input)
+        if hit and len(hit.group(0)) < self._best_len:
+            self.assign_best(hit, DataType.STRING)
+
+        # find simple parentheses without ',' ex : (hello world)
+        hit = re.search(r'(\([^,)(]*\))', input)
+        if hit and len(hit.group(0)) < self._best_len:
+            self.assign_best(hit, DataType.STRING)
+
+        # find [] parentheses without ',' nor '=' ex : [hello world]
+        hit = re.search(r'(\[[^,=\[\]]*\])', input)
+        if hit and len(hit.group(0)) < self._best_len:
+            self.assign_best(hit, DataType.STRING)
+
+        # find simple double-quotes ex : "hello world"
+        hit = re.search(r'"([^"]*)"', input)
+        if hit and len(hit.group(0)) < self._best_len:
+            self.assign_best(hit, DataType.STRING)
+
+        self.warn_unknown_struct(input)
+
+    def assign_best(self, hit: re.Match, type: DataType):
+        self._best_len = len(hit.group(0))
+        self._best_type = type
+        self._best_whole = hit.group(0)
+        self._best_content = hit.group(1)
+        self._found = True
+
+    def find_smallest(self, regex: str, data: str) -> re.Match:
+        pattern = re.compile(regex)
+        matches = list(pattern.finditer(data))
+        if not matches:
+            return None
+        return min(matches, key=lambda m: len(m.group(0)))
+
+    def warn_unknown_struct(self, input: str):
+        main_cond = self._best_type is DataType.UNKNOWN
+        cond_exceptions = input != '{}' and input != '<>' and input != '()'
+        cond_1 = '<' in input and '>' in input
+        cond_2 = '(' in input and ')' in input
+        cond_3 = '{' in input and '}' in input
+
+        if (main_cond and cond_exceptions and (cond_1 or cond_2 or cond_3)):
+            logger.warning('Warning : A structure might not have been recognized '
+                           'here, if so please consider adding it to the '
+                           'string_parser.py file\n---> ' + input)
+
+    @property
+    def len(self) -> int:
+        return self._best_len
+
+    @property
+    def type(self) -> DataType:
+        return self._best_type
+
+    @property
+    def whole_match(self) -> str:
+        return self._best_whole
+
+    @property
+    def content(self) -> str:
+        return self._best_content
+
+    @property
+    def found(self) -> bool:
+        return self._found
+
+
+def generate_tag() -> str:
+    return str(uuid.uuid4())
+
+
+def check_anomaly(s: str, tag: str):
+    diff = s.replace(tag, '')
+    structured = s.replace(tag, '[STRUCT]')
+    # cases we don't have to warn about, ex : a redundant '(())' wrapped around the struct
+    if tag in s and diff and not is_redundent_syntax_regex(diff):
+        logger.warning("Warning : Anomaly: some data was right next to "
+                       "the struct (without space), this data is thus lost\n---> " + structured)
+
+def is_redundent_syntax_regex(s: str) -> re.Match:
+    """ If we have for example ([ ]) around a struct, we consider it useless
+        Example : a leftover "[()]" around a struct is the same as the struct alone """
+    return re.search(r'^[(){}\[\]<>""]+$', s)
+
+def prepare_line(line: str) -> str:
+    """ remove unnecessary double-quotes
+        quotes are only needed when a comma (or another parse character) is inside,
+        e.g. the quotes around "a b" can be dropped, but the ones around "a,b" must stay
+
+        Note : a regex can't be used, the logic needs to be stateful i.e. consider opening and closing quotes
+        example that doesn't work with regex: "a,"b"c,"
+        gives : '"a,bc,"'
+        should give : '"a,"b"c,"'
+        (the quotes in "a," aren't removed because of the comma, so "b" is detected as a string)
+    """
+    inside = False
+    opening_pos = None
+    skipping = False
+    parse_char = (',', '=', '{', '}', '(', ')')
+    line = line.strip()
+
+    i = 0
+    while i < len(line):
+        if line[i] == '"':
+            if inside:
+                if not skipping:
+                    line = line[:i] + line[i + 1:]  # remove last "
+                    line = line[:opening_pos] + line[opening_pos + 1:]  # remove first "
+                    i -= 1
+                else:
+                    i += 1
+                inside = False
+
+            else:
+                inside = True
+                opening_pos = i
+                skipping = False
+                i += 1
+                continue
+
+        if inside and line[i] in parse_char:
+            skipping = True
+
+        i += 1
+
+    return line
+
+def check_key_uniqueness(dictio: dict, key: str):
+    if dictio.get(key):
+        logger.warning('Warning : Key is already in dictionary, data may be lost\n---> ' + key)
+
+
+def parse_list(input_string: str) -> list:
+    list_of_elements = input_string.split(',')
+    res = []
+
+    for element in list_of_elements:
+        res.append(element.strip())
+
+    return res
+
+
+def parse_dict(input_string: str, separator: str) -> dict:
+    list_of_elements = input_string.split(',')
+    res = {}
+
+    if list_of_elements == ['']:
+        return res
+
+    for element in list_of_elements:
+        element = element.strip()
+        splitted = element.split(separator, 1)
+        key = splitted[0]
+
+        # value is true/false if there is only a key
+        if len(splitted) > 1:
+            value = splitted[1].strip()
+        elif key[0] == '!':
+            value = 'false'
+            key = key[1:]
+        else:
+            value = 'true'
+
+        check_key_uniqueness(res, key)
+        res[key] = value
+
+    return res
+
+
+def parse_type(input_string: str, type: DataType) -> dict | list | str:
+    match type:
+        case DataType.XML_DICT:
+            return parse_dict(input_string, ' ')
+
+        case DataType.CURLY_DICT:
+            return parse_dict(input_string, '=')
+
+        case DataType.LIST:
+            return parse_list(input_string)
+
+        case DataType.STRING:
+            return input_string
+
+        case _:
+            logger.error("Error : Type not found in parse_type(). 
(Note : " + "you probably forgot to add it to the match case)") + +def resolve_tag_list_dict(final_struct: list | dict, elem: list | dict | str, key: str, tag: str, constructed: dict | list | str) -> bool: + if isinstance(elem, str) and tag in elem: + if isinstance(constructed, str): + final_struct[key] = final_struct[key].replace(tag, constructed) + else: + check_anomaly(elem, tag) + final_struct[key] = constructed + return True + + elif isinstance(key, str) and tag in key: # only for dict, key is int for list + if isinstance(constructed, str): + new_key = key.replace(tag, constructed) + value = final_struct[key] + del final_struct[key] + final_struct[new_key] = value + else: + logger.error("Error : Trying to use a struct as a key in a dict") + final_struct[key] = constructed + return True + + elif isinstance(elem, list): + if resolve_tag_list(elem, tag, constructed): + return True + + elif isinstance(elem, dict): + if resolve_tag_dict(elem, tag, constructed): + return True + + return False + +def resolve_tag_dict(final_struct: dict, tag: str, constructed: dict | list | str) -> bool: + for key in final_struct: + elem = final_struct[key] + if resolve_tag_list_dict(final_struct, elem, key, tag, constructed): + return True + + return False + +def resolve_tag_list(final_struct: list, tag: str, constructed: dict | list | str): + for i in range(len(final_struct)): + elem = final_struct[i] + if resolve_tag_list_dict(final_struct, elem, i, tag, constructed): + return True + + return False + +def resolve_tag_str(final_struct: dict | list | str, tag: str, constructed: dict | list | str) -> dict | list | str: + if not isinstance(constructed, str): + if final_struct.replace(tag, "") == '()': + final_struct = constructed + else: + user_friendly = final_struct.replace(tag, "[STRUCT]") + lost_data = final_struct.replace(tag, "") + if not is_redundent_syntax_regex(lost_data) and lost_data: + logger.warning("Warning : trying to incorporate dict/list in a string :\n---> " + user_friendly) + final_struct = constructed + else: + final_struct = final_struct.replace(tag, constructed) + + return final_struct + +def resolve_tag(final_struct: dict | list | str, tag: str, constructed: dict | list | str) -> dict | list | str: + if isinstance(final_struct, dict): + resolve_tag_dict(final_struct, tag, constructed) + + elif isinstance(final_struct, list): + resolve_tag_list(final_struct, tag, constructed) + + elif isinstance(final_struct, str): + final_struct = resolve_tag_str(final_struct, tag, constructed) + + else: + logger.error('Error : struct type not found') + raise ValueError("Structure passed has to be a dict, a list or a string. 
Type : " + str(type(final_struct))) + + # return is necessary, strings are not passed by reference in python + return final_struct + + +def parse_main_loop(data_string: str, depth: dict) -> dict | list | str: + depth['value'] += 1 + + # Detection + hit = Detect(data_string) + final_struct = None + + # recursion stop + if not hit.found: + return data_string + + # form basic struct + constructed = parse_type(hit.content, hit.type) + + # replace struct by an unique tag + tag = generate_tag() + data_string = data_string.replace(hit.whole_match, tag, 1) + + # recursion + final_struct = parse_main_loop(data_string, depth) + + # reconstruct data structure + final_struct = resolve_tag(final_struct, tag, constructed) + + return final_struct + +def parse(data_string: str) -> dict | list | str: + # make it a struct so it is passed by reference + depth = {'value': 0} + + # increase recursion depth, default is at 1000 + sys.setrecursionlimit(3000) + + # greatly reduce recursion depth i.e. 80 000+ chars parsed against max 10 000 chars before + data_string = prepare_line(data_string) + + try: + data_string = parse_main_loop(data_string, depth) or data_string + except RecursionError: + logger.warning("Skipped line with " + str(len(data_string)) + " characters. " + "Recursion depth : " + str(depth['value']) + "\n" + "--> max recursion depth can be increased in utils/string_parser.py" + " in parse(). Feel free to try as high as needed to parse this line.") + + return data_string diff --git a/tests/test_parsers_ioacpiplane.py b/tests/test_parsers_ioacpiplane.py new file mode 100644 index 0000000..e2ebb92 --- /dev/null +++ b/tests/test_parsers_ioacpiplane.py @@ -0,0 +1,320 @@ +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser +from sysdiagnose.parsers.ioacpiplane import IOACPIPlaneParser +from tests import SysdiagnoseTestCase +import unittest +import io +import os + + +class TestParsersIOACPIPlane(SysdiagnoseTestCase): + + def test_parse_case(self): + for case_id, case in self.sd.cases().items(): + p = IOACPIPlaneParser(self.sd.config, case_id=case_id) + files = p.get_log_files() + self.assertTrue(len(files) > 0) + + p.save_result(force=True) + self.assertTrue(os.path.isfile(p.output_file)) + + def test_basic_structure(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value l4" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', 
+ 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value l4' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_value_overflow_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value aaaa +bbbb +cccc +dddd +" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value aaaabbbbccccdddd' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_non_ascii_byte_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value -->\xbf<--" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value 
l3', + 'data l4': 'value -->\xbf<--' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_parsers_iodevicetree.py b/tests/test_parsers_iodevicetree.py new file mode 100644 index 0000000..ee1e560 --- /dev/null +++ b/tests/test_parsers_iodevicetree.py @@ -0,0 +1,320 @@ +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser +from sysdiagnose.parsers.iodevicetree import IODeviceTreeParser +from tests import SysdiagnoseTestCase +import unittest +import io +import os + + +class TestParsersIODeviceTree(SysdiagnoseTestCase): + + def test_parse_case(self): + for case_id, case in self.sd.cases().items(): + p = IODeviceTreeParser(self.sd.config, case_id=case_id) + files = p.get_log_files() + self.assertTrue(len(files) > 0) + + p.save_result(force=True) + self.assertTrue(os.path.isfile(p.output_file)) + + def test_basic_structure(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value l4" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value l4' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_value_overflow_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value 
l3" + | "data l4" = "value aaaa +bbbb +cccc +dddd +" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value aaaabbbbccccdddd' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_non_ascii_byte_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value -->\xbf<--" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value -->\xbf<--' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_parsers_iofirewire.py b/tests/test_parsers_iofirewire.py new file mode 100644 index 0000000..bb49df8 --- /dev/null +++ b/tests/test_parsers_iofirewire.py @@ -0,0 +1,320 @@ +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser +from sysdiagnose.parsers.iofirewire import IOFireWireParser +from tests import SysdiagnoseTestCase +import unittest +import io +import os + + +class TestParsersIOFireWire(SysdiagnoseTestCase): + + def test_parse_case(self): + for case_id, case in self.sd.cases().items(): + p = IOFireWireParser(self.sd.config, case_id=case_id) + files = 
p.get_log_files() + self.assertTrue(len(files) > 0) + + p.save_result(force=True) + self.assertTrue(os.path.isfile(p.output_file)) + + def test_basic_structure(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value l4" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value l4' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_value_overflow_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value aaaa +bbbb +cccc +dddd +" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value aaaabbbbccccdddd' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + 
result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_non_ascii_byte_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value -->\xbf<--" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value -->\xbf<--' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_parsers_iopower.py b/tests/test_parsers_iopower.py new file mode 100644 index 0000000..7bd0406 --- /dev/null +++ b/tests/test_parsers_iopower.py @@ -0,0 +1,320 @@ +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser +from sysdiagnose.parsers.iopower import IOPowerParser +from tests import SysdiagnoseTestCase +import unittest +import io +import os + + +class TestParsersIOPower(SysdiagnoseTestCase): + + def test_parse_case(self): + for case_id, case in self.sd.cases().items(): + p = IOPowerParser(self.sd.config, case_id=case_id) + files = p.get_log_files() + self.assertTrue(len(files) > 0) + + p.save_result(force=True) + self.assertTrue(os.path.isfile(p.output_file)) + + def test_basic_structure(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value l4" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 
2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value l4' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_value_overflow_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value aaaa +bbbb +cccc +dddd +" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value aaaabbbbccccdddd' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_non_ascii_byte_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value -->\xbf<--" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 
'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value -->\xbf<--' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_parsers_ioservice.py b/tests/test_parsers_ioservice.py new file mode 100644 index 0000000..ab54678 --- /dev/null +++ b/tests/test_parsers_ioservice.py @@ -0,0 +1,320 @@ +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser +from sysdiagnose.parsers.ioservice import IOServiceParser +from tests import SysdiagnoseTestCase +import unittest +import io +import os + + +class TestParsersIOService(SysdiagnoseTestCase): + + def test_parse_case(self): + for case_id, case in self.sd.cases().items(): + p = IOServiceParser(self.sd.config, case_id=case_id) + files = p.get_log_files() + self.assertTrue(len(files) > 0) + + p.save_result(force=True) + self.assertTrue(os.path.isfile(p.output_file)) + + def test_basic_structure(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value l4" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value l4' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_value_overflow_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = 
"value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value aaaa +bbbb +cccc +dddd +" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value aaaabbbbccccdddd' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_non_ascii_byte_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value -->\xbf<--" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value -->\xbf<--' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_parsers_iousb.py b/tests/test_parsers_iousb.py new file mode 100644 index 0000000..a58e625 --- /dev/null +++ b/tests/test_parsers_iousb.py @@ -0,0 +1,320 @@ +from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser +from 
sysdiagnose.parsers.iousb import IOUSBParser +from tests import SysdiagnoseTestCase +import unittest +import io +import os + + +class TestParsersIOService(SysdiagnoseTestCase): + + def test_parse_case(self): + for case_id, case in self.sd.cases().items(): + p = IOUSBParser(self.sd.config, case_id=case_id) + files = p.get_log_files() + self.assertTrue(len(files) > 0) + + p.save_result(force=True) + self.assertTrue(os.path.isfile(p.output_file)) + + def test_basic_structure(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value l4" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value l4' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_value_overflow_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value aaaa +bbbb +cccc +dddd +" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 
'value l3', + 'data l4': 'value aaaabbbbccccdddd' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + def test_non_ascii_byte_anomaly(self): + p = IORegStructParser() + + # careful, spaces and structure is important + # This simulates an open file object, as if we opened it with open(path, 'rb') + start_file = io.StringIO("""+-o Root node + | { + | "data 1" = "value 1" + | "data 2" = "value 2" + | } + | + +-o Node 2 + | { + | "#address-cells" = <02000000> + | "AAPL,phandle" = <01000000> + | } + | + +-o Node 3 + | | { + | | "data 31" = "value 31" + | | "data 32" = "value 32" + | | } + | | + | +-o Leaf 1 + | | { + | | "data l1" = "value l1" + | | "data l2" = "value l2" + | | } + | | + | +-o Leaf 2 + | { + | "data l3" = "value l3" + | "data l4" = "value -->\xbf<--" + | } + | + +-o Leaf 3 + | { + | "data l5" = "value L5" + | "data l6" = "value l6" + | } + | + +-o Leaf 4 + { + "data 51" = "value 51" + "data 52" = "value 52" + } + +""") # noqa: W291, W293 + + expected = { + 'class': 'test1', + 'key1': 'val1', + 'data 1': 'value 1', + 'data 2': 'value 2', + 'Node 2': { + 'class': 'test2', + 'key2': 'val2', + '#address-cells': '<02000000>', + 'AAPL,phandle': '<01000000>', + 'Node 3': { + 'class': 'test3', + 'key3': 'val3', + 'data 31': 'value 31', + 'data 32': 'value 32', + 'Leaf 1': { + 'class': 'test11', + 'key11': 'val11', + 'data l1': 'value l1', + 'data l2': 'value l2' + }, + 'Leaf 2': { + 'class': 'test22', + 'key22': 'val22', + 'data l3': 'value l3', + 'data l4': 'value -->\xbf<--' + } + }, + 'Leaf 3': { + 'class': 'test33', + 'key33': 'val33', + 'data l5': 'value L5', + 'data l6': 'value l6' + }, + 'Leaf 4': { + 'class': 'test44', + 'key44': 'val44', + 'data 51': 'value 51', + 'data 52': 'value 52' + } + } + } + + p.open_file = start_file + result = {} + p.recursive_fun(result) + + self.assertTrue(result == expected) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_string_parser.py b/tests/test_string_parser.py new file mode 100644 index 0000000..3f5da07 --- /dev/null +++ b/tests/test_string_parser.py @@ -0,0 +1,60 @@ +from tests import SysdiagnoseTestCase +import unittest +import sysdiagnose.utils.string_parser as sp + + +class TestStringParser(SysdiagnoseTestCase): + + test_list = [ + '', + '(li1, li2, li3, li4)', + '< k1 v1 , k2 v2, k3 v3 ,k4 v4 >', + '>', + '( li 1, li 2 , li3)', + '', + '', + ' , k4 (li111, li222, li333) >', + '), m >', + ' ,k3 (>, (li111), (li8, li9)) , k4 (li111, li222, li333) >' + ] + + expected_parsed = [ + {'key': 'val', 'k2': 'v2'}, + ['li1', 'li2', 'li3', 'li4'], + {'k1': 'v1', 'k2': 'v2', 'k3': 'v3', 'k4': 'v4'}, + {'k1': {'k11': 'v11'}}, + ['li 1', 'li 2', 'li3'], + {'k1': 'v1', 'k2': 'v2', 'k:3': ['li1', 'li2', 'li3', 'li4'], 'k4': 'v4'}, + {'k1': 'v1', 'k2': ['li1', 'li2', 'li3', 'li4'], 'k3': ['li11', 'li22'], 'k4': ['li111', 'li222', 'li333']}, + {'k1': 'v1', 'k2': ['li1', 'li2', 'li3', 'li4'], 'k3': {'k11': 'v11', 'k22': 'v22'}, 'k4': ['li111', 'li222', 'li333']}, + {'l': ['1', '2', {'k': ['', '', '', '']}], 'm': {'g': '()', 'k': ['', ''], 'm': ['()', '(())']}}, + {'k1': 'v1', 'k2': {'k11': 'v11', 'k22': 'v22'}, 'k3': [{'k111': {'a': 'b', 'c': ['l1', 'l2']}}, '(li111)', ['li8', 'li9']], 'k4': ['li111', 'li222', 'li333']} + ] + + 
expected_detect = [ + ('key val, k2 v2', sp.DataType.XML_DICT), + ('li1, li2, li3, li4', sp.DataType.LIST), + (' k1 v1 , k2 v2, k3 v3 ,k4 v4 ', sp.DataType.XML_DICT), + ('k11 v11', sp.DataType.XML_DICT), + (' li 1, li 2 , li3', sp.DataType.LIST), + ('li1 , li2 ,li3, li4 ', sp.DataType.LIST), + ('li11, li22', sp.DataType.LIST), + ('k11 v11,k22 v22', sp.DataType.XML_DICT), + ('()', sp.DataType.STRING), + ('(li111)', sp.DataType.STRING) + ] + + def test_detect(self): + for test_val, (exp_cont, exp_type) in zip(self.test_list, self.expected_detect): + d = sp.Detect(test_val) + self.assertTrue(d.content == exp_cont) + self.assertTrue(d.type == exp_type) + + def test_parsing(self): + for test_val, expected in zip(self.test_list, self.expected_parsed): + result = sp.parse(test_val) + self.assertTrue(result == expected) + + +if __name__ == '__main__': + unittest.main()
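For reference, a minimal usage sketch of the two new helpers; the ioreg path below is only an illustrative placeholder, and the expected values mirror the unit tests above:

    from sysdiagnose.utils.ioreg_parsers.structure_parser import IORegStructParser
    import sysdiagnose.utils.string_parser as sp

    # parse a whole ioreg dump into a nested dict (path is an example only)
    tree = IORegStructParser().parse('ioreg/IOService.txt')

    # string_parser.parse() turns raw ioreg value notations into Python structures
    sp.parse('(li1, li2, li3, li4)')       # -> ['li1', 'li2', 'li3', 'li4']
    sp.parse('< k1 v1 , k2 v2 >')          # -> {'k1': 'v1', 'k2': 'v2'}
    sp.parse('{key1=val1, k2=v2}')         # -> {'key1': 'val1', 'k2': 'v2'}
    sp.parse('plain text (not a struct)')  # unrecognized input comes back unchanged as a string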