#!/usr/bin/python3 -B
# coding=utf-8
###
### Utility for displaying OpenWRT DSL-stat JSON files
### by Matti 'ccr' Hämäläinen
### (C) Copyright 2021-2023 Tecnic Software productions (TNSP)
###
### Redistribution and use in source and binary forms, with or without
### modification, are permitted provided that the following conditions
### are met:
###
### 1. Redistributions of source code must retain the above copyright
###    notice, this list of conditions and the following disclaimer.
###
### 2. Redistributions in binary form must reproduce the above copyright
###    notice, this list of conditions and the following disclaimer in the
###    documentation and/or other materials provided with the distribution.
###
### 3. Neither the name of the copyright holder nor the names of its
###    contributors may be used to endorse or promote products derived from
###    this software without specific prior written permission.
###
### THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
### "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
### LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
### FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
### COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
### INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
### BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
### OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
### ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
### TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
### USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###
### Parses JSON files produced by OpenWRT "ubus call dsl metrics"
### and displays the data in a more readable format.
### Python 3.6+ required!
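###
### Example usage -- a sketch only: the script filename "dsl_stats_pretty.py"
### used below is an assumption for illustration, use whatever name this file
### is saved under on your system:
###
###   ubus call dsl metrics > /tmp/dsl.json && ./dsl_stats_pretty.py /tmp/dsl.json
###   ubus call dsl metrics | ./dsl_stats_pretty.py -
###   ./dsl_stats_pretty.py --indent 2 --width 50 /tmp/dsl.json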
###
import sys
import signal
import functools
import argparse
import json

assert sys.version_info >= (3, 6), 'Python >= 3.6 required'


###
### Field types
###
PF_FIELD_NORMAL = 0
PF_FIELD_LIST = 1
PF_FIELD_AGGREGATE = 2


###
### List of known fields and their prettyprint formatting etc.
### Each field definition is a dict which may contain:
###   "name"      : key of the value in the input JSON
###   "title"     : human-readable label to print
###   "func"      : optional callable applied to the raw value first
###   "fmt"       : optional str.format() template applied to the value
###   "companion" : optional list of extra JSON keys appended to the value
###   "type"      : PF_FIELD_NORMAL (default), PF_FIELD_LIST or PF_FIELD_AGGREGATE
###   "list"      : sub-field definitions for PF_FIELD_LIST entries
###   "columns"   : JSON keys of the aggregated groups for PF_FIELD_AGGREGATE
###   "fields"    : field definitions shared by all aggregate columns
###   "indent"    : additional indentation level for the sub-block
###
pf_lst_fields = [
    { "name": "up"               , "title": "Line status", "func": lambda x: pf_updown(x) },
    { "name": "uptime"           , "title": "Line uptime", "func": lambda x: pf_time(x) },
    { "name": "state"            , "title": "State", "companion": [ "state_num" ], "fmt": "{} (#{})" },
    { "name": "power_state"      , "title": "Power state", "companion": [ "power_state_num" ], "fmt": "{} (#{})" },
    { "name": "mode"             , "title": "Mode" },
    { "name": "standard"         , "title": "Standard" },
    { "name": "annex"            , "title": "Annex" },
    { "name": "profile"          , "title": "Profile" },
    { "name": "chipset"          , "title": "Chipset" },
    { "name": "driver_version"   , "title": "Driver version" },
    { "name": "firmware_version" , "title": "Firmware version" },
    { "name": "api_version"      , "title": "API version" },
    { "name": "xtse"             , "title": "Extensions", "func": lambda x: pf_list_to_hex(x) },

    { "type": PF_FIELD_LIST, "name": "atu_c", "title": "DSLAM / ATU-C information:", "indent": 1,
      "list": [
        { "name": "vendor"           , "title": "Vendor" },
        { "name": "vendor_id"        , "title": "Vendor ID", "func": lambda x: pf_list_to_hex(x) },
        { "name": "system_vendor"    , "title": "System vendor" },
        { "name": "system_vendor_id" , "title": "System vendor ID", "func": lambda x: pf_list_to_hex(x) },
        { "name": "version"          , "title": "Version", "func": lambda x: pf_list_to_hex(x) },
        { "name": "serial"           , "title": "Serial number", "func": lambda x: pf_list_to_hex(x) },
      ]
    },

    { "type": PF_FIELD_AGGREGATE, "columns": [ "downstream", "upstream" ],
      "title": "Downstream / Upstream:", "indent": 1,
      "fields": [
        { "name": "interleave_delay" , "title": "Latency", "fmt": "{:1.2f} ms", "func": lambda x: x / 1000.0 },
        { "name": "data_rate"        , "title": "Data rate", "func": lambda x: pf_speed(x) },
        { "name": "attndr"           , "title": "Max. Attainable Data Rate (ATTNDR)", "func": lambda x: pf_speed(x) },
        { "name": "latn"             , "title": "Line Attenuation (LATN)", "fmt": "{:1.2f} dB" },
        { "name": "satn"             , "title": "Signal Attenuation (SATN)", "fmt": "{:1.2f} dB" },
        { "name": "snr"              , "title": "Noise Margin (SNR)", "fmt": "{:1.2f} dB" },
#        { "name": "actps"            , "title": "ACTPS", "fmt": "{:1.2f} dB" },
        { "name": "actatp"           , "title": "Aggregate Transmit Power (ACTATP)", "fmt": "{:1.2f} dB" },
        { "name": "vector"           , "title": "Vectoring" },
        { "name": "trellis"          , "title": "Trellis encoding" },
        { "name": "bitswap"          , "title": "Bitswap" },
        { "name": "retx"             , "title": "RetX" },
        { "name": "virtual_noise"    , "title": "Virtual noise" },
#        { "name": ""                 , "title": "" },
#        { "name": ""                 , "title": "", "fmt": "{:1.2f} dB" },
      ]
    },

    { "type": PF_FIELD_LIST, "name": "errors", "title": "Near / Far:", "indent": 1,
      "list": [
        { "type": PF_FIELD_AGGREGATE, "columns": [ "near", "far" ],
          "fields": [
            { "name": "fecs"   , "title": "Forward Error Correction Seconds (FECS)" },
            { "name": "es"     , "title": "Errored seconds (ES)" },
            { "name": "ses"    , "title": "Severely Errored Seconds (SES)" },
            { "name": "loss"   , "title": "Loss of Signal Seconds (LOSS)" },
            { "name": "uas"    , "title": "Unavailable Seconds (UAS)" },
            { "name": "hec"    , "title": "Header Error Code Errors (HEC)" },
            { "name": "crc_p"  , "title": "Non Pre-emptive CRC errors (CRC_P)" },
            { "name": "crcp_p" , "title": "Pre-emptive CRC errors (CRCP_P)" },
            { "name": "lofs"   , "title": "Loss of Frame Seconds (LOFS)" },
#            { "name": "ibe"    , "title": "" },
#            { "name": "cv_p"   , "title": "" },
#            { "name": "cvp_p"  , "title": "" },
#            { "name": ""       , "title": "" },
#            { "name": ""       , "title": "" },
          ]
        },
      ]
    },
]


###
### Helper functions
###
def pf_fatal(msg):
    print(f"ERROR: {msg}", file=sys.stderr)
    sys.exit(1)


def pf_signal_handler(signal, frame):
    print("\nQuitting due to SIGINT / Ctrl+C!", file=sys.stderr)
    sys.exit(2)


def pf_arg_range(vstr, vmin, vmax):
    try:
        value = int(vstr)
    except Exception:
        raise argparse.ArgumentTypeError(f"value '{vstr}' is not an integer")

    if value < vmin or value > vmax:
        raise argparse.ArgumentTypeError(f"value {value} not in range {vmin}-{vmax}")
    else:
        return value


def pf_updown(value):
    if isinstance(value, bool):
        return { True: "UP", False: "DOWN" }[value]
    else:
        return "ERROR: Not a boolean: "+ str(value)


def pf_list_to_hex(value):
    if isinstance(value, list):
        return " ".join(map(lambda xval: f"{xval:02x}", value))
    else:
        return "ERROR: Not a list: "+ str(value)


def pf_speed(value):
    if not isinstance(value, int) and not isinstance(value, float):
        return "ERROR: Not numeric value: "+ str(value)

    if value > 1000000:
        value /= 1000000.0
        post = "Mbps"
    elif value > 1000:
        value /= 1000.0
        post = "kbps"
    else:
        post = "bps"

    return f"{value:1.2f} {post}"


def pf_time(value):
    if not isinstance(value, int) and not isinstance(value, float):
        return "ERROR: Not numeric value: "+ str(value)

    m_days = int(value / (3600 * 24))
    m_tmp = int(value % (3600 * 24))
    m_hours = int(m_tmp / 3600)
    m_tmp = int(m_tmp % 3600)
    m_minutes = int(m_tmp / 60)
    m_seconds = int(m_tmp % 60)

    m_list = []
    if m_days > 0:
        m_list.append(f"{m_days}d")
    if m_hours > 0:
        m_list.append(f"{m_hours}h")
    m_list.append(f"{m_minutes}m")
    m_list.append(f"{m_seconds}s")
    return " ".join(m_list)


class PFPrettyPrinter():
    def __init__(self, opts):
        self.opts = opts

    def geti(self, indent):
        return " " * (self.opts.indent * indent)

    def getw(self, indent):
        tmp = self.opts.width - (self.opts.indent * indent)
        if tmp < 0:
            tmp = 0
        return tmp

    def print_indent(self, indent, smsg):
        print(self.geti(indent) + smsg)

    def print_title(self, indent, mfield):
        if "title" in mfield:
            print("")
            self.print_indent(indent, f"{mfield['title']}")

    def print_field(self, qindent, qtitle, qdata):
        self.print_indent(qindent,
            ("{:<"+ str(self.getw(qindent)) +"}: {}").format(qtitle, qdata))

    def get_column(self, qstr):
        return ("{:<"+ str(self.opts.cwidth) +"}").format(qstr)

    def get_field(self, mdata, mfield, mhandled):
        # Check requisites
        if "name" not in mfield or "title" not in mfield:
            raise Exception("Invalid normal field definition: "+ str(mfield))

        # Does the field exist in data?
        if mfield["name"] in mdata:
            # Get data
            mhandled[mfield["name"]] = True
            qdata = mdata[mfield["name"]]

            # Handle companion fields
            if "companion" in mfield:
                if not isinstance(mfield["companion"], list):
                    raise Exception("Field 'companion' is not a list: "+ str(mfield))

                qdata = [ qdata ]
                for ename in mfield["companion"]:
                    if ename in mdata:
                        mhandled[ename] = True
                        qdata.append(mdata[ename])
                    else:
                        qdata.append(None)

            # Call the processing function, if any defined
            if "func" in mfield:
                if not callable(mfield["func"]):
                    raise Exception("Field formatting function is not callable: "+ str(mfield))
                qdata = mfield["func"](qdata)

            # Format if required
            if "fmt" in mfield:
                if isinstance(qdata, list):
                    return mfield["fmt"].format(*qdata)
                else:
                    return mfield["fmt"].format(qdata)
            else:
                return str(qdata)
        else:
            return None

    def print_aggregate(self, indent, mdata, mfield, uhandled):
        # Check requisites
        if "columns" not in mfield or "fields" not in mfield:
            raise Exception("Invalid aggregate field definition: "+ str(mfield))

        if not isinstance(mfield["columns"], list):
            raise Exception("Aggregate columns list is not a list! "+ str(mfield))

        if not isinstance(mfield["fields"], list):
            raise Exception("Aggregate fields list is not a list! "+ str(mfield))

        if "indent" in mfield:
            tindent = indent + mfield["indent"]
        else:
            tindent = indent

        # Collect the aggregate data
        maggregate = {}
        mhandled = {}
        naggregate = 0
        for agroup in mfield["columns"]:
            uhandled[agroup] = True
            if agroup not in mdata:
                raise Exception(f"Aggregate column '{agroup}' not found in data: "+ str(mfield))

            maggregate[agroup] = {}
            for afield in mfield["fields"]:
                aname = afield["name"]
                mhandled[aname] = True
                mtmp = self.get_field(mdata[agroup], afield, mhandled)
                if mtmp is not None:
                    maggregate[agroup][aname] = mtmp
                    naggregate += 1

        # Print it
        if naggregate > 0:
            self.print_title(indent, mfield)
            for afield in mfield["fields"]:
                aname = afield["name"]
                # Skip fields that were not present in any of the columns, and
                # substitute an empty column for partially missing values, so
                # a missing value does not raise a KeyError here.
                if not any(aname in maggregate[agroup] for agroup in mfield["columns"]):
                    continue
                ptmp = []
                for mgroup in mfield["columns"]:
                    ptmp.append(self.get_column(maggregate[mgroup].get(aname, "")))
                self.print_field(tindent, afield["title"], " / ".join(ptmp))

        # Print out non-handled fields
        munhandled = {}
        mundata = {}
        for agroup in mfield["columns"]:
            for afield in mdata[agroup]:
                if afield not in mhandled:
                    if afield not in munhandled:
                        munhandled[afield] = 0
                    munhandled[afield] += 1

                    if afield not in mundata:
                        mundata[afield] = []
                    mundata[afield].append(self.get_column(mdata[agroup][afield]))

        for afield in munhandled:
            if munhandled[afield] == len(mfield["columns"]):
                #qtitle = afield.replace("_", " ").capitalize()
                qtitle = afield
                self.print_field(tindent, "*"+ qtitle, " / ".join(mundata[afield]))
            else:
                # Print a warning if there is an unbalanced amount of data, e.g.
                # the value exists in one of the aggregated lists, but not in the other(s)
                self.print_indent(tindent,
                    "WARNING: Field '{}' is in aggregate list '{}', but is unbalanced!".format(
                    afield, " | ".join(mfield["columns"])))

    def print_dict(self, indent, mdata, mlist):
        mhandled = {}
        for afield in mlist:
            if isinstance(afield, dict):
                # Get field type, default to "normal" if not set
                if "type" in afield:
                    ftype = afield["type"]
                else:
                    ftype = PF_FIELD_NORMAL

                # Act according to field type
                if ftype == PF_FIELD_NORMAL:
                    # Get field data, formatted, if it exists
                    qtmp = self.get_field(mdata, afield, mhandled)
                    if qtmp is not None:
                        self.print_field(indent, afield["title"], qtmp)

                elif ftype == PF_FIELD_LIST:
                    # Check requisites
                    if "name" not in afield or "list" not in afield:
                        raise Exception("Invalid list field definition: "+ str(afield))

                    if "indent" in afield:
                        tindent = indent + afield["indent"]
                    else:
                        tindent = indent

                    if afield["name"] in mdata:
                        mhandled[afield["name"]] = True
                        self.print_title(indent, afield)
                        self.print_dict(tindent, mdata[afield["name"]], afield["list"])

                elif ftype == PF_FIELD_AGGREGATE:
                    self.print_aggregate(indent, mdata, afield, mhandled)

                else:
                    raise Exception("Invalid field type: "+ str(afield))
            else:
                raise Exception("Invalid field definition: "+ str(afield))

        # Print out non-handled fields
        for afield in mdata:
            if afield not in mhandled:
                #qtitle = afield.replace("_", " ").capitalize()
                qtitle = afield
                self.print_field(indent, qtitle, str(mdata[afield]))


class PFArgumentParser(argparse.ArgumentParser):
    def print_help(self):
        print("Utility for displaying OpenWRT DSL-stat JSON files\n"
              "by Matti 'ccr' Hämäläinen\n"
              "(C) Copyright 2021-2023 Tecnic Software productions (TNSP)\n")
        super().print_help()

    def error(self, msg):
        self.print_help()
        print(f"\nERROR: {msg}", file=sys.stderr)
        sys.exit(2)


###
### Main program starts
###
if __name__ == "__main__":
    signal.signal(signal.SIGINT, pf_signal_handler)

    ### Parse arguments
    optparser = PFArgumentParser(
        usage="%(prog)s [options] <filename>")

    optparser.add_argument("filename",
        type=str, action="store",
        metavar="filename",
        help="JSON filename or '-' for stdin")

    optparser.add_argument("-i", "--indent",
        dest="indent",
        type=functools.partial(pf_arg_range, vmin=0, vmax=32),
        default=4, metavar="N",
        help="set indentation width (default: %(default)s)")

    optparser.add_argument("-w", "--width",
        dest="width",
        type=functools.partial(pf_arg_range, vmin=0, vmax=255),
        default=45, metavar="N",
        help="set title width (default: %(default)s)")

    optparser.add_argument("-c", "--cwidth",
        dest="cwidth",
        type=functools.partial(pf_arg_range, vmin=0, vmax=255),
        default=15, metavar="N",
        help="set aggregate column width (default: %(default)s)")

    opts = optparser.parse_args()

    ### Read in and parse the JSON data
    try:
        if opts.filename != "-":
            with open(opts.filename, "rb") as fh:
                data = json.load(fh)
        else:
            data = json.load(sys.stdin)
    except Exception as e:
        pf_fatal(f"JSON parsing failed: {str(e)}")

    if data is None or not isinstance(data, dict):
        pf_fatal("The input JSON data is invalid!")

    ### Pretty-print the parsed data
    try:
        printer = PFPrettyPrinter(opts)
        printer.print_dict(0, data, pf_lst_fields)
    except Exception as e:
        pf_fatal(str(e))