Mercurial > hg > lxmldump
view lxmldump.py @ 84:1cb41a6cd1cd default tip
Cleanups.
author | Matti Hämäläinen <ccr@tnsp.org> |
---|---|
date | Tue, 31 Oct 2023 20:19:14 +0200 |
parents | a42e0ca2277f |
children |
line wrap: on
line source
#!/usr/bin/python3 -B
# coding=utf-8
###
### lxmldump - Convert and dump ISO/FDIS 1951 XML file data
### Programmed and designed by Matti 'ccr' Hämäläinen <ccr@tnsp.org>
### (C) Copyright 2021-2022 Tecnic Software productions (TNSP)
###
### Released / distributed under 3-clause BSD license
### (see file "COPYING" for more information)
###
### Python 3.7+ required!
###
import sys
import signal
import functools
import re
import xml.etree.ElementTree as xmlET
import unicodedata
import argparse

assert sys.version_info >= (3, 7), 'Python >= 3.7 required'


###
### Default settings
###

# Operation modes (selected with -m / --mode)
PKK_MODE_NORMAL = 0
PKK_MODE_DUMP = 1
PKK_MODE_XML = 2
PKK_MODE_ANKI = 3

# Mapping from mode IDs to the mode names accepted on the command line
pkk_modes_list = {
    PKK_MODE_NORMAL: "normal",
    PKK_MODE_DUMP: "dump",
    PKK_MODE_XML: "xml",
    PKK_MODE_ANKI: "anki",
}

# Default format strings, keyed by format string ID, then by output mode.
# Looked up via pkk_get_value() / pkk_get_fmt(); when an ID has no entry
# for the active mode, the PKK_MODE_NORMAL entry is used as a fallback.
# Individual strings can be overridden on the command line via "-s id=value".
pkk_mode_defaults = {
    # Default Ptr URL format strings
    "ptr_fmt": {
        PKK_MODE_NORMAL: "<PTR:{href}>{text}</PTR>",
        PKK_MODE_ANKI: "<a href='https://kaino.kotus.fi/cgi-bin/kks/karjala.cgi?a={href}'>{text}</a>",
    },
    # Top-level format for one dictionary entry ("word")
    "word_item": {
        PKK_MODE_NORMAL: "{word}{search}{attr}{hyphenation}:{main_sense}{other_senses}\n",
        PKK_MODE_ANKI: "\"{word}\"{search}{attr}{hyphenation};{main_sense}{other_senses}\n",
    },
    # Grammatical attribute list (part of speech, grammatical notes)
    "word_attr_list": {
        PKK_MODE_NORMAL: " ({alist})",
        PKK_MODE_ANKI: " ({alist})",
    },
    "word_attr_list_empty": {
        PKK_MODE_NORMAL: "",
        PKK_MODE_ANKI: "",
    },
    "word_attr_list_item": {
        PKK_MODE_NORMAL: "{text}",
    },
    "word_attr_list_sep": {
        PKK_MODE_NORMAL: "; ",
        PKK_MODE_ANKI: " : ",
    },
    # Hyphenation note formatting
    "hyphenation": {
        PKK_MODE_NORMAL: " [hyph: {text}]",
        PKK_MODE_ANKI: " [hyph: {text}]",
    },
    "no_hyphenation": {
        PKK_MODE_NORMAL: "",
    },
    # Alternate search forms of the headword
    "search_list": {
        PKK_MODE_NORMAL: ", {alist}",
        PKK_MODE_ANKI: ", {alist}",
    },
    "search_list_empty": {
        PKK_MODE_NORMAL: "",
    },
    "search_list_item": {
        PKK_MODE_NORMAL: "{text}",
        PKK_MODE_ANKI: "\"{text}\"",
    },
    "search_list_sep": {
        PKK_MODE_NORMAL: ", ",
    },
    # Main sense (definition + examples) of an entry
    "main_sense_item": {
        PKK_MODE_NORMAL: "{definition}{example_list}",
        PKK_MODE_ANKI: "{definition}{example_list}",
    },
    # Additional numbered senses
    "sense_list": {
        PKK_MODE_NORMAL: "{alist}",
        PKK_MODE_ANKI: " | {alist}",
    },
    "sense_list_empty": {
        PKK_MODE_NORMAL: "",
    },
    "sense_list_item": {
        PKK_MODE_NORMAL: "#{index}:{definition}{example_list}",
        PKK_MODE_ANKI: "#{index}:{definition}{example_list}",
    },
    "sense_list_sep": {
        PKK_MODE_NORMAL: " | ",
        PKK_MODE_ANKI: " | ",
    },
    "definition_item": {
        PKK_MODE_NORMAL: " {text}",
        PKK_MODE_ANKI: " ? \"{text}\"",
    },
    # Usage examples under a sense
    "example_list": {
        PKK_MODE_NORMAL: " | {alist}",
    },
    "example_list_empty": {
        PKK_MODE_NORMAL: "",
    },
    "example_list_item": {
        PKK_MODE_NORMAL: "\"{text}\"{geo_list}",
        PKK_MODE_ANKI: " * \"{text}\"{geo_list}",
    },
    "example_list_sep": {
        PKK_MODE_NORMAL: " ; ",
        PKK_MODE_ANKI: "",
    },
    # Geographical usage annotations attached to an example
    "example_geo_list": {
        PKK_MODE_NORMAL: " ({alist})",
    },
    "example_geo_list_empty": {
        PKK_MODE_NORMAL: "",
    },
    "example_geo_list_item": {
        PKK_MODE_NORMAL: "{text} [{tclass}]",
    },
    "example_geo_list_sep": {
        PKK_MODE_NORMAL: ", ",
    },
}

# Element annotation mappings: per element tag, the [prefix, suffix] pair
# wrapped around the element text when -a / --annotate is enabled
pkk_element_annotation_map = {
    "Fragment" : {
        PKK_MODE_NORMAL: ["<", ">"],
        PKK_MODE_ANKI: ["<", ">"],
    },
}

# List of words in kks1/ useful for debugging, option -p
pkk_debug_list = [
    "ahavakkaine",
    "ahavakala",
    "ahavakoittuo",
    "ahava",
    "ahvaliha",
    "aloilleh",
    "hanjahtoakseh",
    "akkalisto",
    "alto-",
    "allot-",
]

# Format string overrides collected from "-s id=value" options (ID -> value)
pkk_settings = {}


###
### Misc. helper functions, etc
###
## Print string to stdout, using NFC normalized Unicode if enabled (-n)
def pkk_print(smsg):
    try:
        if pkk_cfg.normalize:
            sys.stdout.write(unicodedata.normalize("NFC", smsg))
        else:
            sys.stdout.write(smsg)
    except (BrokenPipeError, IOError):
        # Output pipe was closed (e.g. piped into "head"); close stderr
        # to suppress the interpreter's noisy shutdown traceback.
        sys.stderr.close()


## Get indentation whitespace for given indentation level
def pkk_geti(indent):
    return " " * pkk_cfg.indent * indent


## Print string prefixed with indentation
def pkk_printi(indent, smsg):
    pkk_print(pkk_geti(indent) + smsg)


## Fatal error handler: print message to stderr and exit
def pkk_fatal(msg):
    print(f"ERROR: {msg}", file=sys.stderr)
    sys.exit(1)


## Handler for SIGINT signals
def pkk_signal_handler(signal, frame):
    print("\nQuitting due to SIGINT / Ctrl+C!", file=sys.stderr)
    sys.exit(2)


## Argparse "type" checker: integer within inclusive range [vmin, vmax].
## Raises argparse.ArgumentTypeError on non-integer or out-of-range input.
def pkk_arg_range(vstr, vmin, vmax):
    # BUGFIX: the value was converted with int() a second time outside the
    # try block, making the guarded conversion pointless. Convert only once.
    try:
        value = int(vstr)
    except (TypeError, ValueError):
        raise argparse.ArgumentTypeError(f"value '{vstr}' is not an integer")

    if value < vmin or value > vmax:
        raise argparse.ArgumentTypeError(f"value {value} not in range {vmin}-{vmax}")
    else:
        return value


## Argparse action: set output mode from its name (see pkk_modes_list)
class pkk_set_mode(argparse.Action):
    def __call__(self, parser, namespace, value, option_string=None):
        lvalue = value.strip().lower()
        for mode in pkk_modes_list:
            if pkk_modes_list[mode] == lvalue:
                setattr(namespace, self.dest, mode)
                return
        pkk_fatal(f"Invalid output mode '{lvalue}'.")


## Argparse action: override a format string via "-s id=value"
class pkk_set_value(argparse.Action):
    rexpr = re.compile(r'\s*(\w+)\s*=\s*(.*)\s*')

    def __call__(self, parser, namespace, values, option_string=None):
        rmatch = re.match(self.rexpr, values)
        if rmatch:
            # Normalize the ID: lowercase, dashes to underscores
            rid = rmatch.group(1).lower().replace("-", "_")
            rval = rmatch.group(2)
            if rid in pkk_mode_defaults:
                pkk_settings[rid] = rval
            else:
                pkk_fatal(f"Invalid option '{option_string} {values}': No such ID '{rid}'.")
        else:
            pkk_fatal(f"Invalid option '{option_string} {values}': Expected id=value.")


## Determine which mode's default should be used for the given format ID.
## Returns the mode, or None if the ID has no usable entry.
def pkk_test_value(mid):
    if mid not in pkk_mode_defaults:
        return None
    # Prefer the active mode's entry, fall back to the "normal" mode default
    mmode = pkk_cfg.mode if pkk_cfg.mode in pkk_mode_defaults[mid] else PKK_MODE_NORMAL
    return mmode if mmode in pkk_mode_defaults[mid] else None


## Get value for given format ID, honoring command line overrides (-s)
def pkk_get_value(mid):
    if mid in pkk_settings and pkk_settings[mid] is not None:
        return pkk_settings[mid]

    mmode = pkk_test_value(mid)
    if mmode is None:
        pkk_fatal(f"Internal error: No mode for ID '{mid}'.")
    return pkk_mode_defaults[mid][mmode]


## Get format string for given ID, expanding escaped "\n" sequences
def pkk_get_fmt(mid):
    return pkk_get_value(mid).replace("\\n", "\n")


## Argument parser with custom help banner and error reporting
class PKKArgumentParser(argparse.ArgumentParser):
    def print_help(self):
        print("lxmldump - Convert and dump ISO/FDIS 1951 XML file data\n"
            "(C) Copyright 2021-2022 Matti 'ccr' Hämäläinen <ccr@tnsp.org>\n")
        super().print_help()
        print("\nAvailable output modes:")
        print(" " + ", ".join(pkk_modes_list.values()))

    def error(self, msg):
        self.print_help()
        print(f"\nERROR: {msg}", file=sys.stderr)
        sys.exit(2)


## Annotate given string with prefix and suffix based on element tag,
## when -a / --annotate is enabled (see pkk_element_annotation_map)
def pkk_str_annotate(mtag, mstr):
    if pkk_cfg.annotate and mtag in pkk_element_annotation_map:
        if pkk_cfg.mode in pkk_element_annotation_map[mtag]:
            mmode = pkk_cfg.mode
        else:
            mmode = PKK_MODE_NORMAL
        return pkk_element_annotation_map[mtag][mmode][0] + mstr + pkk_element_annotation_map[mtag][mmode][1]
    else:
        return mstr


## Clean string by removing tabs and newlines, stripping edge whitespace
def pkk_str_clean(mstr):
    return re.sub(r'[\n\r\t]', '', mstr).strip()


## Format a "Ptr" (cross-reference link) node as text
def pkk_ptr_to_text(pnode):
    pfmt = pkk_get_fmt("ptr_fmt")
    ptext = ("".join(pnode.itertext())).strip()
    if pkk_cfg.annotate:
        # Drop a trailing period from the link text
        ptext = re.sub(r'\s*\.\s*$', '', ptext)
    phref = pnode.attrib["{http://www.w3.org/TR/xlink}href"].replace(" ", "+")
    return pfmt.format(
        text=ptext,
        href=phref)


## Collect the text inside a given node, formatting "Ptr" subnodes via
## pkk_ptr_to_text() and annotating other elements
def pkk_node_to_text(lnode):
    stmp = ""
    for pnode in lnode.iter():
        if pnode.tag == "Ptr":
            stmp += pkk_ptr_to_text(pnode)
        else:
            if isinstance(pnode.text, str):
                stmp += pkk_str_annotate(pnode.tag, pnode.text)
        # Tail text (after the element's closing tag) belongs to the parent
        if isinstance(pnode.tail, str):
            stmp += pnode.tail
    # Strip a trailing period and clean up whitespace
    return pkk_str_clean(re.sub(r'\s*\.\s*$', '', stmp))
## Simple recursive dump starting at given node (modes "dump" and fallback)
def pkk_dump_recursive(indent, lnode):
    if lnode.tag in ["Example"]:
        # Examples are shown as one collected quoted string
        stmp = pkk_node_to_text(lnode)
        pkk_printi(indent, f"{lnode.tag} \"{stmp}\"\n")
    else:
        # Tag, attributes and immediate text (if any), then recurse
        textstr = pkk_str_clean(lnode.text) if isinstance(lnode.text, str) else ""
        if textstr != "":
            textstr = " \"" + textstr + "\""

        attrstr = " " + str(lnode.attrib) if len(lnode.attrib) > 0 else ""

        pkk_printi(indent, f"{lnode.tag}{attrstr}{textstr}\n")

        for qnode in lnode.findall("./*"):
            pkk_dump_recursive(indent + 1, qnode)


## Output item(s) matching "dsub" under given node with given format string
def pkk_get_subs(indent, dnode, dsub, dfmtname):
    dfmt = pkk_get_fmt(dfmtname)
    ostr = ""
    for qnode in dnode.findall(dsub):
        ostr += dfmt.format(
            text=pkk_node_to_text(qnode),
            indent=pkk_geti(indent))
    return ostr


## Format a list of strings using the "<dprefix>_list*" format strings.
## If dfilter is True, each item is first run through "<dprefix>_list_item".
def pkk_get_list_str(dindent, dlist, dprefix, dfilter):
    if len(dlist) > 0:
        if dfilter:
            tfmt = pkk_get_fmt(dprefix + "_list_item")
            tlist = [tfmt.format(text=i) for i in dlist]
        else:
            tlist = dlist

        return pkk_get_fmt(dprefix + "_list").format(
            alist=pkk_get_fmt(dprefix + "_list_sep").join(tlist),
            indent=pkk_geti(dindent))
    else:
        return pkk_get_fmt(dprefix + "_list_empty").format(
            indent=pkk_geti(dindent))


## Get definition and examples ("sense") from given node, formatted
## with the "dname" format string; dindex is the sense number
def pkk_get_sense(indent, dnode, dname, dindex):
    exlist = []
    index = 1
    for wnode in dnode.findall("./ExampleBlock/ExampleCtn"):
        # Collect geographical usage annotations for this example
        geolist = []
        for qnode in wnode.findall("./FreeTopic[@type='levikki']/GeographicalUsage"):
            geolist.append(pkk_get_fmt("example_geo_list_item").format(
                text=pkk_node_to_text(qnode),
                tclass=qnode.attrib["class"],
                indent=pkk_geti(indent + 2)))

        exlist.append(pkk_get_fmt("example_list_item").format(
            text=pkk_node_to_text(wnode.find("./Example")),
            geo_list=pkk_get_list_str(indent + 1, geolist, "example_geo", False),
            indent=pkk_geti(indent + 1),
            index=index))
        index += 1

    return pkk_get_fmt(dname).format(
        definition=pkk_get_subs(indent, dnode, "./Definition", "definition_item"),
        example_list=pkk_get_list_str(indent, exlist, "example", False),
        indent=pkk_geti(indent),
        index=dindex)


## Output one "DictionaryEntry" node (modes "normal" and "anki")
def pkk_output_node(indent, dnode):
    for wnode in dnode.findall("./HeadwordCtn"):
        # Get head word
        headword = pkk_node_to_text(wnode.find("./Headword"))

        # Collect search forms
        srchlist = []
        for qnode in wnode.findall("./SearchForm"):
            srchlist.append(pkk_node_to_text(qnode))

        # Remove dupe if headword is also in srchlist
        if headword in srchlist:
            srchlist.remove(headword)

        # Remove other duplicates and sort (the old (attr, len(attr)) sort
        # key was redundant: equal strings always have equal lengths)
        srchlist = sorted(set(srchlist))

        # Get hyphenation note, if any
        hnode = wnode.find("./Hyphenation")
        if hnode is not None:
            hyphenation = pkk_get_fmt("hyphenation").format(
                text=pkk_node_to_text(hnode),
                indent=pkk_geti(indent + 1))
        else:
            hyphenation = pkk_get_fmt("no_hyphenation").format(
                indent=pkk_geti(indent + 1))

        # Create list with grammatical attributes (noun, verb, etc.)
        attrlist = []
        for pnode in wnode.findall("./PartOfSpeechCtn/PartOfSpeech"):
            attrlist.append(pnode.attrib["freeValue"])

        for pnode in wnode.findall("./GrammaticalNote"):
            attrlist.append(pkk_node_to_text(pnode))

        # Remove duplicates and sort the list
        attrlist = sorted(set(attrlist))

        # Get main "sense"
        msense = pkk_get_sense(indent + 1, wnode, "main_sense_item", 0)

        # Collect any other "senses"
        index = 1
        senselist = []
        for znode in dnode.findall("./SenseGrp"):
            senselist.append(pkk_get_sense(indent + 1, znode, "sense_list_item", index))
            index += 1

        # Print the headword and attributes if any
        pkk_print(pkk_get_fmt("word_item").format(
            word=headword,
            attr=pkk_get_list_str(indent + 1, attrlist, "word_attr", True),
            search=pkk_get_list_str(indent + 1, srchlist, "search", True),
            hyphenation=hyphenation,
            main_sense=msense,
            other_senses=pkk_get_list_str(indent + 1, senselist, "sense", False),
            indent=pkk_geti(indent)))


###
### Main program starts
###
if __name__ == "__main__":
    signal.signal(signal.SIGINT, pkk_signal_handler)

    ### Parse arguments
    optparser = PKKArgumentParser(
        usage="%(prog)s [options] <input xml file(s)>"
        )

    optparser.add_argument("filenames",
        type=str, action="extend", nargs="*",
        metavar="filename", help="XML filename(s)")

    optparser.add_argument("-m", "--mode",
        dest="mode", action=pkk_set_mode,
        default=PKK_MODE_NORMAL,
        help="set output mode (see below)")

    optparser.add_argument("-s", "--set",
        action=pkk_set_value, metavar="ID=STR",
        help='set format string (see below)')

    optparser.add_argument("-n", "--normalize",
        dest="normalize", action="store_true",
        help="output NFC normalized Unicode")

    optparser.add_argument("-a", "--annotate",
        dest="annotate", action="store_true",
        help="annotate strings")

    optparser.add_argument("-i", "--indent",
        dest="indent",
        type=functools.partial(pkk_arg_range, vmin=0, vmax=32),
        default=4, metavar="N",
        help='set indentation width (default: %(default)s)')

    optparser.add_argument("-l", "--list",
        dest="list_values", action="store_true",
        help="show list of format strings for current mode")

    optparser.add_argument("-p", "--debug",
        dest="debug", action="store_true",
        help=argparse.SUPPRESS)

    pkk_cfg = optparser.parse_args()

    ### Handle -l / --list: show effective format strings and exit
    if pkk_cfg.list_values:
        if pkk_cfg.mode in [PKK_MODE_NORMAL, PKK_MODE_ANKI]:
            print("Available format strings and values (mode '{}'):".format(
                pkk_modes_list[pkk_cfg.mode]))
            for mid in pkk_mode_defaults:
                stmp = pkk_get_value(mid).replace("\\", "\\\\").replace("\n", "\\n")
                # BUGFIX: the "f" prefix was missing here, so the literal
                # placeholder text was printed instead of the values
                print(f" {mid:22s} : '{stmp}'")
        else:
            print("Mode '{}' does not use format strings.".format(
                pkk_modes_list[pkk_cfg.mode]))
        sys.exit(0)
    elif len(pkk_cfg.filenames) == 0:
        optparser.print_help()

    ### Handle each input file
    for filename in pkk_cfg.filenames:
        # Parse XML file into element tree
        try:
            uxml = xmlET.parse(filename)
        except Exception as e:
            # BUGFIX: the message said "SVG/XML", apparently a copy-paste
            # leftover; this tool parses ISO 1951 dictionary XML
            pkk_fatal(f"XML parsing failed: {str(e)}")

        # Dump output
        try:
            xroot = uxml.getroot()
            for dnode in xroot.findall("./DictionaryEntry"):
                # With -p, restrict output to the debug word list
                if pkk_cfg.debug and dnode.attrib["identifier"] not in pkk_debug_list:
                    continue

                if pkk_cfg.mode in [PKK_MODE_NORMAL, PKK_MODE_ANKI]:
                    try:
                        pkk_output_node(0, dnode)
                    except Exception as e:
                        # On failure, dump the raw node to aid debugging
                        print("")
                        pkk_dump_recursive(0, dnode)
                        print(str(e))
                        sys.exit(0)
                elif pkk_cfg.mode == PKK_MODE_DUMP:
                    pkk_dump_recursive(0, dnode)
                    print("")
                elif pkk_cfg.mode == PKK_MODE_XML:
                    pkk_print(xmlET.tostring(dnode, encoding="utf8").decode("utf8") + "\n\n")
                else:
                    pkk_fatal(f"Invalid operation mode '{pkk_cfg.mode}'.")
        except (BrokenPipeError, IOError) as e:
            # Output pipe closed mid-write; exit quietly
            sys.stderr.close()
            sys.exit(1)