Mercurial > hg > lxmldump
view lxmldump.py @ 30:34755af2ea1f
Make Ptr field URL formatting configurable.
author | Matti Hamalainen <ccr@tnsp.org> |
---|---|
date | Tue, 25 May 2021 12:40:03 +0300 |
parents | f91ef7d7615b |
children | 4cbefe4c6f53 |
line wrap: on
line source
#!/usr/bin/python3 -B
# coding=utf-8
###
### lxmldump - Dump ISO/FDIS 1951 XML file data
### Programmed and designed by Matti 'ccr' Hämäläinen <ccr@tnsp.org>
### (C) Copyright 2021 Tecnic Software productions (TNSP)
###
### Released / distributed under 3-clause BSD license
### (see file "COPYING" for more information)
###
### Python 3.7+ required!
###
import sys
import signal
import re
from pathlib import Path
import xml.etree.ElementTree as xmlET
import unicodedata
import argparse

assert sys.version_info >= (3, 7)


###
### Default settings
###

# Operation modes
PKK_MODE_NORMAL = 0
PKK_MODE_DUMP = 1
PKK_MODE_XML = 2

# Human-readable names for the operation modes above
pkk_modes_list = {
    PKK_MODE_NORMAL: "normal",
    PKK_MODE_DUMP: "dump",
    PKK_MODE_XML: "xml",
}

# Default Ptr URL format strings, keyed by operation mode.
# Placeholders: {href} = link target, {text} = link text.
pkk_ptr_url_fmt = {
    PKK_MODE_NORMAL: "<PTR:{href}>{text}</PTR>",
}

# Element annotation mappings: tag -> mode -> [prefix, suffix]
pkk_element_annotation_map = {
    "Fragment": {
        PKK_MODE_NORMAL: ["<", ">"],
    },
}

# List of words in kks1/ useful for debugging, option -p
pkk_debug_list = [
    "ahas", "ahavakkaine", "ahavakala", "ahavakoittuo",
    "ahvaliha", "aloilleh", "hanjahtoakseh", "akkalisto",
]


###
### Misc. helper functions, etc
###
def pkk_cleanup():
    """Perform pre-exit cleanup. Currently a no-op; returns 0."""
    return 0


def pkk_print(smsg):
    """Print string to stdout, NFC-normalizing the Unicode if enabled
    via the -n option. Broken-pipe errors (e.g. piping into 'head')
    are silenced by closing stderr so no traceback noise is emitted.
    """
    try:
        if pkk_cfg.normalize:
            sys.stdout.write(unicodedata.normalize("NFC", smsg))
        else:
            sys.stdout.write(smsg)
    except (BrokenPipeError, IOError):
        # Closing stderr suppresses Python's "Exception ignored" message
        # when the output pipe goes away mid-run.
        sys.stderr.close()


def pkk_printi(indent, smsg):
    """Print string indented by (indent * configured indent width) spaces."""
    pkk_print((" " * pkk_cfg.indent * indent) + smsg)


def pkk_fatal(smsg):
    """Print a fatal error message and terminate with exit status 1."""
    print("ERROR: " + smsg)
    sys.exit(1)


def pkk_signal_handler(sig, frame):
    """Handle SIGINT (Ctrl+C): clean up and exit with status 1."""
    # NOTE: parameter renamed from 'signal' to avoid shadowing the module.
    pkk_cleanup()
    print("\nQuitting due to SIGINT / Ctrl+C!")
    sys.exit(1)


def pkk_str_annotate(mtag, mstr):
    """Wrap the given string in the prefix/suffix configured for the given
    element tag in pkk_element_annotation_map, when annotation (-a) is
    enabled and the tag has a mapping. Falls back to the NORMAL mode
    mapping when the current mode has none. Otherwise returns the string
    unchanged.
    """
    if pkk_cfg.annotate and mtag in pkk_element_annotation_map:
        if pkk_cfg.mode in pkk_element_annotation_map[mtag]:
            mmode = pkk_cfg.mode
        else:
            mmode = PKK_MODE_NORMAL
        return pkk_element_annotation_map[mtag][mmode][0] + mstr + \
            pkk_element_annotation_map[mtag][mmode][1]
    else:
        return mstr


def pkk_str_clean(mstr):
    """Return string with all tabs, newlines and carriage returns removed."""
    return re.sub(r'[\n\r\t]', '', mstr)


def pkk_ptr_to_text(pnode):
    """Format a "Ptr" (cross-reference link) node as text.

    Format string priority: the --ptr-url-fmt option, then the
    mode-specific default, then the NORMAL mode default.
    """
    if pkk_cfg.ptr_url_fmt is not None:
        # Custom format set on the command line
        pfmt = pkk_cfg.ptr_url_fmt
    elif pkk_cfg.mode in pkk_ptr_url_fmt:
        # Mode-specific default
        pfmt = pkk_ptr_url_fmt[pkk_cfg.mode]
    else:
        # Last resort is the normal mode format
        pfmt = pkk_ptr_url_fmt[PKK_MODE_NORMAL]

    # NOTE(review): the standard XLink namespace is
    # "http://www.w3.org/1999/xlink"; the input data presumably uses this
    # non-standard "TR/xlink" URI -- verify against the source XML files.
    return pfmt.format(
        text=("".join(pnode.itertext())).strip(),
        href=pnode.attrib["{http://www.w3.org/TR/xlink}href"])


def pkk_node_to_text(lnode):
    """Return the flattened, cleaned text inside the given node, formatting
    any "Ptr" descendants via pkk_ptr_to_text() and annotating element
    text via pkk_str_annotate().
    """
    # NOTE(review): lnode.iter() also descends into a Ptr's children, so a
    # Ptr with child elements would have its text emitted twice -- confirm
    # Ptr nodes in the data only contain plain text.
    stmp = ""
    for pnode in lnode.iter():
        if pnode.tag == "Ptr":
            stmp += pkk_ptr_to_text(pnode)
        else:
            if isinstance(pnode.text, str):
                stmp += pkk_str_annotate(pnode.tag,
                    pkk_str_clean(pnode.text).strip())
            if isinstance(pnode.tail, str):
                stmp += pkk_str_clean(pnode.tail)
    return stmp.strip()


def pkk_dump_recursive(indent, lnode):
    """Simple recursive dump starting at given node: one line per element
    with its attributes and quoted text, children indented one level
    deeper. "Example" nodes are flattened to a single text line.
    """
    if lnode.tag in ["Example"]:
        stmp = pkk_node_to_text(lnode)
        pkk_printi(indent, f"{lnode.tag} \"{stmp}\"\n")
    else:
        if isinstance(lnode.text, str):
            textstr = pkk_str_clean(lnode.text).strip()
            if textstr != "":
                textstr = " \"" + textstr + "\""
        else:
            textstr = ""

        if len(lnode.attrib) > 0:
            attrstr = " " + str(lnode.attrib)
        else:
            attrstr = ""

        pkk_printi(indent, f"{lnode.tag}{attrstr}{textstr}\n")

        for qnode in lnode.findall("./*"):
            pkk_dump_recursive(indent + 1, qnode)


def pkk_output_subs_fmt(indent, dnode, dsub, dname, dfmt):
    """Output item(s) matching XPath 'dsub' under given node with the
    given format string (placeholders {nname}, {ntext}).
    """
    for qnode in dnode.findall(dsub):
        pkk_printi(indent, dfmt.format(nname=dname,
            ntext=pkk_node_to_text(qnode)))


def pkk_output_subs_prefix(indent, dnode, dsub, dname):
    """Output item(s) matching XPath 'dsub' with a prefixed name string."""
    pkk_output_subs_fmt(indent, dnode, dsub, dname, "{nname} \"{ntext}\"\n")


def pkk_output_sense(indent, dnode):
    """Output a main "Headword" or "Sense" node under it: search form,
    definition, and examples with optional geographical usage info.
    """
    # Search form and definition
    pkk_output_subs_prefix(indent, dnode, "./SearchForm", "srch")
    pkk_output_subs_prefix(indent, dnode, "./Definition", "defn")

    # Examples, each possibly with geographical usage ("levikki") labels
    for wnode in dnode.findall("./ExampleBlock/ExampleCtn"):
        sstr = pkk_node_to_text(wnode.find("./Example"))
        lstr = ""
        ltmp = []
        for qnode in wnode.findall(
                "./FreeTopic[@type='levikki']/GeographicalUsage"):
            ltmp.append("{} [{}]".format(
                pkk_node_to_text(qnode), qnode.attrib["class"]))
        if ltmp:
            lstr = " ({})".format(", ".join(ltmp))

        pkk_printi(indent + 1, "{} \"{}\"{}\n".format("exmp", sstr, lstr))


def pkk_output_node(indent, dnode):
    """Output one "DictionaryEntry" node: headword(s) with grammatical
    attributes, the main sense, and any numbered sub-senses.
    """
    for wnode in dnode.findall("./HeadwordCtn"):
        # Collect grammatical attributes (noun, verb, etc.)
        tmpl = []
        for pnode in wnode.findall("./PartOfSpeechCtn/PartOfSpeech"):
            tmpl.append(pnode.attrib["freeValue"])

        for pnode in wnode.findall("./GrammaticalNote"):
            tmpl.append(pkk_node_to_text(pnode))

        # Remove duplicates and sort the list
        tmpl = sorted(set(tmpl))

        # Print the headword and attributes if any
        pkk_output_subs_fmt(indent, wnode, "./Headword", "", "\"{ntext}\"")
        if tmpl:
            pkk_print(" ({nlist})".format(nlist=" ; ".join(tmpl)))
        pkk_print("\n")

        # Print main "sense"
        pkk_output_sense(indent + 1, wnode)

    # Print any other "senses"
    index = 1
    for wnode in dnode.findall("./SenseGrp"):
        pkk_printi(indent + 1, f"sense #{index}\n")
        pkk_output_sense(indent + 2, wnode)
        index += 1


###
### Main program starts
###
if __name__ == "__main__":
    signal.signal(signal.SIGINT, pkk_signal_handler)

    optparser = argparse.ArgumentParser(
        description="lxmldump - Dump ISO/FDIS 1951 XML file data",
        usage="%(prog)s [options] <input xml file(s)>",
        epilog="\n\n"
        )

    # NOTE(review): action="extend" requires Python 3.8+, though the assert
    # at the top only demands 3.7 -- confirm the intended minimum version.
    optparser.add_argument("filenames",
        type=str, action="extend", nargs="*",
        metavar="filename",
        help="XML filename(s)")

    optparser.add_argument("-d", "--dump",
        dest="mode", action="store_const",
        const=PKK_MODE_DUMP, default=PKK_MODE_NORMAL,
        help="output as simple dump")

    optparser.add_argument("-x", "--xml",
        dest="mode", action="store_const",
        const=PKK_MODE_XML,
        help="output as XML")

    optparser.add_argument("--ptr-url-fmt",
        dest="ptr_url_fmt", type=str, default=None,
        metavar="str",
        help='Ptr URL format string (see below)')

    optparser.add_argument("-n", "--normalize",
        dest="normalize", action="store_true",
        help="output NFC normalized Unicode")

    optparser.add_argument("-a", "--annotate",
        dest="annotate", action="store_true",
        help="annotate strings")

    optparser.add_argument("-p", "--debug",
        dest="debug", action="store_true",
        help=argparse.SUPPRESS)

    optparser.add_argument("-i", "--indent",
        dest="indent", type=int,
        choices=range(0, 32), default=4,
        metavar="n",
        help='indent output by <n> characters (default: %(default)s)')

    ### Show help if needed
    pkk_cfg = optparser.parse_args()
    if not pkk_cfg.filenames:
        optparser.print_help()
        sys.exit(0)

    ### Handle each input file
    for filename in pkk_cfg.filenames:
        # Parse XML file into element tree
        try:
            uxml = xmlET.parse(filename)
        except Exception as e:
            pkk_fatal("XML parsing failed: {0}".format(str(e)))

        # Dump output
        try:
            xroot = uxml.getroot()
            for dnode in xroot.findall("./DictionaryEntry"):
                # With -p, restrict output to the debug word list
                if pkk_cfg.debug and \
                        dnode.attrib["identifier"] not in pkk_debug_list:
                    continue

                if pkk_cfg.mode == PKK_MODE_NORMAL:
                    try:
                        pkk_output_node(0, dnode)
                    except Exception as e:
                        # Debug aid: on failure, dump the raw tree and stop
                        pkk_dump_recursive(0, dnode)
                        print(str(e))
                        sys.exit(0)
                elif pkk_cfg.mode == PKK_MODE_DUMP:
                    pkk_dump_recursive(0, dnode)
                elif pkk_cfg.mode == PKK_MODE_XML:
                    pkk_print(str(xmlET.tostring(dnode,
                        encoding="utf8")) + "\n")
                else:
                    pkk_fatal("Invalid operation mode?")

                print("\n")
        except (BrokenPipeError, IOError):
            sys.stderr.close()
            sys.exit(1)

    pkk_cleanup()
    sys.exit(0)