X-Git-Url: https://git.donarmstrong.com/?a=blobdiff_plain;f=buildscripts%2Fadd_html_footer.py;h=34ada97aac97d2cb7a5cfc8af0c0c481aab23aec;hb=37ca1f80bf5401accd17056938f4f7b2c147ddb2;hp=d70ddcee6ea87d34ba46d2078feb50d9d198fc3a;hpb=2b5cabd0f53c8fc3e553fee7b3aa5c3352e1106a;p=lilypond.git

diff --git a/buildscripts/add_html_footer.py b/buildscripts/add_html_footer.py
index d70ddcee6e..34ada97aac 100644
--- a/buildscripts/add_html_footer.py
+++ b/buildscripts/add_html_footer.py
@@ -15,17 +15,13 @@ non_copied_pages = ['Documentation/user/out-www/lilypond-big-page',
                     'Documentation/user/out-www/lilypond-internals-big-page',
                     'Documentation/user/out-www/music-glossary-big-page',
                     'out-www/examples',
-                    'Documentation/topdocs/out-www/NEWS',
-                    'Documentation/topdocs/out-www/INSTALL',
-                    'Documentation/bibliography/out-www/index',
-                    'Documentation/bibliography/out-www/engraving',
-                    'Documentation/bibliography/out-www/colorado',
-                    'Documentation/bibliography/out-www/computer-notation'
+                    'Documentation/topdocs',
+                    'Documentation/bibliography',
                     'Documentation/out-www/THANKS',
                     'Documentation/out-www/DEDICATION',
-                    'Documentation/topdocs/out-www/AUTHORS']
+                    'input/']

-def _ (s):
+def _doc (s):
     return s

 header = r"""
@@ -37,22 +33,27 @@ footer = '''
 %(footer_name_version)s
+
+%(footer_report_errors)s
+
+%(footer_suggest_docs)s
-
-%(footer_report_errors)s

 '''
-footer_name_version = _ ('This page is for %(package_name)s-%(package_version)s (%(branch_str)s).')
-footer_report_errors = _ ('Report errors to %(mail_address)s.')
+footer_name_version = _doc ('This page is for %(package_name)s-%(package_version)s (%(branch_str)s).')
+footer_report_errors = _doc ('Report errors to %(mail_address)s.')
+# ugh, must not have "_doc" in strings because it is naively replaced with "_" in hacked gettext process
+footer_suggest_docs = _doc ('Your suggestions for the documentation are welcome.')

 mail_address = 'http://post.gmane.org/post.php?group=gmane.comp.gnu.lilypond.bugs'
+suggest_Docs_url = 'http://lilypond.org/web/devel/participating/documentation-adding'

 header_tag = ''
 footer_tag = ''

-lang_available = _ ("Other languages: %s.")
-browser_lang = _ ('About automatic language selection.')
+lang_available = _doc ("Other languages: %s.")
+browser_lang = _doc ('About automatic language selection.')
 browser_language_url = "/web/about/browser-language"

 LANGUAGES_TEMPLATE = '''
@@ -68,7 +69,7 @@ html_re = re.compile ('(.*?)(?:[.]([^/.]*))?[.]html$')
 pages_dict = {}

 def build_pages_dict (filelist):
-    """Build dictionnary of available translations of each page"""
+    """Build dictionary of available translations of each page"""
     global pages_dict
     for f in filelist:
         m = html_re.match (f)
@@ -78,11 +79,39 @@
                 e = ''
             else:
                 e = g[1]
-            if not g[0] in pages_dict.keys():
+            if not g[0] in pages_dict:
                 pages_dict[g[0]] = [e]
             else:
                 pages_dict[g[0]].append (e)

+def source_links_replace (m, source_val):
+    return 'href="' + os.path.join (source_val, m.group (1)) + '"'
+
+splitted_docs_re = re.compile ('(input/lsr/out-www/lilypond-snippets|Documentation/user/out-www/(lilypond|music-glossary|lilypond-program|lilypond-learning))/')
+
+snippets_ref_re = re.compile (r'href="(\.\./)?lilypond-snippets')
+user_ref_re = re.compile (r'href="(?:\.\./)?lilypond(|-internals|-learning|-program)')
+
+## Windows does not support symlinks.
+# This function avoids creating symlinks for splitted HTML manuals
+# Get rid of symlinks in GNUmakefile.in (local-WWW-post)
+# this also fixes missing PNGs only present in translated docs
+def hack_urls (s, prefix):
+    if splitted_docs_re.match (prefix):
+        s = re.sub ('(href|src)="(../lily-.*?|.*?[.]png)"', '\\1="../\\2"', s)
+
+    # fix xrefs between documents in different directories ad hoc
+    if 'user/out-www/lilypond' in prefix:
+        s = snippets_ref_re.sub ('href="source/input/lsr/lilypond-snippets', s)
+    elif 'input/lsr' in prefix:
+        s = user_ref_re.sub ('href="source/Documentation/user/lilypond\\1', s)
+
+    source_path = os.path.join (os.path.dirname (prefix), 'source')
+    if not os.path.islink (source_path):
+        return s
+    source_val = os.readlink (source_path)
+    return re.sub ('href="source/(.*?)"', lambda m: source_links_replace (m, source_val), s)
+
 def add_header (s):
     """Add header ( and doctype)"""
     if re.search (header_tag, s) == None:
@@ -102,10 +131,6 @@
         s = doctype + s
     return s

-def info_external_ref_remove (s):
-    """Remove info's annoying's indication of referencing external document"""
-    return re.sub (' \((lilypond|lilypond-internals|music-glossary)\)', '', s)
-
 def add_title (s):
     # urg
     # maybe find first node?
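
The hack_urls pass added above rewrites relative href/src attributes so that split manuals find their images and cross-manual references without the per-directory symlinks that Windows cannot create. A rough, self-contained sketch of the first rewriting step only; the prefix, the file name 'lily-1234abcd.png' and the helper name rewrite_local_urls are invented for this example and are not part of the patch:

# Stand-alone sketch of the first re.sub in hack_urls (names and sample
# values invented for illustration).
import re

splitted_docs_re = re.compile ('(input/lsr/out-www/lilypond-snippets'
    '|Documentation/user/out-www/(lilypond|music-glossary|lilypond-program|lilypond-learning))/')

def rewrite_local_urls (s, prefix):
    # Inside a split manual, point page-local images and lily-* files one
    # directory up instead of relying on a symlink in each subdirectory.
    if splitted_docs_re.match (prefix):
        s = re.sub ('(href|src)="(../lily-.*?|.*?[.]png)"', '\\1="../\\2"', s)
    return s

print (rewrite_local_urls ('<img src="lily-1234abcd.png">',
                           'Documentation/user/out-www/lilypond/some-page'))
# prints: <img src="../lily-1234abcd.png">

The remaining substitutions in hack_urls work along the same lines: cross-references into lilypond-snippets or the user manual are redirected through the 'source' link, and when 'source' is an actual symlink its target is inlined via os.readlink.
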
@@ -149,7 +174,7 @@ def find_translations (prefix, lang_ext):
         if lang_ext != e:
             if e in pages_dict[prefix]:
                 available.append (l)
-            elif lang_ext == '' and l.enabled and not prefix in non_copied_pages:
+            elif lang_ext == '' and l.enabled and reduce (lambda x, y: x and y, [not prefix.startswith (s) for s in non_copied_pages]):
                 # English version of missing translated pages will be written
                 missing.append (e)
     return available, missing
@@ -181,13 +206,13 @@ def process_links (s, prefix, lang_ext, file_name, missing, target):
     return page_flavors

 def add_menu (page_flavors, prefix, available, target, translation):
-    for k in page_flavors.keys():
+    for k in page_flavors:
         language_menu = ''
         languages = ''
         if page_flavors[k][0] != '':
             t = translation[page_flavors[k][0]]
         else:
-            t = _
+            t = _doc
         for lang in available:
             lang_file = lang.file_name (os.path.basename (prefix), '.html')
             if language_menu != '':
@@ -210,15 +235,13 @@ def add_menu (page_flavors, prefix, available, target, translation):
     return page_flavors


-def add_html_footer (translation,
-                     package_name = '',
+def add_html_footer (package_name = '',
                      package_version = '',
                      target = 'offline',
                      name_filter = lambda s: s):
     """Add header, footer to a number of HTML files

     Arguments:
-     translation               gettext translations dictionary, with language codes as keys
      package_name=NAME         set package_name to NAME
      package_version=VERSION   set package version to VERSION
      targets=offline|online    set page processing depending on the target
@@ -227,6 +250,7 @@ def add_html_footer (translation,
                                 negotiation
     name_filter                a HTML file name filter
     """
+    translation = langdefs.translation
     localtime = time.strftime ('%c %Z', time.localtime (time.time ()))

     if re.search ("http://", mail_address):
@@ -235,9 +259,9 @@
         mail_address_url= 'mailto:' + mail_address

     versiontup = package_version.split ('.')
-    branch_str = _('stable-branch')
-    if int ( versiontup[1]) % 2:
-        branch_str = _('development-branch')
+    branch_str = _doc ('stable-branch')
+    if int (versiontup[1]) % 2:
+        branch_str = _doc ('development-branch')

     for prefix, ext_list in pages_dict.items ():
         for lang_ext in ext_list:
@@ -247,9 +271,8 @@
             in_f.close()

             s = re.sub ('%', '%%', s)
+            s = hack_urls (s, prefix)
             s = add_header (s)
-            # seems to be no more needed
-            # s = info_external_ref_remove (s)

             ### add footer
             if re.search (footer_tag, s) == None:
@@ -259,19 +282,15 @@
                 page_flavors = process_links (s, prefix, lang_ext, file_name, missing, target)
                 # Add menu after stripping: must not have autoselection for language menu.
                 page_flavors = add_menu (page_flavors, prefix, available, target, translation)
-            # urg, this stuff is outdated and seems useless, let's disable it
-            #else:
-            #    for e in [l.webext for l in langdefs.LANGUAGES]:
-            #        if not e in pages_dict[prefix]:
-            #            page_flavors[langdefs.lang_file_name (prefix, e, '.html')] = s
             subst = dict ([i for i in globals().items() if type (i[1]) is str])
             subst.update (dict ([i for i in locals().items() if type (i[1]) is str]))
-            for k in page_flavors.keys():
-                if page_flavors[k][0] in translation.keys():
-                    for name in subst.keys():
+            for k in page_flavors:
+                if page_flavors[k][0] in translation:
+                    for name in subst:
                         subst[name] = translation[page_flavors[k][0]] (subst[name])
                 subst['footer_name_version'] = subst['footer_name_version'] % subst
                 subst['footer_report_errors'] = subst['footer_report_errors'] % subst
+                subst['footer_suggest_docs'] = subst['footer_suggest_docs'] % subst
                 page_flavors[k][1] = page_flavors[k][1] % subst
                 out_f = open (name_filter (k), 'w')
                 out_f.write (page_flavors[k][1])
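
On the non_copied_pages changes earlier in the patch: the list now holds path prefixes ('Documentation/topdocs', 'input/', ...) rather than complete page names, and the reduce (lambda x, y: x and y, [...]) test in find_translations checks that the page path starts with none of them. For the non-empty list used here, that condition is equivalent to the plainer form sketched below (the helper name is invented for the example); reduce was presumably preferred because any() is only available from Python 2.5 on:

# Equivalent form of the reduce (...) condition in find_translations;
# 'outside_non_copied_pages' is an invented name for this sketch.
non_copied_prefixes = ['Documentation/topdocs', 'Documentation/bibliography', 'input/']

def outside_non_copied_pages (prefix, non_copied_pages):
    # True when the page path starts with none of the listed prefixes,
    # i.e. an English stand-in may be written for missing translations.
    return not any (prefix.startswith (s) for s in non_copied_pages)

print (outside_non_copied_pages ('Documentation/topdocs/out-www/NEWS',
                                 non_copied_prefixes))    # False
print (outside_non_copied_pages ('Documentation/user/out-www/lilypond',
                                 non_copied_prefixes))    # True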