X-Git-Url: https://git.donarmstrong.com/?a=blobdiff_plain;f=buildscripts%2Fadd_html_footer.py;fp=buildscripts%2Fadd_html_footer.py;h=addf3c7d7a6c3f5a4114d94ff35d60cda97671e5;hb=b567811cb0fa5a805aabb9e8fb903763c13e8c29;hp=bd7d8409cee5af622e6792fde092fda347776e0f;hpb=58d7b40d1152b001d5700d14f32a2a7701c82799;p=lilypond.git

diff --git a/buildscripts/add_html_footer.py b/buildscripts/add_html_footer.py
index bd7d8409ce..addf3c7d7a 100644
--- a/buildscripts/add_html_footer.py
+++ b/buildscripts/add_html_footer.py
@@ -6,6 +6,7 @@ Print a nice footer.
 import re
 import os
 import time
+import operator
 
 import langdefs
 
@@ -50,7 +51,10 @@ mail_address = 'http://post.gmane.org/post.php?group=gmane.comp.gnu.lilypond.bug
 suggest_Docs_url = 'http://lilypond.org/web/devel/participating/documentation-adding'
 
 header_tag = '<!-- header_tag -->'
+header_tag_re = re.compile (header_tag)
+
 footer_tag = '<!-- footer_tag -->'
+footer_tag_re = re.compile (footer_tag)
 
 lang_available = _doc ("Other languages: %s.")
 browser_lang = _doc ('About automatic language selection.')
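The recurring change in this patch is hoisting regular expressions that were previously written inline into module-level re.compile () calls, so each pattern object is built once at import time and reused for every page. A minimal sketch of the idiom, reusing the header_tag marker from this file; the helper name is illustrative and not part of the script, and note that re already caches compiled patterns internally, so the gain is mostly tidiness and avoiding repeated lookups:

    import re

    header_tag = '<!-- header_tag -->'
    header_tag_re = re.compile (header_tag)

    def already_processed (page):
        # same test as re.search (header_tag, page), but reuses the pattern object
        return header_tag_re.search (page) is not None

    assert already_processed ('<!-- header_tag -->\n<html></html>')
    assert not already_processed ('<html></html>')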
@@ -90,6 +94,9 @@ def source_links_replace (m, source_val):
 splitted_docs_re = re.compile ('(input/lsr/out-www/lilypond-snippets|Documentation/user/out-www/(lilypond|music-glossary|lilypond-program|lilypond-learning))/')
 
 snippets_ref_re = re.compile (r'href="(\.\./)?lilypond-snippets')
+# TODO: master has ../lily- !
+src_href_re = re.compile ('(href|src)="(lily-.*?|.*?[.]png)"')
+source_link_re = re.compile ('href="source/(.*?)"')
 
 ## Windows does not support symlinks.
 # This function avoids creating symlinks for splitted HTML manuals
@@ -97,7 +104,7 @@ snippets_ref_re = re.compile (r'href="(\.\./)?lilypond-snippets')
 # this also fixes missing PNGs only present in translated docs
 def hack_urls (s, prefix):
     if splitted_docs_re.match (prefix):
-        s = re.sub ('(href|src)="(../lily-.*?|.*?[.]png)"', '\\1="../\\2"', s)
+        s = src_href_re.sub ('\\1="../\\2"', s)
 
     # fix Snippets xrefs ad hoc
     s = snippets_ref_re.sub ('href="source/input/lsr/lilypond-snippets', s)
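hack_urls () rewrites image and snippet references on pages that belong to one of the split manuals by prepending ../ through the \1 and \2 backreferences. Note the TODO above it: the precompiled pattern matches lily-... where the inline pattern it replaces matched ../lily-..., so the two are not strictly identical. A small illustration of the substitution on a made-up fragment of HTML (the sample string is hypothetical, the pattern and replacement are the ones from the hunk):

    import re

    src_href_re = re.compile ('(href|src)="(lily-.*?|.*?[.]png)"')

    s = '<img src="lily-1a2b3c.png" alt=""><a href="notation.html">Notation</a>'
    s = src_href_re.sub ('\\1="../\\2"', s)
    # the PNG reference now points one directory up; the plain .html link is untouched
    assert s == '<img src="../lily-1a2b3c.png" alt=""><a href="notation.html">Notation</a>'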
@@ -106,59 +113,52 @@ def hack_urls (s, prefix):
     if not os.path.islink (source_path):
         return s
     source_val = os.readlink (source_path)
-    return re.sub ('href="source/(.*?)"', lambda m: source_links_replace (m, source_val), s)
+    return source_link_re.sub (lambda m: source_links_replace (m, source_val), s)
+
+body_tag_re = re.compile ('(?i)<body([^>]*)>')
+html_tag_re = re.compile ('(?i)<html>')
+doctype_re = re.compile ('(?i)<!DOCTYPE')
+doctype = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">\n'
 
 def add_header (s):
-    """Add header (<body> and doctype)"""
-    if re.search (header_tag, s) == None:
-        body = '<body bgcolor="white" text="black" \\1>'
-        s = re.sub ('(?i)<body(.*)>', body, s)
-        if re.search ('(?i)<body', s):
-            s = re.sub ('(?i)<body[^>]*>', body + header, s, 1)
-        elif re.search ('(?i)<html', s):
-            s = re.sub ('(?i)<html>', '<html>' + header, s, 1)
-        else:
-            s = header + s
+    """Add header (<body> and doctype)"""
+    if header_tag_re.search (s) == None:
+        body = '<body bgcolor="white" text="black" \\1>'
+        (s, n) = body_tag_re.subn (body + header, s, 1)
+        if not n:
+            (s, n) = html_tag_re.subn ('<html>' + header, s, 1)
+            if not n:
+                s = header + s
 
         s = header_tag + '\n' + s
 
-        if re.search ('(?i)<!DOCTYPE', s) == None:
-            doctype = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">\n'
+        if doctype_re.search (s) == None:
             s = doctype + s
         return s
 
+title_tag_re = re.compile ('.*?<title>(.*?)</title>', re.DOTALL)
+AT_web_title_re = re.compile ('@WEB-TITLE@')
+
 def add_title (s):
     # urg
     # maybe find first node?
     fallback_web_title = '-- --'
-    m = re.match ('.*?<title>(.*?)</title>', s, re.DOTALL)
+    m = title_tag_re.match (s)
     if m:
         fallback_web_title = m.group (1)
-    s = re.sub ('@WEB-TITLE@', fallback_web_title, s)
+    s = AT_web_title_re.sub (fallback_web_title, s)
     return s
 
-info_nav_bar = re.compile (r'<div class="node">\s*<p>\s*(.+?)<hr>\s*</div>', re.M | re.S)
-info_footnote_hr = re.compile (r'<hr>\s*(<div class="footnote">)?\s*<h4>', re.M | re.I)
+end_body_re = re.compile ('(?i)</body>')
+end_html_re = re.compile ('(?i)</html>')
 
 def add_footer (s):
-    """add footer
-
-also add navigation bar to bottom of Info HTML pages"""
-    m = info_nav_bar.search (s)
-    if m:
-        # avoid duplicate <hr> in case there are footnotes at the end of the Info HTML page
-        if info_footnote_hr.search (s):
-            custom_footer = '<br>\n<div class="node">' + m.group (1) + '</div>\n' + footer
-        else:
-            custom_footer = '<hr>\n<br>\n<div class="node">' + m.group (1) + '</div>\n' + footer
-    else:
-        custom_footer = footer
-    if re.search ('(?i)</body', s):
-        s = re.sub ('(?i)</body>', footer_tag + custom_footer + '\n' + '</body>', s, 1)
-    elif re.search ('(?i)</html', s):
-        s = re.sub ('(?i)</html>', footer_tag + custom_footer + '\n' + '</html>', s, 1)
-    else:
-        s += footer_tag + custom_footer + '\n'
+    """add footer"""
+    (s, n) = end_body_re.subn (footer_tag + footer + '\n' + '</body>', s, 1)
+    if not n:
+        (s, n) = end_html_re.subn (footer_tag + footer + '\n' + '</html>', s, 1)
+        if not n:
+            s += footer_tag + footer + '\n'
     return s
 
 def find_translations (prefix, lang_ext):
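Several of the rewritten functions (add_header and add_footer above, add_menu below) rely on the same idiom: pattern.subn () returns a (new_string, count) pair, so the count tells the caller whether a fallback insertion point is needed, where the old code had to run a separate re.search before each re.sub. A self-contained sketch of that fallback chain; the helper name and the sample strings are mine, not the script's:

    import re

    end_body_re = re.compile ('(?i)</body>')
    end_html_re = re.compile ('(?i)</html>')

    def append_before_close (page, text):
        # try to place `text` just before </body>, else before </html>,
        # else append it at the very end of the page
        (page, n) = end_body_re.subn (text + '</body>', page, 1)
        if not n:
            (page, n) = end_html_re.subn (text + '</html>', page, 1)
            if not n:
                page += text
        return page

    assert append_before_close ('<html><body>x</body></html>', 'F') == '<html><body>xF</body></html>'
    assert append_before_close ('no markup at all', 'F') == 'no markup at allF'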
@@ -170,35 +170,37 @@ def find_translations (prefix, lang_ext):
         if lang_ext != e:
             if e in pages_dict[prefix]:
                 available.append (l)
-            elif lang_ext == '' and l.enabled and reduce (lambda x, y: x and y, [not prefix.startswith (s) for s in non_copied_pages]):
+            elif lang_ext == '' and l.enabled and reduce (operator.and_, [not prefix.startswith (s) for s in non_copied_pages]):
                 # English version of missing translated pages will be written
                 missing.append (e)
     return available, missing
 
+online_links_re = re.compile ('''(href|src)=[\'"]([^/][.]*[^.:\'"]*)(.html|.png)(#[^"\']*|)[\'"]''')
+offline_links_re = re.compile ('''href=[\'"]([^/][.]*[^.:\'"]*)(.html)(#[^"\']*|)[\'"]''')
+
 def process_links (s, prefix, lang_ext, file_name, missing, target):
     page_flavors = {}
     if target == 'online':
         # Strip .html, .png suffix for auto language selection (content
         # negotiation). The menu must keep the full extension, so do
         # this before adding the menu.
-        page_flavors[file_name] = [lang_ext, re.sub (
-            '''(href|src)=[\'"]([^/][.]*[^.:\'"]*)(.html|.png)(#[^"\']*|)[\'"]''',
-            '\\1="\\2\\4"', s)]
+        page_flavors[file_name] = \
+            [lang_ext, online_links_re.sub ('\\1="\\2\\4"', s)]
     elif target == 'offline':
-        # in LANG doc index: don't rewrite .html suffixes as not all .LANG.html pages exist
+        # in LANG doc index: don't rewrite .html suffixes
+        # as not all .LANG.html pages exist;
+        # the doc index should be translated and contain the right links
         if prefix == 'Documentation/out-www/index':
            page_flavors[file_name] = [lang_ext, s]
         elif lang_ext == '':
             page_flavors[file_name] = [lang_ext, s]
             for e in missing:
-                page_flavors[langdefs.lang_file_name (prefix, e, '.html')] = [e, re.sub (
-                    '''href=[\'"]([^/][.]*[^.:\'"]*)(.html)(#[^"\']*|)[\'"]''',
-                    'href="\\1.' + e + '\\2\\3"', s)]
+                page_flavors[langdefs.lang_file_name (prefix, e, '.html')] = \
+                    [e, offline_links_re.sub ('href="\\1.' + e + '\\2\\3"', s)]
         else:
-            page_flavors[file_name] = [lang_ext, re.sub (
-                '''href=[\'"]([^/][.]*[^.:\'"]*)(.html)(#[^"\']*|)[\'"]''',
-                'href="\\1.' + lang_ext + '\\2\\3"', s)]
+            page_flavors[file_name] = \
+                [lang_ext,
+                 offline_links_re.sub ('href="\\1.'
+                                       + lang_ext + '\\2\\3"', s)]
     return page_flavors
 
 def add_menu (page_flavors, prefix, available, target, translation):
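In find_translations the anonymous lambda x, y: x and y is replaced by operator.and_, which is what the new import operator at the top of the patch is for. Both fold the list of booleans down to a single truth value. A small check of the equivalence, with made-up values for non_copied_pages and prefix (the real values live elsewhere in the script); the idiomatic spelling today would be all (...), which also handles an empty list and short-circuits, but on the non-empty boolean lists built here the result is the same:

    import operator
    from functools import reduce  # builtin in Python 2; needed on Python 3

    non_copied_pages = ['input/lsr/', 'Documentation/topdocs']  # illustrative only
    prefix = 'Documentation/user/out-www/lilypond'

    flags = [not prefix.startswith (s) for s in non_copied_pages]
    assert reduce (operator.and_, flags) == reduce (lambda x, y: x and y, flags) == all (flags)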
@@ -222,12 +224,11 @@ def add_menu (page_flavors, prefix, available, target, translation):
             language_available = t (lang_available) % language_menu
             languages = LANGUAGES_TEMPLATE % vars ()
         # put language menu before '</body>' and '</html>' tags
-        if re.search ('(?i)</body', page_flavors[k][1]):
-            page_flavors[k][1] = re.sub ('(?i)</body>', languages + '</body>', page_flavors[k][1], 1)
-        elif re.search ('(?i)</html', page_flavors[k][1]):
-            page_flavors[k][1] = re.sub ('(?i)</html>', languages + '</html>', page_flavors[k][1], 1)
-        else:
-            page_flavors[k][1] += languages
+        (page_flavors[k][1], n) = end_body_re.subn (languages + '</body>', page_flavors[k][1], 1)
+        if not n:
+            (page_flavors[k][1], n) = end_html_re.subn (languages + '</html>', page_flavors[k][1], 1)
+            if not n:
+                page_flavors[k][1] += languages
 
     return page_flavors
 
@@ -267,12 +268,12 @@ def add_html_footer (translation,
         s = in_f.read()
         in_f.close()
 
-        s = re.sub ('%', '%%', s)
+        s = s.replace ('%', '%%')
         s = hack_urls (s, prefix)
         s = add_header (s)
 
         ### add footer
-        if re.search (footer_tag, s) == None:
+        if footer_tag_re.search (s) == None:
             s = add_footer (s)
 
             available, missing = find_translations (prefix, lang_ext)
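The last hunk swaps re.sub ('%', '%%', s) for the plain string method s.replace ('%', '%%'); for a fixed literal substring a regex buys nothing. The doubling itself is presumably there because the assembled page is later pushed through %-style substitution, so literal percent signs in the page text have to be escaped first. A tiny sketch of that interaction, with an invented footer template; none of these values come from the script:

    s = '<p>Download size: 25% smaller</p>'
    s = s.replace ('%', '%%')
    template = s + '%(footer)s'
    assert template % {'footer': '<!-- footer_tag -->'} == '<p>Download size: 25% smaller</p><!-- footer_tag -->'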