# see process_links()
non_copied_pages = ['Documentation/user/out-www/lilypond-big-page',
'Documentation/user/out-www/lilypond-internals-big-page',
+ 'Documentation/user/out-www/lilypond-learning-big-page',
+ 'Documentation/user/out-www/lilypond-program-big-page',
'Documentation/user/out-www/music-glossary-big-page',
'out-www/examples',
'Documentation/topdocs',
'Documentation/bibliography',
'Documentation/out-www/THANKS',
'Documentation/out-www/DEDICATION',
+ 'Documentation/out-www/devel',
'input/']
def _doc (s):
pages_dict = {}
def build_pages_dict (filelist):
- """Build dictionnary of available translations of each page"""
+ """Build dictionary of available translations of each page"""
global pages_dict
for f in filelist:
m = html_re.match (f)
e = ''
else:
e = g[1]
- if not g[0] in pages_dict.keys():
+ if not g[0] in pages_dict:
pages_dict[g[0]] = [e]
else:
pages_dict[g[0]].append (e)
def source_links_replace (m, source_val):
return 'href="' + os.path.join (source_val, m.group (1)) + '"'
-splitted_docs_re = re.compile ('Documentation/user/out-www/(lilypond|music-glossary|lilypond-program|lilypond-learning)/')
+splitted_docs_re = re.compile ('(input/lsr/out-www/lilypond-snippets|Documentation/user/out-www/(lilypond|music-glossary|lilypond-program|lilypond-learning))/')
-# On systems without symlinks (e.g. Windows), docs are not very usable
-# Get rid of symlinks references here
+snippets_ref_re = re.compile (r'href="(\.\./)?lilypond-snippets')
+user_ref_re = re.compile (r'href="(?:\.\./)?lilypond(|-internals|-learning|-program)')
+
+## Windows does not support symlinks.
+# This function avoids creating symlinks for split HTML manuals
# Get rid of symlinks in GNUmakefile.in (local-WWW-post)
-def replace_symlinks_urls (s, prefix):
+# This also fixes references to missing PNGs that are only present in translated docs
+def hack_urls (s, prefix):
if splitted_docs_re.match (prefix):
- s = re.sub ('(href|src)="(lily-.*?|.*?-flat-.*?|context-example.*?)"', '\\1="../\\2"', s)
+ s = re.sub ('(href|src)="(../lily-.*?|.*?[.]png)"', '\\1="../\\2"', s)
+
+    # Fix cross-references between documents that live in different directories (ad hoc)
+ if 'user/out-www/lilypond' in prefix:
+ s = snippets_ref_re.sub ('href="source/input/lsr/lilypond-snippets', s)
+ elif 'input/lsr' in prefix:
+ s = user_ref_re.sub ('href="source/Documentation/user/lilypond\\1', s)
+
source_path = os.path.join (os.path.dirname (prefix), 'source')
if not os.path.islink (source_path):
return s
return page_flavors
def add_menu (page_flavors, prefix, available, target, translation):
- for k in page_flavors.keys():
+ for k in page_flavors:
language_menu = ''
languages = ''
if page_flavors[k][0] != '':
return page_flavors
-def add_html_footer (translation,
- package_name = '',
+def add_html_footer (package_name = '',
package_version = '',
target = 'offline',
name_filter = lambda s: s):
"""Add header, footer to a number of HTML files
Arguments:
- translation gettext translations dictionary, with language codes as keys
package_name=NAME set package_name to NAME
package_version=VERSION set package version to VERSION
targets=offline|online set page processing depending on the target
negotiation
name_filter a HTML file name filter
"""
+ translation = langdefs.translation
localtime = time.strftime ('%c %Z', time.localtime (time.time ()))
- if re.search ("http://", mail_address):
+ if "http://" in mail_address:
mail_address_url = mail_address
else:
mail_address_url= 'mailto:' + mail_address
if int (versiontup[1]) % 2:
branch_str = _doc ('development-branch')
+ # Initialize dictionaries for string formatting
+ subst = {}
+ subst[''] = dict ([i for i in globals ().items() if type (i[1]) is str])
+ subst[''].update (dict ([i for i in locals ().items() if type (i[1]) is str]))
+ for l in translation:
+ e = langdefs.LANGDICT[l].webext
+ if e:
+ subst[e] = {}
+ for name in subst['']:
+ subst[e][name] = translation[l] (subst[''][name])
+ # Do deeper string formatting as early as possible,
+ # so only one '%' formatting pass is needed later
+ for e in subst:
+ subst[e]['footer_name_version'] = subst[e]['footer_name_version'] % subst[e]
+ subst[e]['footer_report_errors'] = subst[e]['footer_report_errors'] % subst[e]
+ subst[e]['footer_suggest_docs'] = subst[e]['footer_suggest_docs'] % subst[e]
+
for prefix, ext_list in pages_dict.items ():
for lang_ext in ext_list:
file_name = langdefs.lang_file_name (prefix, lang_ext, '.html')
in_f.close()
s = re.sub ('%', '%%', s)
- if target == 'offline':
- s = replace_symlinks_urls (s, prefix)
+ s = hack_urls (s, prefix)
s = add_header (s)
### add footer
page_flavors = process_links (s, prefix, lang_ext, file_name, missing, target)
# Add menu after stripping: must not have autoselection for language menu.
page_flavors = add_menu (page_flavors, prefix, available, target, translation)
- subst = dict ([i for i in globals().items() if type (i[1]) is str])
- subst.update (dict ([i for i in locals().items() if type (i[1]) is str]))
- for k in page_flavors.keys():
- if page_flavors[k][0] in translation.keys():
- for name in subst.keys():
- subst[name] = translation[page_flavors[k][0]] (subst[name])
- subst['footer_name_version'] = subst['footer_name_version'] % subst
- subst['footer_report_errors'] = subst['footer_report_errors'] % subst
- subst['footer_suggest_docs'] = subst['footer_suggest_docs'] % subst
- page_flavors[k][1] = page_flavors[k][1] % subst
+ for k in page_flavors:
+ page_flavors[k][1] = page_flavors[k][1] % subst[page_flavors[k][0]]
out_f = open (name_filter (k), 'w')
out_f.write (page_flavors[k][1])
out_f.close()