From: John Mandereau Date: Wed, 20 Aug 2008 15:29:22 +0000 (+0200) Subject: Rename translated manuals makefiles and add_html_footer.py X-Git-Tag: release/2.11.58-1~32^2~48 X-Git-Url: https://git.donarmstrong.com/?a=commitdiff_plain;h=8d2d1eee8d1562676cd5c0307ddd886a899a37af;p=lilypond.git Rename translated manuals makefiles and add_html_footer.py --- diff --git a/Documentation/de/user/GNUmakefile b/Documentation/de/user/GNUmakefile index fb4ca861ff..50042f40cb 100644 --- a/Documentation/de/user/GNUmakefile +++ b/Documentation/de/user/GNUmakefile @@ -1,4 +1,4 @@ ISOLANG = de depth = ../../.. -LOCALSTEPMAKE_TEMPLATES = lilypond ly doclang +LOCALSTEPMAKE_TEMPLATES = lilypond ly doc-i18n-user include $(depth)/make/stepmake.make diff --git a/Documentation/es/user/GNUmakefile b/Documentation/es/user/GNUmakefile index e93ae28aea..06916b9733 100644 --- a/Documentation/es/user/GNUmakefile +++ b/Documentation/es/user/GNUmakefile @@ -1,5 +1,5 @@ ISOLANG = es depth = ../../.. -LOCALSTEPMAKE_TEMPLATES = lilypond ly doclang +LOCALSTEPMAKE_TEMPLATES = lilypond ly doc-i18n-user BIG_PAGE_MANUALS = lilypond-learning include $(depth)/make/stepmake.make diff --git a/Documentation/fr/user/GNUmakefile b/Documentation/fr/user/GNUmakefile index cff64c97e9..a7defa6c44 100644 --- a/Documentation/fr/user/GNUmakefile +++ b/Documentation/fr/user/GNUmakefile @@ -1,4 +1,4 @@ ISOLANG = fr depth = ../../.. -LOCALSTEPMAKE_TEMPLATES = lilypond ly doclang +LOCALSTEPMAKE_TEMPLATES = lilypond ly doc-i18n-user include $(depth)/make/stepmake.make diff --git a/buildscripts/add_html_footer.py b/buildscripts/add_html_footer.py deleted file mode 100644 index 8536ef3956..0000000000 --- a/buildscripts/add_html_footer.py +++ /dev/null @@ -1,309 +0,0 @@ -#!@PYTHON@ - -""" -Print a nice footer. -""" -import re -import os -import time -import operator - -import langdefs - -# This is to try to make the docball not too big with almost duplicate files -# see process_links() -non_copied_pages = ['Documentation/user/out-www/lilypond-big-page', - 'Documentation/user/out-www/lilypond-internals-big-page', - 'Documentation/user/out-www/lilypond-learning-big-page', - 'Documentation/user/out-www/lilypond-program-big-page', - 'Documentation/user/out-www/music-glossary-big-page', - 'out-www/examples', - 'Documentation/topdocs', - 'Documentation/bibliography', - 'Documentation/out-www/THANKS', - 'Documentation/out-www/DEDICATION', - 'Documentation/out-www/devel', - 'input/'] - -def _doc (s): - return s - -header = r""" -""" - -footer = ''' -
-%(footer_name_version)s
-%(footer_report_errors)s
-%(footer_suggest_docs)s
-''' -footer_name_version = _doc ('This page is for %(package_name)s-%(package_version)s (%(branch_str)s).') -footer_report_errors = _doc ('Report errors to %(mail_address)s.') -# ugh, must not have "_doc" in strings because it is naively replaced with "_" in hacked gettext process -footer_suggest_docs = _doc ('Your suggestions for the documentation are welcome.') - -mail_address = 'http://post.gmane.org/post.php?group=gmane.comp.gnu.lilypond.bugs' -suggest_Docs_url = 'http://lilypond.org/web/devel/participating/documentation-adding' - -header_tag = '' -header_tag_re = re.compile (header_tag) - -footer_tag = '' -footer_tag_re = re.compile (footer_tag) - -lang_available = _doc ("Other languages: %s.") -browser_lang = _doc ('About automatic language selection.') -browser_language_url = "/web/about/browser-language" - -LANGUAGES_TEMPLATE = ''' -

- %(language_available)s
- %(browser_language)s
-''' - - -html_re = re.compile ('(.*?)(?:[.]([^/.]*))?[.]html$') -pages_dict = {} - -def build_pages_dict (filelist): - """Build dictionary of available translations of each page""" - global pages_dict - for f in filelist: - m = html_re.match (f) - if m: - g = m.groups() - if len (g) <= 1 or g[1] == None: - e = '' - else: - e = g[1] - if not g[0] in pages_dict: - pages_dict[g[0]] = [e] - else: - pages_dict[g[0]].append (e) - -def source_links_replace (m, source_val): - return 'href="' + os.path.join (source_val, m.group (1)) + '"' - -splitted_docs_re = re.compile ('(input/lsr/out-www/lilypond-snippets|Documentation/user/out-www/(lilypond|music-glossary|lilypond-program|lilypond-learning))/') - -snippets_ref_re = re.compile (r'href="(\.\./)?lilypond-snippets') -user_ref_re = re.compile (r'href="(?:\.\./)?lilypond(-internals|-learning|-program|(?!-snippets))') - -## Windows does not support symlinks. -# This function avoids creating symlinks for splitted HTML manuals -# Get rid of symlinks in GNUmakefile.in (local-WWW-post) -# this also fixes missing PNGs only present in translated docs -def hack_urls (s, prefix): - if splitted_docs_re.match (prefix): - s = re.sub ('(href|src)="(../lily-.*?|.*?[.]png)"', '\\1="../\\2"', s) - - # fix xrefs between documents in different directories ad hoc - if 'user/out-www/lilypond' in prefix: - s = snippets_ref_re.sub ('href="source/input/lsr/lilypond-snippets', s) - elif 'input/lsr' in prefix: - s = user_ref_re.sub ('href="source/Documentation/user/lilypond\\1', s) - - source_path = os.path.join (os.path.dirname (prefix), 'source') - if not os.path.islink (source_path): - return s - source_val = os.readlink (source_path) - return re.sub ('href="source/(.*?)"', lambda m: source_links_replace (m, source_val), s) - -body_tag_re = re.compile ('(?i)]*)>') -html_tag_re = re.compile ('(?i)') -doctype_re = re.compile ('(?i)\n' - -def add_header (s): - """Add header ( and doctype)""" - if header_tag_re.search (s) == None: - body = '' - (s, n) = body_tag_re.subn (body + header, s, 1) - if not n: - (s, n) = html_tag_re.subn ('' + header, s, 1) - if not n: - s = header + s - - s = header_tag + '\n' + s - - if doctype_re.search (s) == None: - s = doctype + s - return s - -title_tag_re = re.compile ('.*?(.*?)', re.DOTALL) -AT_web_title_re = re.compile ('@WEB-TITLE@') - -def add_title (s): - # urg - # maybe find first node? 
- fallback_web_title = '-- --' - m = title_tag_re.match (s) - if m: - fallback_web_title = m.group (1) - s = AT_web_title_re.sub (fallback_web_title, s) - return s - -footer_insert_re = re.compile ('') -end_body_re = re.compile ('(?i)') -end_html_re = re.compile ('(?i)') - -def add_footer (s, footer_text): - """add footer""" - (s, n) = footer_insert_re.subn (footer_text + '\n' + '', s, 1) - if not n: - (s, n) = end_body_re.subn (footer_text + '\n' + '', s, 1) - if not n: - (s, n) = end_html_re.subn (footer_text + '\n' + '', s, 1) - if not n: - s += footer_text + '\n' - return s - -def find_translations (prefix, lang_ext): - """find available translations of a page""" - available = [] - missing = [] - for l in langdefs.LANGUAGES: - e = l.webext - if lang_ext != e: - if e in pages_dict[prefix]: - available.append (l) - elif lang_ext == '' and l.enabled and reduce (operator.and_, [not prefix.startswith (s) for s in non_copied_pages]): - # English version of missing translated pages will be written - missing.append (e) - return available, missing - -online_links_re = re.compile ('''(href|src)=[\'"]([^/][.]*[^.:\'"]*)(.html|.png)(#[^"\']*|)[\'"]''') -offline_links_re = re.compile ('''href=[\'"]([^/][.]*[^.:\'"]*)(.html)(#[^"\']*|)[\'"]''') - -def process_links (s, prefix, lang_ext, file_name, missing, target): - page_flavors = {} - if target == 'online': - # Strip .html, .png suffix for auto language selection (content - # negotiation). The menu must keep the full extension, so do - # this before adding the menu. - page_flavors[file_name] = \ - [lang_ext, online_links_re.sub ('\\1="\\2\\4"', s)] - elif target == 'offline': - # in LANG doc index: don't rewrite .html suffixes - # as not all .LANG.html pages exist; - # the doc index should be translated and contain the right links - if prefix == 'Documentation/out-www/index': - page_flavors[file_name] = [lang_ext, s] - elif lang_ext == '': - page_flavors[file_name] = [lang_ext, s] - for e in missing: - page_flavors[langdefs.lang_file_name (prefix, e, '.html')] = \ - [e, offline_links_re.sub ('href="\\1.' + e + '\\2\\3"', s)] - else: - page_flavors[file_name] = \ - [lang_ext, - offline_links_re.sub ('href="\\1.' 
+ lang_ext + '\\2\\3"', s)] - return page_flavors - -def add_menu (page_flavors, prefix, available, target, translation): - for k in page_flavors: - language_menu = '' - languages = '' - if page_flavors[k][0] != '': - t = translation[page_flavors[k][0]] - else: - t = _doc - for lang in available: - lang_file = lang.file_name (os.path.basename (prefix), '.html') - if language_menu != '': - language_menu += ', ' - language_menu += '%s' % (lang_file, t (lang.name)) - if target == 'offline': - browser_language = '' - elif target == 'online': - browser_language = t (browser_lang) % browser_language_url - if language_menu: - language_available = t (lang_available) % language_menu - languages = LANGUAGES_TEMPLATE % vars () - # put language menu before '' and '' tags - page_flavors[k][1] = add_footer (page_flavors[k][1], languages) - return page_flavors - - -def add_html_footer (package_name = '', - package_version = '', - target = 'offline', - name_filter = lambda s: s): - """Add header, footer to a number of HTML files - - Arguments: - package_name=NAME set package_name to NAME - package_version=VERSION set package version to VERSION - targets=offline|online set page processing depending on the target - offline is for reading HTML pages locally - online is for hosting the HTML pages on a website with content - negotiation - name_filter a HTML file name filter - """ - translation = langdefs.translation - localtime = time.strftime ('%c %Z', time.localtime (time.time ())) - - if "http://" in mail_address: - mail_address_url = mail_address - else: - mail_address_url= 'mailto:' + mail_address - - versiontup = package_version.split ('.') - branch_str = _doc ('stable-branch') - if int (versiontup[1]) % 2: - branch_str = _doc ('development-branch') - - # Initialize dictionaries for string formatting - subst = {} - subst[''] = dict ([i for i in globals ().items() if type (i[1]) is str]) - subst[''].update (dict ([i for i in locals ().items() if type (i[1]) is str])) - for l in translation: - e = langdefs.LANGDICT[l].webext - if e: - subst[e] = {} - for name in subst['']: - subst[e][name] = translation[l] (subst[''][name]) - # Do deeper string formatting as early as possible, - # so only one '%' formatting pass is needed later - for e in subst: - subst[e]['footer_name_version'] = subst[e]['footer_name_version'] % subst[e] - subst[e]['footer_report_errors'] = subst[e]['footer_report_errors'] % subst[e] - subst[e]['footer_suggest_docs'] = subst[e]['footer_suggest_docs'] % subst[e] - - for prefix, ext_list in pages_dict.items (): - for lang_ext in ext_list: - file_name = langdefs.lang_file_name (prefix, lang_ext, '.html') - in_f = open (file_name) - s = in_f.read() - in_f.close() - - s = s.replace ('%', '%%') - s = hack_urls (s, prefix) - s = add_header (s) - - ### add footer - if footer_tag_re.search (s) == None: - s = add_footer (s, footer_tag + footer) - - available, missing = find_translations (prefix, lang_ext) - page_flavors = process_links (s, prefix, lang_ext, file_name, missing, target) - # Add menu after stripping: must not have autoselection for language menu. 
- page_flavors = add_menu (page_flavors, prefix, available, target, translation) - for k in page_flavors: - page_flavors[k][1] = page_flavors[k][1] % subst[page_flavors[k][0]] - out_f = open (name_filter (k), 'w') - out_f.write (page_flavors[k][1]) - out_f.close() - # if the page is translated, a .en.html symlink is necessary for content negotiation - if target == 'online' and ext_list != ['']: - os.symlink (os.path.basename (prefix) + '.html', name_filter (prefix + '.en.html')) diff --git a/buildscripts/postprocess_html.py b/buildscripts/postprocess_html.py new file mode 100644 index 0000000000..f4c4797f98 --- /dev/null +++ b/buildscripts/postprocess_html.py @@ -0,0 +1,309 @@ +#!@PYTHON@ + +""" +Postprocess HTML files. +""" +import re +import os +import time +import operator + +import langdefs + +# This is to try to make the docball not too big with almost duplicate files +# see process_links() +non_copied_pages = ['Documentation/user/out-www/lilypond-big-page', + 'Documentation/user/out-www/lilypond-internals-big-page', + 'Documentation/user/out-www/lilypond-learning-big-page', + 'Documentation/user/out-www/lilypond-program-big-page', + 'Documentation/user/out-www/music-glossary-big-page', + 'out-www/examples', + 'Documentation/topdocs', + 'Documentation/bibliography', + 'Documentation/out-www/THANKS', + 'Documentation/out-www/DEDICATION', + 'Documentation/out-www/devel', + 'input/'] + +def _doc (s): + return s + +header = r""" +""" + +footer = ''' +
+%(footer_name_version)s
+%(footer_report_errors)s
+%(footer_suggest_docs)s
+''' +footer_name_version = _doc ('This page is for %(package_name)s-%(package_version)s (%(branch_str)s).') +footer_report_errors = _doc ('Report errors to %(mail_address)s.') +# ugh, must not have "_doc" in strings because it is naively replaced with "_" in hacked gettext process +footer_suggest_docs = _doc ('Your suggestions for the documentation are welcome.') + +mail_address = 'http://post.gmane.org/post.php?group=gmane.comp.gnu.lilypond.bugs' +suggest_Docs_url = 'http://lilypond.org/web/devel/participating/documentation-adding' + +header_tag = '' +header_tag_re = re.compile (header_tag) + +footer_tag = '' +footer_tag_re = re.compile (footer_tag) + +lang_available = _doc ("Other languages: %s.") +browser_lang = _doc ('About automatic language selection.') +browser_language_url = "/web/about/browser-language" + +LANGUAGES_TEMPLATE = ''' +

+ %(language_available)s
+ %(browser_language)s
+''' + + +html_re = re.compile ('(.*?)(?:[.]([^/.]*))?[.]html$') +pages_dict = {} + +def build_pages_dict (filelist): + """Build dictionary of available translations of each page""" + global pages_dict + for f in filelist: + m = html_re.match (f) + if m: + g = m.groups() + if len (g) <= 1 or g[1] == None: + e = '' + else: + e = g[1] + if not g[0] in pages_dict: + pages_dict[g[0]] = [e] + else: + pages_dict[g[0]].append (e) + +def source_links_replace (m, source_val): + return 'href="' + os.path.join (source_val, m.group (1)) + '"' + +splitted_docs_re = re.compile ('(input/lsr/out-www/lilypond-snippets|Documentation/user/out-www/(lilypond|music-glossary|lilypond-program|lilypond-learning))/') + +snippets_ref_re = re.compile (r'href="(\.\./)?lilypond-snippets') +user_ref_re = re.compile (r'href="(?:\.\./)?lilypond(-internals|-learning|-program|(?!-snippets))') + +## Windows does not support symlinks. +# This function avoids creating symlinks for splitted HTML manuals +# Get rid of symlinks in GNUmakefile.in (local-WWW-post) +# this also fixes missing PNGs only present in translated docs +def hack_urls (s, prefix): + if splitted_docs_re.match (prefix): + s = re.sub ('(href|src)="(../lily-.*?|.*?[.]png)"', '\\1="../\\2"', s) + + # fix xrefs between documents in different directories ad hoc + if 'user/out-www/lilypond' in prefix: + s = snippets_ref_re.sub ('href="source/input/lsr/lilypond-snippets', s) + elif 'input/lsr' in prefix: + s = user_ref_re.sub ('href="source/Documentation/user/lilypond\\1', s) + + source_path = os.path.join (os.path.dirname (prefix), 'source') + if not os.path.islink (source_path): + return s + source_val = os.readlink (source_path) + return re.sub ('href="source/(.*?)"', lambda m: source_links_replace (m, source_val), s) + +body_tag_re = re.compile ('(?i)]*)>') +html_tag_re = re.compile ('(?i)') +doctype_re = re.compile ('(?i)\n' + +def add_header (s): + """Add header ( and doctype)""" + if header_tag_re.search (s) == None: + body = '' + (s, n) = body_tag_re.subn (body + header, s, 1) + if not n: + (s, n) = html_tag_re.subn ('' + header, s, 1) + if not n: + s = header + s + + s = header_tag + '\n' + s + + if doctype_re.search (s) == None: + s = doctype + s + return s + +title_tag_re = re.compile ('.*?(.*?)', re.DOTALL) +AT_web_title_re = re.compile ('@WEB-TITLE@') + +def add_title (s): + # urg + # maybe find first node? 
+ fallback_web_title = '-- --' + m = title_tag_re.match (s) + if m: + fallback_web_title = m.group (1) + s = AT_web_title_re.sub (fallback_web_title, s) + return s + +footer_insert_re = re.compile ('') +end_body_re = re.compile ('(?i)') +end_html_re = re.compile ('(?i)') + +def add_footer (s, footer_text): + """add footer""" + (s, n) = footer_insert_re.subn (footer_text + '\n' + '', s, 1) + if not n: + (s, n) = end_body_re.subn (footer_text + '\n' + '', s, 1) + if not n: + (s, n) = end_html_re.subn (footer_text + '\n' + '', s, 1) + if not n: + s += footer_text + '\n' + return s + +def find_translations (prefix, lang_ext): + """find available translations of a page""" + available = [] + missing = [] + for l in langdefs.LANGUAGES: + e = l.webext + if lang_ext != e: + if e in pages_dict[prefix]: + available.append (l) + elif lang_ext == '' and l.enabled and reduce (operator.and_, [not prefix.startswith (s) for s in non_copied_pages]): + # English version of missing translated pages will be written + missing.append (e) + return available, missing + +online_links_re = re.compile ('''(href|src)=[\'"]([^/][.]*[^.:\'"]*)(.html|.png)(#[^"\']*|)[\'"]''') +offline_links_re = re.compile ('''href=[\'"]([^/][.]*[^.:\'"]*)(.html)(#[^"\']*|)[\'"]''') + +def process_links (s, prefix, lang_ext, file_name, missing, target): + page_flavors = {} + if target == 'online': + # Strip .html, .png suffix for auto language selection (content + # negotiation). The menu must keep the full extension, so do + # this before adding the menu. + page_flavors[file_name] = \ + [lang_ext, online_links_re.sub ('\\1="\\2\\4"', s)] + elif target == 'offline': + # in LANG doc index: don't rewrite .html suffixes + # as not all .LANG.html pages exist; + # the doc index should be translated and contain the right links + if prefix == 'Documentation/out-www/index': + page_flavors[file_name] = [lang_ext, s] + elif lang_ext == '': + page_flavors[file_name] = [lang_ext, s] + for e in missing: + page_flavors[langdefs.lang_file_name (prefix, e, '.html')] = \ + [e, offline_links_re.sub ('href="\\1.' + e + '\\2\\3"', s)] + else: + page_flavors[file_name] = \ + [lang_ext, + offline_links_re.sub ('href="\\1.' 
+ lang_ext + '\\2\\3"', s)] + return page_flavors + +def add_menu (page_flavors, prefix, available, target, translation): + for k in page_flavors: + language_menu = '' + languages = '' + if page_flavors[k][0] != '': + t = translation[page_flavors[k][0]] + else: + t = _doc + for lang in available: + lang_file = lang.file_name (os.path.basename (prefix), '.html') + if language_menu != '': + language_menu += ', ' + language_menu += '%s' % (lang_file, t (lang.name)) + if target == 'offline': + browser_language = '' + elif target == 'online': + browser_language = t (browser_lang) % browser_language_url + if language_menu: + language_available = t (lang_available) % language_menu + languages = LANGUAGES_TEMPLATE % vars () + # put language menu before '' and '' tags + page_flavors[k][1] = add_footer (page_flavors[k][1], languages) + return page_flavors + + +def process_html_files (package_name = '', + package_version = '', + target = 'offline', + name_filter = lambda s: s): + """Add header, footer and tweak links to a number of HTML files + + Arguments: + package_name=NAME set package_name to NAME + package_version=VERSION set package version to VERSION + targets=offline|online set page processing depending on the target + offline is for reading HTML pages locally + online is for hosting the HTML pages on a website with content + negotiation + name_filter a HTML file name filter + """ + translation = langdefs.translation + localtime = time.strftime ('%c %Z', time.localtime (time.time ())) + + if "http://" in mail_address: + mail_address_url = mail_address + else: + mail_address_url= 'mailto:' + mail_address + + versiontup = package_version.split ('.') + branch_str = _doc ('stable-branch') + if int (versiontup[1]) % 2: + branch_str = _doc ('development-branch') + + # Initialize dictionaries for string formatting + subst = {} + subst[''] = dict ([i for i in globals ().items() if type (i[1]) is str]) + subst[''].update (dict ([i for i in locals ().items() if type (i[1]) is str])) + for l in translation: + e = langdefs.LANGDICT[l].webext + if e: + subst[e] = {} + for name in subst['']: + subst[e][name] = translation[l] (subst[''][name]) + # Do deeper string formatting as early as possible, + # so only one '%' formatting pass is needed later + for e in subst: + subst[e]['footer_name_version'] = subst[e]['footer_name_version'] % subst[e] + subst[e]['footer_report_errors'] = subst[e]['footer_report_errors'] % subst[e] + subst[e]['footer_suggest_docs'] = subst[e]['footer_suggest_docs'] % subst[e] + + for prefix, ext_list in pages_dict.items (): + for lang_ext in ext_list: + file_name = langdefs.lang_file_name (prefix, lang_ext, '.html') + in_f = open (file_name) + s = in_f.read() + in_f.close() + + s = s.replace ('%', '%%') + s = hack_urls (s, prefix) + s = add_header (s) + + ### add footer + if footer_tag_re.search (s) == None: + s = add_footer (s, footer_tag + footer) + + available, missing = find_translations (prefix, lang_ext) + page_flavors = process_links (s, prefix, lang_ext, file_name, missing, target) + # Add menu after stripping: must not have autoselection for language menu. 
+ page_flavors = add_menu (page_flavors, prefix, available, target, translation) + for k in page_flavors: + page_flavors[k][1] = page_flavors[k][1] % subst[page_flavors[k][0]] + out_f = open (name_filter (k), 'w') + out_f.write (page_flavors[k][1]) + out_f.close() + # if the page is translated, a .en.html symlink is necessary for content negotiation + if target == 'online' and ext_list != ['']: + os.symlink (os.path.basename (prefix) + '.html', name_filter (prefix + '.en.html')) diff --git a/buildscripts/www_post.py b/buildscripts/www_post.py index 146f8955b8..6fdb84f660 100644 --- a/buildscripts/www_post.py +++ b/buildscripts/www_post.py @@ -13,7 +13,7 @@ import re import langdefs import mirrortree -import add_html_footer +import postprocess_html package_name, package_version, outdir, targets = sys.argv[1:] targets = targets.split (' ') @@ -89,10 +89,10 @@ if 'online' in targets: f.write ('#.htaccess\nDirectoryIndex index\n') f.close () -add_html_footer.build_pages_dict (html_files) +postprocess_html.build_pages_dict (html_files) for t in targets: sys.stderr.write ("Processing HTML pages for %s target...\n" % t) - add_html_footer.add_html_footer ( + postprocess_html.process_html_files ( package_name = package_name, package_version = package_version, target = t, diff --git a/make/doc-i18n-user-rules.make b/make/doc-i18n-user-rules.make new file mode 100644 index 0000000000..91f09c47a1 --- /dev/null +++ b/make/doc-i18n-user-rules.make @@ -0,0 +1,33 @@ +$(outdir)/%/index.html: $(outdir)/%.texi $(XREF_MAPS_DIR)/%.$(ISOLANG).xref-map $(OUT_PNG_IMAGES) $(outdir)/version.itexi + mkdir -p $(dir $@) + $(TEXI2HTML) --I=$(outdir) $(TEXI2HTML_FLAGS) --output=$(dir $@) --prefix=index --split=section $(TEXI2HTML_INIT) $< + cp $(top-src-dir)/Documentation/lilypond*.css $(dir $@) + +$(outdir)/%-big-page.html: $(outdir)/%.texi $(XREF_MAPS_DIR)/%.$(ISOLANG).xref-map $(OUT_PNG_IMAGES) $(outdir)/version.itexi + $(TEXI2HTML) --I=$(outdir) -D bigpage $(TEXI2HTML_FLAGS) --output=$@ $(TEXI2HTML_INIT) $< + cp $(top-src-dir)/Documentation/lilypond*.css $(dir $@) + +$(outdir)/%.pdftexi: $(outdir)/%.texi $(outdir)/version.itexi + $(PYTHON) $(buildscript-dir)/texi-gettext.py $(ISOLANG) $< + +$(outdir)/%.pdf: $(outdir)/%.pdftexi $(outdir)/version.itexi + cd $(outdir); texi2pdf $(TEXI2PDF_FLAGS) $(TEXINFO_PAPERSIZE_OPTION) $(notdir $*).pdftexi + +$(outdir)/version.%: $(top-src-dir)/VERSION + echo '@macro version'> $@ + echo $(TOPLEVEL_VERSION)>> $@ + echo '@end macro'>> $@ + +$(outdir)/%.png: $(top-build-dir)/Documentation/user/$(outdir)/%.png + ln -f $< $@ + +$(XREF_MAPS_DIR)/%.$(ISOLANG).xref-map: $(outdir)/%.texi + $(PYTHON) $(buildscript-dir)/extract_texi_filenames.py -o $(XREF_MAPS_DIR) $< + +# This makes sure lilypond-doc gettext domain has been compiled +# before lilypond-book runs +$(TELY_FILES): doc-po + +$(MASTER_TEXI_FILES): $(ITELY_FILES) $(ITEXI_FILES) + +.SECONDARY: diff --git a/make/doc-i18n-user-targets.make b/make/doc-i18n-user-targets.make new file mode 100644 index 0000000000..7f8f6799b0 --- /dev/null +++ b/make/doc-i18n-user-targets.make @@ -0,0 +1,14 @@ +default: + +local-WWW-1: $(MASTER_TEXI_FILES) $(PDF_FILES) $(XREF_MAPS_FILES) + +# BIG_PAGE_HTML_FILES is defined differently in each language makefile +local-WWW-2: $(DEEP_HTML_FILES) $(BIG_PAGE_HTML_FILES) + find $(outdir) -name '*.html' | xargs grep -L 'UNTRANSLATED NODE: IGNORE ME' | xargs $(PYTHON) $(buildscript-dir)/html-gettext.py $(ISOLANG) + find $(outdir) -name '*.html' | xargs grep -L --label="" 'UNTRANSLATED NODE: IGNORE ME' | sed 
's!$(outdir)/!!g' | xargs $(PYTHON) $(buildscript-dir)/mass-link.py --prepend-suffix .$(ISOLANG) hard $(outdir) $(top-build-dir)/Documentation/user/$(outdir) $(TELY_FILES:%.tely=%.pdf) + find $(outdir) \( -name 'lily-*.png' -o -name 'lily-*.ly' \) | sed 's!$(outdir)/!!g' | xargs $(PYTHON) $(buildscript-dir)/mass-link.py hard $(outdir) $(top-build-dir)/Documentation/user/$(outdir) + +doc-po: + $(MAKE) -C $(depth)/Documentation/po out=www messages + +.PHONY: doc-po diff --git a/make/doc-i18n-user-vars.make b/make/doc-i18n-user-vars.make new file mode 100644 index 0000000000..bee28ee46b --- /dev/null +++ b/make/doc-i18n-user-vars.make @@ -0,0 +1,36 @@ +# ISOLANG must be defined + +LANGS = $(shell $(PYTHON) $(buildscript-dir)/langdefs.py) + +SOURCE_PNG_IMAGES=$(shell ls $(top-src-dir)/Documentation/user/*.png) +OUT_PNG_IMAGES=$(SOURCE_PNG_IMAGES:$(top-src-dir)/Documentation/user/%.png=$(outdir)/%.png) $(outdir)/context-example.png + +TELY_FILES := $(call src-wildcard,*.tely) +MASTER_TEXI_FILES := $(TELY_FILES:%.tely=$(outdir)/%.texi) +BIG_PAGE_HTML_FILES := $(BIG_PAGE_MANUALS:%=$(outdir)/%-big-page.html) +DEEP_HTML_FILES := $(TELY_FILES:%.tely=$(outdir)/%/index.html) +PDF_FILES := $(TELY_FILES:%.tely=$(outdir)/%.pdf) + +ITELY_FILES := $(call src-wildcard,*.itely) +ITEXI_FILES := $(call src-wildcard,*.itexi) + +DOCUMENTATION_INCLUDES = \ + -I $(top-src-dir)/Documentation/user \ + -I $(top-build-dir)/Documentation/user/$(outdir) + +LILYPOND_BOOK_INCLUDES += $(DOCUMENTATION_INCLUDES) +MAKEINFO_FLAGS += --force --enable-encoding $(DOCUMENTATION_INCLUDES) +MAKEINFO = LANG= $(MAKEINFO_PROGRAM) $(MAKEINFO_FLAGS) + +# texi2html xref map files +XREF_MAPS_DIR=$(top-build-dir)/out/xref-maps +XREF_MAPS_FILES=$(TELY_FILES:%.tely=$(XREF_MAPS_DIR)/%.$(ISOLANG).xref-map) + +# texi2html flags +TEXI2HTML_INIT= --init-file=$(top-src-dir)/lilypond-texi2html.init +TEXI2HTML_LANG=--lang=$(ISOLANG) +TEXI2HTML_FLAGS += $(TEXI2HTML_LANG) $(DOCUMENTATION_INCLUDES) \ + -I $(XREF_MAPS_DIR) +TEXI2HTML = LANG= $(TEXI2HTML_PROGRAM) + +TEXI2PDF_FLAGS += -q --batch $(DOCUMENTATION_INCLUDES) diff --git a/make/doclang-rules.make b/make/doclang-rules.make deleted file mode 100644 index 91f09c47a1..0000000000 --- a/make/doclang-rules.make +++ /dev/null @@ -1,33 +0,0 @@ -$(outdir)/%/index.html: $(outdir)/%.texi $(XREF_MAPS_DIR)/%.$(ISOLANG).xref-map $(OUT_PNG_IMAGES) $(outdir)/version.itexi - mkdir -p $(dir $@) - $(TEXI2HTML) --I=$(outdir) $(TEXI2HTML_FLAGS) --output=$(dir $@) --prefix=index --split=section $(TEXI2HTML_INIT) $< - cp $(top-src-dir)/Documentation/lilypond*.css $(dir $@) - -$(outdir)/%-big-page.html: $(outdir)/%.texi $(XREF_MAPS_DIR)/%.$(ISOLANG).xref-map $(OUT_PNG_IMAGES) $(outdir)/version.itexi - $(TEXI2HTML) --I=$(outdir) -D bigpage $(TEXI2HTML_FLAGS) --output=$@ $(TEXI2HTML_INIT) $< - cp $(top-src-dir)/Documentation/lilypond*.css $(dir $@) - -$(outdir)/%.pdftexi: $(outdir)/%.texi $(outdir)/version.itexi - $(PYTHON) $(buildscript-dir)/texi-gettext.py $(ISOLANG) $< - -$(outdir)/%.pdf: $(outdir)/%.pdftexi $(outdir)/version.itexi - cd $(outdir); texi2pdf $(TEXI2PDF_FLAGS) $(TEXINFO_PAPERSIZE_OPTION) $(notdir $*).pdftexi - -$(outdir)/version.%: $(top-src-dir)/VERSION - echo '@macro version'> $@ - echo $(TOPLEVEL_VERSION)>> $@ - echo '@end macro'>> $@ - -$(outdir)/%.png: $(top-build-dir)/Documentation/user/$(outdir)/%.png - ln -f $< $@ - -$(XREF_MAPS_DIR)/%.$(ISOLANG).xref-map: $(outdir)/%.texi - $(PYTHON) $(buildscript-dir)/extract_texi_filenames.py -o $(XREF_MAPS_DIR) $< - -# This makes sure lilypond-doc gettext 
domain has been compiled -# before lilypond-book runs -$(TELY_FILES): doc-po - -$(MASTER_TEXI_FILES): $(ITELY_FILES) $(ITEXI_FILES) - -.SECONDARY: diff --git a/make/doclang-targets.make b/make/doclang-targets.make deleted file mode 100644 index 7f8f6799b0..0000000000 --- a/make/doclang-targets.make +++ /dev/null @@ -1,14 +0,0 @@ -default: - -local-WWW-1: $(MASTER_TEXI_FILES) $(PDF_FILES) $(XREF_MAPS_FILES) - -# BIG_PAGE_HTML_FILES is defined differently in each language makefile -local-WWW-2: $(DEEP_HTML_FILES) $(BIG_PAGE_HTML_FILES) - find $(outdir) -name '*.html' | xargs grep -L 'UNTRANSLATED NODE: IGNORE ME' | xargs $(PYTHON) $(buildscript-dir)/html-gettext.py $(ISOLANG) - find $(outdir) -name '*.html' | xargs grep -L --label="" 'UNTRANSLATED NODE: IGNORE ME' | sed 's!$(outdir)/!!g' | xargs $(PYTHON) $(buildscript-dir)/mass-link.py --prepend-suffix .$(ISOLANG) hard $(outdir) $(top-build-dir)/Documentation/user/$(outdir) $(TELY_FILES:%.tely=%.pdf) - find $(outdir) \( -name 'lily-*.png' -o -name 'lily-*.ly' \) | sed 's!$(outdir)/!!g' | xargs $(PYTHON) $(buildscript-dir)/mass-link.py hard $(outdir) $(top-build-dir)/Documentation/user/$(outdir) - -doc-po: - $(MAKE) -C $(depth)/Documentation/po out=www messages - -.PHONY: doc-po diff --git a/make/doclang-vars.make b/make/doclang-vars.make deleted file mode 100644 index bee28ee46b..0000000000 --- a/make/doclang-vars.make +++ /dev/null @@ -1,36 +0,0 @@ -# ISOLANG must be defined - -LANGS = $(shell $(PYTHON) $(buildscript-dir)/langdefs.py) - -SOURCE_PNG_IMAGES=$(shell ls $(top-src-dir)/Documentation/user/*.png) -OUT_PNG_IMAGES=$(SOURCE_PNG_IMAGES:$(top-src-dir)/Documentation/user/%.png=$(outdir)/%.png) $(outdir)/context-example.png - -TELY_FILES := $(call src-wildcard,*.tely) -MASTER_TEXI_FILES := $(TELY_FILES:%.tely=$(outdir)/%.texi) -BIG_PAGE_HTML_FILES := $(BIG_PAGE_MANUALS:%=$(outdir)/%-big-page.html) -DEEP_HTML_FILES := $(TELY_FILES:%.tely=$(outdir)/%/index.html) -PDF_FILES := $(TELY_FILES:%.tely=$(outdir)/%.pdf) - -ITELY_FILES := $(call src-wildcard,*.itely) -ITEXI_FILES := $(call src-wildcard,*.itexi) - -DOCUMENTATION_INCLUDES = \ - -I $(top-src-dir)/Documentation/user \ - -I $(top-build-dir)/Documentation/user/$(outdir) - -LILYPOND_BOOK_INCLUDES += $(DOCUMENTATION_INCLUDES) -MAKEINFO_FLAGS += --force --enable-encoding $(DOCUMENTATION_INCLUDES) -MAKEINFO = LANG= $(MAKEINFO_PROGRAM) $(MAKEINFO_FLAGS) - -# texi2html xref map files -XREF_MAPS_DIR=$(top-build-dir)/out/xref-maps -XREF_MAPS_FILES=$(TELY_FILES:%.tely=$(XREF_MAPS_DIR)/%.$(ISOLANG).xref-map) - -# texi2html flags -TEXI2HTML_INIT= --init-file=$(top-src-dir)/lilypond-texi2html.init -TEXI2HTML_LANG=--lang=$(ISOLANG) -TEXI2HTML_FLAGS += $(TEXI2HTML_LANG) $(DOCUMENTATION_INCLUDES) \ - -I $(XREF_MAPS_DIR) -TEXI2HTML = LANG= $(TEXI2HTML_PROGRAM) - -TEXI2PDF_FLAGS += -q --batch $(DOCUMENTATION_INCLUDES)