# This is to try to make the docball not too big with almost duplicate files.
# Pages listed here are skipped when writing English fallback copies of
# missing translations (see find_translations).
non_copied_pages = ['Documentation/user/out-www/lilypond-big-page',
                    'Documentation/user/out-www/lilypond-internals-big-page',
                    'Documentation/user/out-www/music-glossary-big-page',
                    'Documentation/topdocs/out-www/NEWS',
                    'Documentation/topdocs/out-www/INSTALL',
                    'Documentation/bibliography/out-www/index',
                    'Documentation/out-www/THANKS',
                    'Documentation/out-www/DEDICATION',
                    # fixed typo: was 'ou-www', inconsistent with all other entries
                    'Documentation/topdocs/out-www/AUTHORS']
29 <div style="background-color: #e8ffe8; padding: 2; border: #c0ffc0 1px solid;">
32 This page is for %(package_name)s-%(package_version)s (%(branch_str)s). <br>
34 <address><font size="-1">
35 Report errors to <a href="%(mail_address_url)s">%(mail_address)s</a>.</font></address>
# Bug-report contact; add_html_footer links it as-is when it matches
# "http://", otherwise it builds a mailto: URL from it.
mail_address = 'http://post.gmane.org/post.php?group=gmane.comp.gnu.lilypond.bugs'

# Sentinel markers inserted into processed pages; their presence tells a
# later run that the header/footer has already been added.
header_tag = '<!-- header_tag -->'
footer_tag = '<!-- footer_tag -->'
# Localized strings for the per-page language menu; '_' is gettext (bound
# elsewhere in this module).  %(language_menu)s is filled in later, once the
# actual menu for a page is known (see add_menu).
language_available = _ ("Other languages: %s.") % "%(language_menu)s"
browser_language = _ ("Using <A HREF='%s'>automatic language selection</A>.") \
                   % "/web/about/browser-language"
52 LANGUAGES_TEMPLATE = '''\
54 %(language_available)s
61 html_re = re.compile ('(.*?)(?:[.]([^/.]*))?[.]html$')
def build_pages_dict (filelist):
    """Build dictionary of available translations of each page.

    Maps each page prefix to the list of language extensions found for it
    in filelist (per html_re: 'foo.fr.html' -> prefix 'foo', ext 'fr').
    """
    # NOTE(review): excerpt -- the lines iterating filelist and binding
    # g (html_re match groups) and e (language extension) are not shown;
    # verify against the full file.
            if len (g) <= 1 or g[1] == None:
            # [missing line(s): presumably sets e to '' for untranslated pages]
            if not g[0] in pages_dict.keys():
                # first language extension seen for this prefix
                pages_dict[g[0]] = [e]
            # [missing line: presumably an 'else:' branch]
                pages_dict[g[0]].append (e)
    # NOTE(review): excerpt -- the enclosing 'def' line is not shown; this
    # body appears to take an HTML page string s and prepend header/doctype.
    """Add header (<BODY> and doctype)"""
    # Only act when the page has not already been processed (sentinel absent).
    if re.search (header_tag, s) == None:
        body = '<BODY BGCOLOR=WHITE TEXT=BLACK>'
        # Normalize a bare <body> tag first, then hook the header onto the
        # body tag (once); pages without <body> get it on <html> instead.
        s = re.sub ('(?i)<body>', body, s)
        if re.search ('(?i)<BODY', s):
            s = re.sub ('(?i)<body[^>]*>', body + header, s, 1)
        elif re.search ('(?i)<html', s):
            s = re.sub ('(?i)<html>', '<HTML>' + header, s, 1)
        # [missing line(s): presumably an 'else:' fallback]
        # Mark the page as processed.
        s = header_tag + '\n' + s

    # Prepend a doctype when the page has none.
    if re.search ('(?i)<!DOCTYPE', s) == None:
        doctype = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">\n'
def info_external_ref_remove (s):
    """Remove info's annoying indication of referencing an external document.

    Strips the ' (lilypond)', ' (lilypond-internals)' or ' (music-glossary)'
    suffix that appears before a closing </a> in cross-document links.
    """
    # Raw string: '\(' in a normal string literal is an invalid escape
    # sequence (a warning in modern Python); behavior is otherwise identical.
    return re.sub (r' \((lilypond|lilypond-internals|music-glossary)\)</a>', '</a>', s)
    # maybe find first node?
    # Use the page's own <title> contents when present, else a placeholder,
    # to fill in the @WEB-TITLE@ template marker.
    fallback_web_title = '-- --'
    m = re.match ('.*?<title>(.*?)</title>', s, re.DOTALL)
    # [missing line: presumably 'if m:' guards the next statement]
        fallback_web_title = m.group (1)
    s = re.sub ('@WEB-TITLE@', fallback_web_title, s)
113 info_nav_bar = re.compile (r'<div class="node">\s*<p>\s*<a name=".+?"></a>(.+?)<hr>\s*</div>', re.M | re.S)
    # NOTE(review): excerpt -- the enclosing 'def' line and the start of its
    # docstring are not shown.
    also add navigation bar to bottom of Info HTML pages"""
    m = info_nav_bar.search (s)
    # [missing line: presumably 'if m:']
        # Duplicate the top navigation bar just above the footer.
        custom_footer = '<br><hr>\n<div class="node">\n<p>' + m.group (1) + '</div>\n' + footer
    # [missing line: presumably 'else:']
        custom_footer = footer
    # Insert the footer (preceded by its sentinel tag) before the closing
    # </body> or </html> tag; append it when neither tag exists.
    if re.search ('(?i)</body', s):
        s = re.sub ('(?i)</body>', footer_tag + custom_footer + '\n' + '</BODY>', s, 1)
    elif re.search ('(?i)</html', s):
        s = re.sub ('(?i)</html>', footer_tag + custom_footer + '\n' + '</HTML>', s, 1)
    # [missing line: presumably 'else:']
        s += footer_tag + custom_footer + '\n'
def find_translations (prefix, lang_ext):
    """Find available translations of a page.

    Returns (available, missing): languages for which a translated page
    exists, and language extensions whose page must fall back to English.
    """
    # NOTE(review): excerpt -- the initialization of available/missing and
    # several loop-body lines are not shown; verify against the full file.
    for l in langdefs.LANGUAGES:
        # [missing line(s): presumably binds e to the language's extension]
        if e in pages_dict[prefix]:
        # [missing line: presumably available.append (l)]
        elif lang_ext == '' and l.enabled and not prefix in non_copied_pages:
            # English version of missing translated pages will be written
            # [missing line: presumably missing.append (e)]
    return available, missing
def process_links (s, prefix, lang_ext, file_name, missing, target):
    """Rewrite intra-site hrefs in page s according to the target.

    'online' strips .html/.png suffixes so the web server's content
    negotiation picks the language; 'offline' hard-codes language extensions
    into the links.  Builds page_flavors, a dict from output file name to
    page text (return statement not shown in this excerpt).
    """
    # NOTE(review): excerpt -- the initialization of page_flavors and several
    # other lines are not shown; verify against the full file.
    if target == 'online':
        # Strip .html, .png suffix for auto language selection (content
        # negotiation). The menu must keep the full extension, so do
        # this before adding the menu.
        page_flavors[file_name] = re.sub (
            '''(href|src)=[\'"]([^/][.]*[^.:\'"]*)(.html|.png)(#[^"\']*|)[\'"]''',
            # [missing line: the replacement argument is not shown]
    elif target == 'offline':
        # in LANG doc index: don't rewrite .html suffixes as not all .LANG.html pages exist
        # the doc index should be translated and contain the right links
        if prefix == 'Documentation/out-www/index':
            page_flavors[file_name] = s
        # [missing line(s)]
            page_flavors[file_name] = s
            # For each missing translation e, emit an English copy whose
            # links point at the e-language pages.
            page_flavors[langdefs.lang_file_name (prefix, e, '.html')] = re.sub (
                '''href=[\'"]([^/][.]*[^.:\'"]*)(.html)(#[^"\']*|)[\'"]''',
                'href="\\1.' + e + '\\2\\3"', s)
        # [missing line(s)]
            page_flavors[file_name] = re.sub (
                '''href=[\'"]([^/][.]*[^.:\'"]*)(.html)(#[^"\']*|)[\'"]''',
                'href="\\1.' + lang_ext + '\\2\\3"', s)
def add_menu (page_flavors, prefix, available):
    """Insert the language-selection menu into every flavor of a page.

    NOTE(review): excerpt -- the initialization of language_menu and the
    return statement are not shown; verify against the full file.
    """
    # Build a comma-separated list of links to the available translations.
    for lang in available:
        lang_file = lang.file_name (os.path.basename (prefix), '.html')
        if language_menu != '':
            language_menu += ', '
        language_menu += '<a href="%s">%s</a>' % (lang_file, lang.name)
    # [missing line(s)]
    # Render the menu template with the local variables built above.
    languages = LANGUAGES_TEMPLATE % vars ()
    # put language menu before '</body>' and '</html>' tags
    for k in page_flavors.keys():
        if re.search ('(?i)</body', page_flavors[k]):
            page_flavors[k] = re.sub ('(?i)</body>', languages + '</BODY>', page_flavors[k], 1)
        elif re.search ('(?i)</html', page_flavors[k]):
            page_flavors[k] = re.sub ('(?i)</html>', languages + '</HTML>', page_flavors[k], 1)
        # [missing line: presumably 'else:']
            page_flavors[k] += languages
def add_html_footer (package_name = '',
                     package_version = '',
                     name_filter = lambda s: s):
    """Add header, footer to a number of HTML files

    Arguments:
     package_name=NAME         set package_name to NAME
     package_version=VERSION   set package version to VERSION
     targets=offline|online    set page processing depending on the target
          offline is for reading HTML pages locally
          online is for hosting the HTML pages on a website with content
            negotiation
     name_filter               a HTML file name filter
    """
    # NOTE(review): excerpt -- the signature appears to be missing a 'target'
    # parameter (used below); verify against the full file.
    localtime = time.strftime ('%c %Z', time.localtime (time.time ()))

    # Use the bug address directly when it is already a URL, else mailto:.
    if re.search ("http://", mail_address):
        mail_address_url = mail_address
    # [missing line: presumably 'else:']
        mail_address_url= 'mailto:' + mail_address

    # Odd minor version numbers denote the development branch.
    # NOTE(review): raises IndexError for single-component versions --
    # confirm callers always pass MAJOR.MINOR[.PATCH].
    versiontup = package_version.split ('.')
    branch_str = 'stable-branch'
    if int ( versiontup[1]) % 2:
        branch_str = 'development-branch'

    for prefix, ext_list in pages_dict.items ():
        for lang_ext in ext_list:
            file_name = langdefs.lang_file_name (prefix, lang_ext, '.html')
            in_f = open (file_name)
            # [missing line(s): presumably s = in_f.read (); in_f.close ()]
            # Escape literal % so the later '%'-formatting pass is safe.
            s = re.sub ('%', '%%', s)

            # seems to be no more needed
            # s = info_external_ref_remove (s)

            # Skip pages already processed (footer sentinel present).
            if re.search (footer_tag, s) == None:
                # [missing line(s): presumably header/footer insertion]
                available, missing = find_translations (prefix, lang_ext)
                page_flavors = process_links (s, prefix, lang_ext, file_name, missing, target)
                # Add menu after stripping: must not have autoselection for language menu.
                page_flavors = add_menu (page_flavors, prefix, available)
            # urg, this stuff is outdated and seems useless, let's disable it
            # for e in [l.webext for l in langdefs.LANGUAGES]:
            #     if not e in pages_dict[prefix]:
            #         page_flavors[langdefs.lang_file_name (prefix, e, '.html')] = s

            # [missing line: presumably initializes subst with module globals]
            subst.update (locals())
            # Fill in %(...)s placeholders and write each flavor to disk.
            for k in page_flavors.keys():
                page_flavors[k] = page_flavors[k] % subst
                # [missing line(s)]
                out_f = open (name_filter (k), 'w')
                out_f.write (page_flavors[k])
                # [missing line: presumably out_f.close ()]

            # if the page is translated, a .en.html symlink is necessary for content negotiation
            if target == 'online' and ext_list != ['']:
                os.symlink (os.path.basename (prefix) + '.html', name_filter (prefix + '.en.html'))