4 # WARNING: this script can't find files included in a different directory
# Run `command` through a shell pipe and (presumably) return its output.
# NOTE(review): this excerpt is incomplete -- the lines between the ones
# shown (e.g. the read of the pipe and the return) are not visible here,
# so only the visible behavior is documented: the command is opened with
# os.popen, and a diagnostic is printed when the pipe fails.
13 def read_pipe (command):
15     pipe = os.popen (command)
# Failure path: report which command's pipe could not be read.
18         print "pipe failed: %(command)s" % locals ()
# ---------------------------------------------------------------------------
# Command-line parsing and global configuration.
# Short options: -n -o NAME -d DIR -b BLURB -i BLURB -l ISOLANG
# Long options:  --skeleton  --gettext
# Remaining arguments are the Texinfo files to process.
22 optlist, texi_files = getopt.getopt(sys.argv[1:],'no:d:b:i:l:',['skeleton', 'gettext'])
# Boolean flags are detected by membership of the (option, value) pair
# in optlist; valueless options appear as ('-n', '') etc.
23 process_includes = not ('-n', '') in optlist # -n don't process @include's in texinfo files
25 make_gettext = ('--gettext', '') in optlist # --gettext generate a node list from a Texinfo source
26 make_skeleton = ('--skeleton', '') in optlist # --skeleton extract the node tree from a Texinfo source
# Default PO template output name; overridden by -o below.
28 output_name = 'doc.pot'
30 # @untranslated should be defined as a macro in Texinfo source
31 node_blurb = '''@untranslated
# Record the current git HEAD so translators can track which revision
# their translation corresponds to (interpolated into intro_blurb).
34 head_committish = read_pipe ('git rev-parse HEAD')
# Header blurb written at the top of each generated skeleton file; the
# %(...)s placeholders are filled from locals() later on.
35 intro_blurb = '''\\input texinfo @c -*- coding: utf-8; mode: texinfo%(doclang)s -*-
36 @c This file is part of %(topfile)s
38 Translation of GIT committish: %(head_committish)s
39 When revising a translation, copy the HEAD committish of the
40 version that you are working on. See TRANSLATION for details.
45 @c -- SKELETON FILE --
# Option-value loop (loop header not visible in this excerpt; `x` is
# presumably each (option, value) pair from optlist -- TODO confirm).
49 if x[0] == '-o': # -o NAME set PO output file name to NAME
51 elif x[0] == '-d': # -d DIR set working directory to DIR
52     print 'FIXME: this is evil. use cd DIR && texi-langutils ...'
53     # even better, add a sane -o option
55 elif x[0] == '-b': # -b BLURB set blurb written at each node to BLURB
57 elif x[0] == '-i': # -i BLURB set blurb written at beginning of each file to BLURB
59 elif x[0] == '-l': # -l ISOLANG set documentlanguage to ISOLANG
60     doclang = '; documentlanguage: ' + x[1]
# Matches Texinfo menu entries ("* Node::") and a large set of @-commands
# with their arguments, in multiline mode; used for skeleton generation.
62 texinfo_with_menus_re = re.compile (r"^(\*) +([^:\n]+)::.*?$|^@(afourpaper|author|bye|contents|copying|end copying|divClass|divEnd|divId|documentencoding|documentlanguage|finalout|ifnottex|end ifnottex|imageClickable|imageFloat|imageId|image|include|menu|end menu|node|quotation|end quotation|ref|rgloss|setfilename|settitle|set|(?:unnumbered|appendix)(?:(?:sub){0,2}sec)?|titlefont|titlepage|end titlepage|title|sourceimage|subtitle|top|vskip|chapter|(?:sub){0,2}section|(?:major|chap|(?:sub){0,2})heading|c) *(([^ \n].*)|$)", re.M)
# Reduced pattern (no menus) used when only node/section titles and
# @include/@rglos references are needed (gettext-only pass).
64 texinfo_re = re.compile (r"^@(include|node|(?:unnumbered|appendix)(?:(?:sub){0,2}sec)?|top|chapter|(?:sub){0,2}section|(?:major|chap|(?:sub){0,2})heading) *(.+?)$|@(rglos){(.+?)}", re.M)
# Matches LilyPond variable assignments, %-comments, and quoted context
# ids in \new/\context declarations inside verbatim ly snippets.
66 ly_string_re = re.compile (r'^([a-zA-Z]+)[\t ]*=|%+[\t ]*(.*)$|\\(?:new|context)\s+(?:[a-zA-Z]*?(?:Staff(?:Group)?|Voice|FiguredBass|FretBoards|Names|Devnull))\s+=\s+"?([a-zA-Z]+)"?\s+')
# Markers that introduce a verbatim LilyPond block, for .ly files (LSR
# style) and for @lilypond[...verbatim] blocks in Texinfo respectively.
67 lsr_verbatim_ly_re = re.compile (r'% begin verbatim$')
68 texinfo_verbatim_ly_re = re.compile (r'^@lilypond\[.*?verbatim')
# Process one Texinfo (or .ly) source file:
#   * optionally scan verbatim LilyPond snippets for translatable
#     variable names, comments and context ids (written to output_file
#     as gettext `_(r"...")` entries),
#   * optionally write a skeleton copy of the file (node tree, menus,
#     section commands) into the current directory,
#   * collect @include'd files and recurse into them.
# NOTE(review): this excerpt is sampled -- several interior lines are
# missing, so comments below describe only the visible statements.
#
# Parameters (as visible):
#   texifilename  -- path of the source file to read
#   i_blurb       -- blurb written at the top of a skeleton file
#   n_blurb       -- blurb written at each node (usage not visible here)
#   write_skeleton-- whether to emit the skeleton copy
#   topfile       -- top-level file name, interpolated into i_blurb
#   output_file   -- open file receiving gettext entries, or None
#   scan_ly       -- also scan @lilypond verbatim blocks for ly strings
70 def process_texi (texifilename, i_blurb, n_blurb, write_skeleton, topfile, output_file=None, scan_ly=False):
72     f = open (texifilename, 'r')
# Strip any leading '../' so reported locations are repo-relative.
75     printedfilename = texifilename.replace ('../','')
78     # process ly var names and comments
# ly scanning runs for .ly files always, and for Texinfo files only
# when scan_ly was requested (English docs only, see driver below).
79     if output_file and (scan_ly or texifilename.endswith ('.ly')):
80         lines = texifile.splitlines ()
82         in_verb_ly_block = False
# .ly files mark verbatim regions with '% begin verbatim'; Texinfo
# files use @lilypond[...verbatim] ... @end lilypond.
83         if texifilename.endswith ('.ly'):
84             verbatim_ly_re = lsr_verbatim_ly_re
86             verbatim_ly_re = texinfo_verbatim_ly_re
# State machine over lines: toggle in_verb_ly_block at the verbatim
# start marker and at '@end lilypond', and extract strings in between.
87         for i in range (len (lines)):
88             if verbatim_ly_re.search (lines[i]):
89                 in_verb_ly_block = True
90             elif lines[i].startswith ('@end lilypond'):
91                 in_verb_ly_block = False
92             elif in_verb_ly_block:
# Each match yields at most one of: a variable name, a %-comment,
# or a \new/\context id; emit a located gettext entry for each.
93                 for (var, comment, context_id) in ly_string_re.findall (lines[i]):
95                     output_file.write ('# ' + printedfilename + ':' + \
96                                        str (i + 1) + ' (variable)\n_(r"' + var + '")\n')
98                     output_file.write ('# ' + printedfilename + ':' + \
99                                        str (i + 1) + ' (comment)\n_(r"' + \
# Comments may contain double quotes; escape them for the r"" literal.
100                                        comment.replace ('"', '\\"') + '")\n')
102                     output_file.write ('# ' + printedfilename + ':' + \
103                                        str (i + 1) + ' (context id)\n_(r"' + \
106     # process Texinfo node names and section titles
# Skeleton output goes to a same-named file in the current directory.
108         g = open (os.path.basename (texifilename), 'w')
# Make topfile/doclang/head_committish visible to the % interpolation.
110         subst.update (locals ())
111         g.write (i_blurb % subst)
# Full scan (menus included) of the source; each `item` is one match
# tuple from texinfo_with_menus_re.
112         tutu = texinfo_with_menus_re.findall (texifile)
113         node_just_defined = ''
# Menu entry: copy it into the skeleton.
116                 g.write ('* ' + item[1] + '::\n')
# @rglos{...}: emit its argument as a translatable glossary reference.
117             elif output_file and item[4] == 'rglos':
118                 output_file.write ('_(r"' + item[5] + '") # @rglos in ' + printedfilename + '\n')
119             elif item[2] == 'menu':
121             elif item[2] == 'end menu':
122                 g.write ('@end menu\n\n')
# Rewrite @documentlanguage with the target language (-l ISOLANG).
123             elif item[2] == 'documentlanguage':
124                 g.write ('@documentlanguage ' + doclang + '\n')
# Other @-commands: re-emit them; omit the separating space when the
# argument starts with '{' or is empty.
127                 if item[3].startswith ('{') or not item[3].strip ():
129                 g.write ('@' + item[2] + space + item[3] + '\n')
# After a @node, record the original node name via @translationof so
# cross-references to the untranslated name keep working.
130                 if node_just_defined:
131                     g.write ('@translationof ' + node_just_defined + '\n')
133                     node_just_defined = ''
# @include: queue the included file for recursive processing.
134                 elif item[2] == 'include':
135                     includes.append (item[3])
# Node/section titles are also translatable strings.
138                         output_file.write ('# @' + item[2] + ' in ' + \
139                                            printedfilename + '\n_(r"' + item[3].strip () + '")\n')
140                     if item[2] == 'node':
141                         node_just_defined = item[3].strip ()
# Gettext-only pass (no skeleton): scan with the reduced pattern.
146             toto = texinfo_re.findall (texifile)
148                 if item[0] == 'include':
149                     includes.append(item[1])
150                 elif item[2] == 'rglos':
151                     output_file.write ('# @rglos in ' + printedfilename + '\n_(r"' + item[3] + '")\n')
153                     output_file.write ('# @' + item[0] + ' in ' + printedfilename + '\n_(r"' + item[1].strip () + '")\n')
# Recurse into each @include, resolved relative to this file's dir.
156         dir = os.path.dirname (texifilename)
157         for item in includes:
158             process_texi (os.path.join (dir, item.strip ()), i_blurb, n_blurb, write_skeleton, topfile, output_file, scan_ly)
# Matches the WARNING at the top: a missing include (e.g. in another
# directory) surfaces here as an I/O error, reported but not fatal.
159     except IOError, (errno, strerror):
160         sys.stderr.write ("I/O error(%s): %s: %s\n" % (errno, texifilename, strerror))
# ---------------------------------------------------------------------------
# Driver: finalize the blurbs, then run either the gettext pass
# (node_list + xgettext -> doc.pot) or the skeleton pass.
163 if intro_blurb != '':
164     intro_blurb += '\n\n'
166     node_blurb = '\n' + node_blurb + '\n\n'
# Gettext mode (presumably guarded by `if make_gettext:` on a line not
# visible here -- TODO confirm): collect every translatable string into
# a temporary Python-syntax file, then let xgettext build the POT.
168     node_list_filename = 'node_list'
169     node_list = open (node_list_filename, 'w')
170     node_list.write ('# -*- coding: utf-8 -*-\n')
171     for texi_file in texi_files:
172         # Urgly: scan ly comments and variable names only in English doco
# Translated doc trees live under Documentation/<lang>/; anything not
# in one of these directories is treated as English and gets ly
# scanning enabled.
175                           and not 'Documentation/de/' in texi_file
176                           and not 'Documentation/es/' in texi_file
177                           and not 'Documentation/fr/' in texi_file
178                           and not 'Documentation/ja/' in texi_file
179                           and not 'Documentation/nl/' in texi_file
180                           and not 'Documentation/po/' in texi_file
182         process_texi (texi_file, intro_blurb, node_blurb, make_skeleton,
183                       os.path.basename (texi_file), node_list,
184                       scan_ly=is_english_doc)
# Navigation words emitted by texi2html etc. must be translatable too.
185     for word in ('Up:', 'Next:', 'Previous:', 'Appendix ', 'Footnotes', 'Table of Contents'):
186         node_list.write ('_(r"' + word + '")\n')
# The node list is Python syntax on purpose: xgettext -L Python picks
# up every _(r"...") call and writes the POT template.
188     os.system ('xgettext -c -L Python --no-location -o ' + output_name + ' ' + node_list_filename)
# Skeleton-only mode: no output_file, so process_texi only writes the
# skeleton copies (and recurses into includes).
190     for texi_file in texi_files:
191         process_texi (texi_file, intro_blurb, node_blurb, make_skeleton,
192                       os.path.basename (texi_file))