DEST = os.path.join ('input', 'lsr')
NEW_LYS = os.path.join ('input', 'new')
+# directory holding translated texidoc snippet headers, merged into
+# copied .ly files by copy_ly below
+TEXIDOCS = os.path.join ('input', 'texidocs')
TAGS = []
# NR 1
+# Insert a '% begin verbatim' marker right after the snippet's \header
+# block (end_header_re is defined elsewhere in this file).
def mark_verbatim_section (ly_code):
return end_header_re.sub ('\\1% begin verbatim\n', ly_code, 1)
-# add tags to ly files from LSR
-add_tags_re = re.compile ('\\header\\s*{', re.M)
+begin_header_re = re.compile ('\\header\\s*{', re.M)
+# begin_header_re matches the opening of a \header block; renamed so it
+# can be shared by add_tags and the texidoc merge in copy_ly.
+# add tags to ly files from LSR
def add_tags (ly_code, tags):
- return add_tags_re.sub ('\\g<0>\n lsrtags = "' + tags + '"\n', ly_code, 1)
+ return begin_header_re.sub ('\\g<0>\n lsrtags = "' + tags + '"\n', ly_code, 1)
def copy_ly (srcdir, name, tags):
+ # Copy snippet `name` from srcdir into DEST: prepend the appropriate
+ # license header, inject lsrtags (LSR snippets only) and any
+ # translated texidoc headers from TEXIDOCS, then sanity-check the
+ # result with convert-ly and a safe-mode lilypond run.
global unsafe
dest = os.path.join (DEST, name)
tags = ', '.join (tags)
s = open (os.path.join (srcdir, name)).read ()
+
+ texidoc_translations_path = os.path.join (TEXIDOCS,
+ os.path.splitext (name)[0] + '.texidoc')
+ if os.path.exists (texidoc_translations_path):
+ texidoc_translations = open (texidoc_translations_path).read ()
+ # splice the translations just after the \header opening brace
+ s = begin_header_re.sub ('\\g<0>\n' + texidoc_translations, s, 1)
+
if in_dir in srcdir:
s = LY_HEADER_LSR + add_tags (s, tags)
else:
s = LY_HEADER_NEW + s
+
s = mark_verbatim_section (s)
open (dest, 'w').write (s)
- e = os.system('convert-ly -e ' + dest)
+
+ # quote dest: snippet filenames may contain shell metacharacters
+ e = os.system ("convert-ly -e '%s'" % dest)
if e:
unconverted.append (dest)
if os.path.exists (dest + '~'):
os.remove (dest + '~')
# -V seems to make unsafe snippets fail nicer/sooner
- e = os.system ('nice lilypond -V -dno-print-pages -dsafe -o /tmp/lsrtest ' + dest)
+ # NOTE(review): the 'nice' prefix was dropped from this command
+ # relative to the old line — confirm that is intentional.
+ e = os.system ("lilypond -V -dno-print-pages -dsafe -o /tmp/lsrtest '%s'" % dest)
if e:
unsafe.append (dest)
depth = ..
-SUBDIRS = regression tutorial mutopia manual lsr new
+SUBDIRS = regression tutorial mutopia manual lsr new texidocs
+# texidocs: new subdirectory holding translated texidoc snippet headers
examples = typography-demo les-nereides wilhelmus proportional bach-schenker cary
--- /dev/null
+depth=../..
+
+# Ship every translated *.texidoc file with the distribution tarball.
+EXTRA_DIST_FILES=$(call src-wildcard,*.texidoc)
+
+include $(depth)/make/stepmake.make
## override from cmd line to speed up.
ANTI_ALIAS_FACTOR=2
LILYPOND_JOBS=$(if $(CPU_COUNT),-djob-count=$(CPU_COUNT),)
-LILYPOND_BOOK_LILYPOND_FLAGS=-dbackend=eps --formats=ps,png,pdf $(LILYPOND_JOBS) -dinclude-eps-fonts -dgs-load-fonts --header=texidoc -I $(top-src-dir)/input/manual -dcheck-internal-types -ddump-signatures -danti-alias-factor=$(ANTI_ALIAS_FACTOR)
+# When building a translated manual (ISOLANG set), also extract the
+# language-specific texidoc header, e.g. --header=texidocde.
+LANG_TEXIDOC_FLAG=$(if $(ISOLANG),--header=texidoc$(ISOLANG),)
+LILYPOND_BOOK_LILYPOND_FLAGS=-dbackend=eps --formats=ps,png,pdf $(LILYPOND_JOBS) -dinclude-eps-fonts -dgs-load-fonts --header=texidoc $(LANG_TEXIDOC_FLAG) -I $(top-src-dir)/input/manual -dcheck-internal-types -ddump-signatures -danti-alias-factor=$(ANTI_ALIAS_FACTOR)
LILYPOND_BOOK_VERBOSE = --verbose
LILYPOND_BOOK_INFO_IMAGES_DIR = $(if $(INFO_IMAGES_DIR),--info-images-dir=$(INFO_IMAGES_DIR),)
LILYPOND_BOOK_FLAGS = $(LILYPOND_BOOK_VERBOSE) $(LILYPOND_BOOK_INFO_IMAGES_DIR)
FRAGMENT = 'fragment'
HTML = 'html'
INDENT = 'indent'
+# option key for the document language, filled from @documentlanguage
+LANG = 'lang'
LATEX = 'latex'
LAYOUT = 'layout'
LINE_WIDTH = 'line-width'
NOINDENT,
PRINTFILENAME,
TEXIDOC,
+ LANG,
+ # LANG enables language-suffixed texidoc includes for translations
VERBATIM,
FONTLOAD,
FILENAME,
option_string)
return []
+# Capture the ISO code following @documentlanguage (e.g. 'de', 'es');
+# the second group just stops the match at a space or end of line.
+texinfo_lang_re = re.compile ('(?m)^@documentlanguage (.*?)( |$)')
+
def set_default_options (source):
global default_ly_options
if not default_ly_options.has_key (LINE_WIDTH):
default_ly_options[LINE_WIDTH] = \
'''%.0f\\pt''' % textwidth
elif global_options.format == TEXINFO:
+ m = texinfo_lang_re.search (source)
+ if m and not m.group (1).startswith ('en'):
+ default_ly_options[LANG] = m.group (1)
+ else:
+ # English (or no @documentlanguage): empty suffix, so the plain
+ # .texidoc file is used for includes
+ default_ly_options[LANG] = ''
for (k, v) in texinfo_line_widths.items ():
# FIXME: @layout is usually not in
# chunk #0:
base = self.basename ()
if TEXIDOC in self.option_dict:
texidoc = base + '.texidoc'
- if os.path.exists (texidoc):
+ # e.g. foo.texidocde when LANG == 'de'; empty LANG keeps foo.texidoc
+ # NOTE(review): assumes default_ly_options[LANG] is set — it is only
+ # assigned on the TEXINFO path in set_default_options; verify other
+ # formats cannot reach this branch without a KeyError.
+ translated_texidoc = texidoc + default_ly_options[LANG]
+ if os.path.exists (translated_texidoc):
+ str += '@include %(translated_texidoc)s\n\n' % vars ()
+ elif os.path.exists (texidoc):
str += '@include %(texidoc)s\n\n' % vars ()
substr = ''
progress ('\n')
return do_file (name)
- include_chunks = map (process_include,
- filter (lambda x: is_derived_class (x.__class__,
- Include_snippet),
- chunks))
-
+ # list comprehension replaces the old map/filter pipeline; each
+ # Include_snippet chunk expands to the chunks of its included file
+ include_chunks = [process_include (c) for c in chunks
+ if is_derived_class (c.__class__, Include_snippet)]
- return chunks + reduce (lambda x,y: x + y, include_chunks, [])
+ # NOTE(review): operator.add requires 'import operator' at module
+ # level — confirm this patch (or the file) provides that import.
+ return chunks + reduce (operator.add, include_chunks, [])
except Compile_error:
os.chdir (original_dir)