%% This file is in the public domain.
'''
+new_lys_marker = "%% generated from %s" % new_lys
LY_HEADER_NEW = '''%% DO NOT EDIT this file manually; it is automatically
-%% generated from %s
+%s
%% Make any changes in Documentation/snippets/new/
%% and then run scripts/auxiliar/makelsr.py
%%
%% This file is in the public domain.
-''' % new_lys
+''' % new_lys_marker
options_parser = optparse.OptionParser (
description = "makelsr - update snippets directory from LSR",
sys.exit (4)
if len (args) > 1:
exit_with_usage (2)
+ tags = os.listdir (in_dir)
else:
- in_dir = '.'
+ in_dir = ''
+ tags = [os.path.splitext (os.path.basename (f))[0]
+ for f in glob.glob (os.path.join (lys_from_lsr, '*.snippet-list'))]
+## Make sure all users get the same ordering of tags
+tags.sort ()
if options.convert_ly == "LY_PATH/convert-ly":
convert_ly = os.path.join (options.bin_path, "convert-ly")
lilypond_bin = "lilypond"
sys.stderr.write ("Using %s, %s\n" % (convert_ly, lilypond_bin))
-tags = os.listdir (in_dir)
-
unsafe = []
unconverted = []
notags_files = []
return begin_header_re.sub ('\\g<0>\n lsrtags = "' + tags + '"\n',
ly_code, 1)
-# for snippets from input/new, add message for earliest working version
+# for snippets from Documentation/snippets/new, add message for earliest working version
def add_version (ly_code):
return '''%% Note: this file works from version ''' + \
ly_new_version_re.search (ly_code).group (1) + '\n'
unsafe.append (dest)
def read_source_with_dirs (src):
- s = {}
- l = {}
+ snippet_list = {}
+ tag_list = {}
for tag in tags:
srcdir = os.path.join (src, tag)
- l[tag] = set (map (os.path.basename,
+ tag_list[tag] = set (map (os.path.basename,
glob.glob (os.path.join (srcdir, '*.ly'))))
- for f in l[tag]:
- if f in s:
- s[f][1].append (tag)
+ for f in tag_list[tag]:
+ if f in snippet_list:
+ snippet_list[f][1].append (tag)
else:
- s[f] = (srcdir, [tag])
- return s, l
+ snippet_list[f] = (srcdir, [tag])
+ return snippet_list
tags_re = re.compile ('lsrtags\\s*=\\s*"(.+?)"')
def read_source (src):
- s = {}
- l = dict ([(tag, set()) for tag in tags])
+ snippet_list = {}
+ tag_list = dict ([(tag, set()) for tag in tags])
for f in glob.glob (os.path.join (src, '*.ly')):
basename = os.path.basename (f)
m = tags_re.search (open (f, 'r').read ())
if m:
file_tags = [tag.strip() for tag in m.group (1). split(',')]
- s[basename] = (src, file_tags)
- [l[tag].add (basename) for tag in file_tags if tag in tags]
+ snippet_list[basename] = (src, file_tags)
+ for tag in file_tags:
+ if tag in tags:
+ tag_list[tag].add (basename)
+ else:
+ tag_list[tag] = set ((basename,))
else:
notags_files.append (f)
- return s, l
+ return snippet_list, tag_list
-def dump_file_list (file, file_list, update=False):
- if update:
- old_list = set (open (file, 'r').read ().splitlines ())
- old_list.update (file_list)
- new_list = list (old_list)
- else:
- new_list = file_list
+def dump_file_list (file, file_list):
+ new_list = file_list
f = open (file, 'w')
f.write ('\n'.join (sorted (new_list)) + '\n')
-## clean out existing lys and generated files
-map (os.remove, glob.glob (os.path.join (lys_from_lsr, '*.ly')) +
- glob.glob (os.path.join (lys_from_lsr, '*.snippet-list')))
+## Clean out existing lys and generated files -- but when we're
+## not recreating all of them from the tarball, don't delete
+## snippets that came from LSR.
+if in_dir:
+ map (os.remove, glob.glob (os.path.join (lys_from_lsr, '*.ly')) +
+ glob.glob (os.path.join (lys_from_lsr, '*.snippet-list')))
+else:
+ map (os.remove, glob.glob (os.path.join (lys_from_lsr, '*.snippet-list')))
+ for f in glob.glob (os.path.join (lys_from_lsr, '*.ly')):
+ if new_lys_marker in open (f).read ():
+ os.remove (f)
+snippets = {}
+if in_dir:
+ # read LSR source where tags are defined by subdirs
+ snippets = read_source_with_dirs (in_dir)
+
+# read Documentation/snippets/new where tags are directly defined
+snippets_new, not_used_list = read_source (new_lys)
+snippets.update (snippets_new)
-# read LSR source where tags are defined by subdirs
-snippets, tag_lists = read_source_with_dirs (in_dir)
+for (name, (srcdir, file_tags)) in snippets.items ():
+ copy_ly (srcdir, name, file_tags)
-# read input/new where tags are directly defined
-s, l = read_source (new_lys)
-snippets.update (s)
-for t in tags:
- tag_lists[t].update (l[t])
+not_used_snippets, tag_lists = read_source (lys_from_lsr)
-for (name, (srcdir, tags)) in snippets.items ():
- copy_ly (srcdir, name, tags)
for (tag, file_set) in tag_lists.items ():
dump_file_list (os.path.join (lys_from_lsr, tag + '.snippet-list'),
- file_set, update=not(in_dir))
+ file_set)
if unconverted:
sys.stderr.write ('These files could not be converted successfully by convert-ly:\n')
sys.stderr.write ('\n'.join (unconverted) + '\n\n')