2 """Tell me who you are!
7 from debian_bundle import deb822
8 from ConfigParser import SafeConfigParser
9 from optparse import OptionParser, Option, OptionGroup, OptionConflictError
17 from jinja2 import Environment, PackageLoader
19 from pprint import PrettyPrinter
# Cache for APT list files (Release/Packages files and blends task
# files) fetched from http:// or svn:// URLs.  Cached copies are named
# the way APT names its /var/lib/apt/lists entries, and compressed
# downloads are decompressed before being stored.
22 class AptListsCache(object):
# cachedir: writable cache directory (created on demand).
# ro_cachedirs: optional additional read-only cache directories that
# are searched for already-cached copies.
23 def __init__(self, cachedir='build/cache',
26 self.cachedir = cachedir
28 if not ro_cachedirs is None:
29 self.ro_cachedirs = ro_cachedirs
# no read-only caches given -> search only the writable one
31 self.ro_cachedirs = []
# make sure the writable cache directory exists
34 create_dir(self.cachedir)
36 def get(self, url, update=False):
37 """Looks in the cache if the file is there and takes the cached one.
38 Otherwise it is downloaded first.
40 Knows how to deal with http:// and svn:// URLs.
45 # look whether it is compressed
46 cext = url.split('.')[-1]
47 if cext in ['gz', 'bz2']:
# strip '.<ext>' (extension plus the dot) to get the name of the
# uncompressed target file
48 target_url = url[:-1 * len(cext) -1]
50 # assume not compressed
54 # turn url into a filename -- mimik what APT does for
# drop the 'scheme://' part and join the rest with underscores
56 tfilename = '_'.join(target_url.split('/')[2:])
58 # if we need to download anyway do not search
60 cfilename = os.path.join(self.cachedir, tfilename)
62 # look for the uncompressed file anywhere in the cache
64 for cp in [self.cachedir] + self.ro_cachedirs:
65 if os.path.exists(os.path.join(cp, tfilename)):
66 cfilename = os.path.join(cp, tfilename)
# not found in any cache -> it will be fetched into the writable one
71 cfilename = os.path.join(self.cachedir, tfilename)
74 # if updated needed -- download
76 #print 'Caching file from %s' % url
78 if url.startswith('svn://'):
# export (not checkout) the single file from subversion
80 pysvn.Client().export(url, cfilename)
81 if url.startswith('http://'):
# download to a temporary location first; note that this shadows
# the stdlib 'tempfile' module name within this method
83 tempfile, ignored = urllib.urlretrieve(url)
# pick the matching command line decompressor for the extension
90 decompressor = 'bzip2'
95 "Don't know how to decompress %s files" \
98 if not decompressor is None:
# decompress in place: -d decompress, -q quiet, -f force overwrite
99 if subprocess.call([decompressor, '-d', '-q', '-f',
101 raise RuntimeError, \
102 "Something went wrong while decompressing '%s'" \
105 # move decompressed file into cache
106 shutil.move(os.path.splitext(tempfile)[0], cfilename)
108 # XXX do we need that if explicit filename is provided?
# hand back an open read handle on the cached copy
112 fh = open(cfilename, 'r')
# Seed `db` with the names of all packages referenced (via Depends or
# Suggests) in the blends task files at `urls`; every new package gets
# an empty skeleton entry.
117 def add_pkgfromtaskfile(db, urls):
118 cache = AptListsCache()
124 # loop over all stanzas
125 for stanza in deb822.Packages.iter_paragraphs(fh):
126 if stanza.has_key('Depends'):
127 pkg = stanza['Depends']
128 elif stanza.has_key('Suggests'):
129 pkg = stanza['Suggests']
133 # account for multiple packages per line
# comma-separated relation list -> one entry per package name
135 pkgs += [p.strip() for p in pkg.split(',')]
137 pkgs.append(pkg.strip())
# create an empty db entry for every package not seen before
140 if not db.has_key(p):
141 db[p] = get_emptydbentry()
# Factory for a fresh, empty per-package db entry (used whenever a new
# package name is first added to the db).
145 def get_emptydbentry():
# Merge task metadata from one Debian blends task file at `url` into
# `db`.  Covers both packages already in Debian and prospective ones
# that only carry a Pkg-Description in the task file.
148 def import_blendstask(db, url):
149 cache = AptListsCache()
153 # figure out blend's task page URL, since they differ from blend to blend
154 urlsec = url.split('/')
155 blendname = urlsec[-3]
156 if blendname == 'debian-med':
157 taskpage_url = 'http://debian-med.alioth.debian.org/tasks/'
158 elif blendname == 'debian-science':
159 taskpage_url = 'http://blends.alioth.debian.org/science/tasks/'
# any other blend is unsupported
161 raise ValueError('Unknown blend "%s"' % blendname)
# task page name is the last component of the task file URL
162 taskpage_url += urlsec[-1]
164 for st in deb822.Packages.iter_paragraphs(fh):
165 if st.has_key('Task'):
166 task_name = st['Task']
# identifying triple: (blend, task name, task page URL)
167 task = (blendname, task_name, taskpage_url)
169 # do not stop unless we have a description
170 if not st.has_key('Pkg-Description'):
173 if st.has_key('Depends'):
175 elif st.has_key('Suggests'):
178 print 'Warning: Cannot determine name of prospective package ' \
# only packages previously seeded into the db are considered
182 if not db.has_key(pkg):
183 print 'Ignoring blend package "%s"' % pkg
189 info['tasks'] = [task]
190 if st.has_key('License'):
191 info['license'] = st['License']
192 if st.has_key('Responsible'):
193 info['responsible'] = st['Responsible']
# escape '%' so later %-interpolation survives; first line is the
# short description, the remainder the long description
196 descr = st['Pkg-Description'].replace('%', '%%').split('\n')
197 info['description'] = descr[0].strip()
198 info['long_description'] = ' '.join([l.strip() for l in descr[1:]])
200 # charge the basic property set
201 db[pkg]['main']['description'] = info['description']
202 db[pkg]['main']['long_description'] = info['long_description']
203 if st.has_key('WNPP'):
204 db[pkg]['main']['debian_itp'] = st['WNPP']
205 if st.has_key('Pkg-URL'):
206 db[pkg]['main']['other_pkg'] = st['Pkg-URL']
207 if st.has_key('Homepage'):
208 db[pkg]['main']['homepage'] = st['Homepage']
210 # only store if there isn't something already
211 if not db[pkg].has_key('blends'):
212 db[pkg]['blends'] = info
214 # just add this tasks name and id
215 db[pkg]['blends']['tasks'].append(task)
# Parse the APT Release file at `rurl` and distill the repository
# metadata (base URL, architectures, components, codename/label) into
# a plain dict.
220 def get_releaseinfo(rurl):
221 cache = AptListsCache()
222 # root URL of the repository
223 baseurl = '/'.join(rurl.split('/')[:-1])
224 # get the release file from the cache
225 release_file = cache.get(rurl)
227 # create parser instance
228 rp = deb822.Release(release_file)
230 # architectures on this dist
231 archs = rp['Architectures'].split()
232 components = rp['Components'].split()
233 # compile a new codename that also considers the repository label
234 # to distinguish between official and unofficial repos.
236 origin = rp['Origin']
237 codename = rp['Codename']
238 labelcode = '_'.join([rp['Label'], rp['Codename']])
243 return {'baseurl': baseurl, 'archs': archs, 'components': components,
244 'codename': codename, 'label': label, 'labelcode': labelcode,
def build_pkgsurl(baseurl, component, arch):
    """Compose the URL of the bzip2-compressed Packages file for one
    component/architecture of the repository rooted at `baseurl`."""
    pieces = [baseurl, component, 'binary-' + arch, 'Packages.bz2']
    return '/'.join(pieces)
# Import all binary package stanzas of one NeuroDebian release (given
# by its Release file URL) into `db`, one Packages file per
# component/architecture pair.
252 def import_release(cfg, db, rurl):
253 cache = AptListsCache()
255 ri = get_releaseinfo(rurl)
257 # compile the list of Packages files to parse and parse them
258 for c in ri['components']:
259 for a in ri['archs']:
260 # compile packages URL
261 pkgsurl = build_pkgsurl(ri['baseurl'], c, a)
263 # retrieve from cache
264 packages_file = cache.get(pkgsurl)
# feed every stanza to the common storage helper
267 for stanza in deb822.Packages.iter_paragraphs(packages_file):
268 db = _store_pkg(cfg, db, stanza, ri['origin'], ri['codename'], c, ri['baseurl'])
271 packages_file.close()
# Store the information of a single Packages stanza `st` in `db`,
# keyed by (human-readable dist name, 'neurodebian-<codename>').
# Only packages already present in `db` are considered.
275 def _store_pkg(cfg, db, st, origin, codename, component, baseurl):
282 # only care for known packages
283 if not db.has_key(pkg):
284 # print 'Ignoring NeuroDebian package "%s"' % pkg
287 distkey = (trans_codename(codename, cfg), 'neurodebian-' + codename)
# extend an existing per-dist entry or start a fresh one
289 if db[pkg].has_key(distkey):
290 info = db[pkg][distkey]
292 info = {'architecture': []}
# collect each architecture only once
295 if not st['Architecture'] in info['architecture']:
296 info['architecture'].append(st['Architecture'])
297 info['maintainer'] = st['Maintainer']
298 if st.has_key('Homepage'):
299 info['homepage'] = st['Homepage']
300 info['version'] = st['Version']
# 'drc' = dist/release/component identifier string
303 info['drc'] = '%s %s %s' % (origin, codename, component)
306 info['poolurl'] = '/'.join([os.path.dirname(st['Filename'])])
# escape '%' to survive later interpolation; first line short
# description, remainder long description
309 descr = st['Description'].replace('%', '%%').split('\n')
310 info['description'] = descr[0].strip()
311 info['long_description'] = ' '.join([l.strip() for l in descr[1:]])
313 db[pkg][distkey] = info
315 # charge the basic property set
316 db[pkg]['main']['description'] = info['description']
317 db[pkg]['main']['long_description'] = info['long_description']
318 if st.has_key('Homepage'):
319 db[pkg]['main']['homepage'] = st['Homepage']
324 def trans_codename(codename, cfg):
325 """Translate a known codename into a release description.
327 Unknown codenames will simply be returned as is.
329 # if we know something, tell
# the mapping lives in the [release codenames] config section
330 if codename in cfg.options('release codenames'):
331 return cfg.get('release codenames', codename)
# Create directory `path` including all missing parents ('mkdir -p');
# does nothing when the path already exists.
336 def create_dir(path):
337 if os.path.exists(path):
# walk the path one component at a time, creating as needed
340 ps = path.split(os.path.sep)
342 for i in range(1,len(ps) + 1):
343 p = os.path.sep.join(ps[:i])
345 if not os.path.exists(p):
# query the DDE web API for JSON and unwrap its 'r' (result) field
351 return json.read(urllib2.urlopen(url+"?t=json").read())['r']
# a failed HTTP request simply means no data for this query
352 except urllib2.HTTPError:
# Augment `db` with per-package information queried from the DDE
# (Debian Distribution Entries) web service, for every dist listed in
# the [dde] config section.
356 def import_dde(cfg, db):
357 dists = cfg.get('dde', 'dists').split()
358 query_url = cfg.get('dde', 'pkgquery_url')
# cross-dist information for one package
361 q = dde_get(query_url + "/all/%s" % p)
# per-dist priority information
365 q = dde_get(query_url + "/prio-%s/%s" % (d, p))
# dist ids look like '<vendor>-<codename>'; key the entry by the
# translated codename plus the raw dist id
367 db[p][(trans_codename(d.split('-')[1], cfg),d)] = q
# Render the ReST page for a single package from the jinja template;
# packages without even a short description produce no page.
372 def generate_pkgpage(pkg, cfg, db, template, addenum_dir):
373 # local binding for ease of use
375 # do nothing if there is not at least the very basic stuff
376 if not db['main'].has_key('description'):
# build the over/underlined ReST page title
378 title = '**%s** -- %s' % (pkg, db['main']['description'])
379 underline = '*' * (len(title) + 2)
380 title = '%s\n %s\n%s' % (underline, title, underline)
382 # preprocess long description
# join wrapped lines; lstrip also drops the '.' used by Debian
# descriptions to mark verbatim/blank lines
383 ld = db['main']['long_description']
384 ld = ' '.join([l.lstrip(' .') for l in ld.split('\n')])
386 page = template.render(pkg=pkg,
391 # the following can be replaced by something like
392 # {% include "sidebar.html" ignore missing %}
393 # in the template whenever jinja 2.2 becomes available
# append a per-package addendum file when one exists
394 addenum = os.path.join(os.path.abspath(addenum_dir), '%s.rst' % pkg)
395 if os.path.exists(addenum):
396 page += '\n\n.. include:: %s\n' % addenum
# Serialize `db` to `filename` as a pretty-printed Python literal
# (readable back via read_db()).
400 def store_db(db, filename):
401 pp = PrettyPrinter(indent=2)
402 f = open(filename, 'w')
403 f.write(pp.pformat(db))
# Counterpart of store_db(): load the package db back from `filename`.
407 def read_db(filename):
# Write one sources.list file per (release, mirror) combination into
# <outdir>/_static, plus an overview page listing them all.
412 def write_sourceslist(jinja_env, cfg, outdir):
414 create_dir(os.path.join(outdir, '_static'))
417 for release in cfg.options('release codenames'):
418 transrel = trans_codename(release, cfg)
420 for mirror in cfg.options('mirrors'):
421 listname = 'neurodebian.%s.%s.sources.list' % (release, mirror)
# remember (mirror, filename) for the overview page
422 repos[transrel].append((mirror, listname))
423 lf = open(os.path.join(outdir, '_static', listname), 'w')
# identical repo line used for both binary and source packages
424 aptcfg = '%s %s main contrib non-free\n' % (cfg.get('mirrors', mirror),
426 lf.write('deb %s' % aptcfg)
427 lf.write('deb-src %s' % aptcfg)
430 srclist_template = jinja_env.get_template('sources_lists.rst')
431 sl = open(os.path.join(outdir, 'sources_lists'), 'w')
432 sl.write(srclist_template.render(repos=repos))
# Render the package TOC and one ReST page per package into
# <outdir>/pkgs.
436 def write_pkgpages(jinja_env, cfg, db, outdir, addenum_dir):
438 create_dir(os.path.join(outdir, 'pkgs'))
440 # generate the TOC with all packages
441 toc_template = jinja_env.get_template('pkgs_toc.rst')
442 toc = open(os.path.join(outdir, 'pkgs.rst'), 'w')
443 toc.write(toc_template.render(pkgs=db.keys()))
446 # and now each individual package page
447 pkg_template = jinja_env.get_template('pkg.rst')
449 page = generate_pkgpage(p, cfg, db, pkg_template, addenum_dir)
450 # when no page is available skip this package
453 pf = open(os.path.join(outdir, 'pkgs', p + '.rst'), 'w')
# NOTE(review): the page is rendered a second time here although
# `page` was computed above -- reusing `page` would avoid the
# duplicate work; confirm nothing in between mutates the db
454 pf.write(generate_pkgpage(p, cfg, db, pkg_template, addenum_dir))
# Populate the OptionParser `op` with all command line options of this
# script.
458 def prepOptParser(op):
459 # use module docstring for help output
460 op.usage = "%s [OPTIONS]\n\n" % sys.argv[0] + __doc__
# file the package db is read from / written to
462 op.add_option("--db",
463 action="store", type="string", dest="db",
465 help="Database file to read. Default: None")
467 op.add_option("--cfg",
468 action="store", type="string", dest="cfg",
470 help="Repository config file.")
472 op.add_option("-o", "--outdir",
473 action="store", type="string", dest="outdir",
475 help="Target directory for ReST output. Default: None")
# may be given multiple times ('append'), one Release file URL each
477 op.add_option("-r", "--release-url",
478 action="append", dest="release_urls",
481 op.add_option("--pkgaddenum", action="store", dest="addenum_dir",
482 type="string", default=None, help="None")
# --- command dispatch: parse options, then run the requested command ---
486 op = OptionParser(version="%prog 0.0.2")
489 (opts, args) = op.parse_args()
# exactly one positional command (e.g. 'updatedb') is required
492 print('There needs to be exactly one command')
# both --cfg and --db are mandatory for every command
498 print("'--cfg' option is mandatory.")
501 print("'--db' option is mandatory.")
505 cfg = SafeConfigParser()
508 # load existing db, unless renew is requested
509 if cmd == 'updatedb':
# seed the db with packages named in the configured task files
511 if cfg.has_option('packages', 'select taskfiles'):
512 db = add_pkgfromtaskfile(db, cfg.get('packages',
513 'select taskfiles').split())
515 # add additional package names from config file
516 if cfg.has_option('packages', 'select names'):
517 for p in cfg.get('packages', 'select names').split():
518 if not db.has_key(p):
519 db[p] = get_emptydbentry()
521 # get info from task files
522 if cfg.has_option('packages', 'prospective'):
523 for url in cfg.get('packages', 'prospective').split():
524 db = import_blendstask(db, url)
526 # parse NeuroDebian repository
527 if cfg.has_option('neurodebian', 'releases'):
528 for rurl in cfg.get('neurodebian', 'releases').split():
529 db = import_release(cfg, db, rurl)
531 # collect package information from DDE
532 db = import_dde(cfg, db)
# persist the updated db for the page-generation commands
534 store_db(db, opts.db)
538 # load the db from file
539 db = read_db(opts.db)
# templates ship inside the 'neurodebian' package
542 jinja_env = Environment(loader=PackageLoader('neurodebian', 'templates'))
544 # generate package pages and TOC and write them to files
545 write_pkgpages(jinja_env, cfg, db, opts.outdir, opts.addenum_dir)
547 write_sourceslist(jinja_env, cfg, opts.outdir)
549 if __name__ == "__main__":