"""Tell me who you are!
"""
import os
import sys
import shutil
import urllib
import urllib2
import subprocess
import json
import pysvn
import apt

from debian_bundle import deb822
from ConfigParser import SafeConfigParser
from optparse import OptionParser, Option, OptionGroup, OptionConflictError
from jinja2 import Environment, PackageLoader
from pprint import PrettyPrinter
class AptListsCache(object):
    def __init__(self, cachedir='build/cache',
                 ro_cachedirs=None):
        self.cachedir = cachedir

        # optional read-only cache dirs that are searched before downloading
        if ro_cachedirs is not None:
            self.ro_cachedirs = ro_cachedirs
        else:
            self.ro_cachedirs = []

        # create the writable cache dir if necessary
        create_dir(self.cachedir)
    def get(self, url, update=False):
        """Look up `url` in the cache and return a handle to the cached copy.
        Otherwise the file is downloaded first.

        Knows how to deal with http:// and svn:// URLs.
        """
        # check whether the URL points to a compressed file
        cext = url.split('.')[-1]
        if cext in ['gz', 'bz2']:
            target_url = url[:-1 * len(cext) - 1]
        else:
            # assume not compressed
            target_url = url
            cext = None

        # turn the URL into a filename -- mimic what APT does for
        # its lists/ directory
        tfilename = '_'.join(target_url.split('/')[2:])
        # if we need to download anyway do not search the cache
        if update:
            cfilename = os.path.join(self.cachedir, tfilename)
        else:
            # look for the uncompressed file anywhere in the cache
            cfilename = None
            for cp in [self.cachedir] + self.ro_cachedirs:
                if os.path.exists(os.path.join(cp, tfilename)):
                    cfilename = os.path.join(cp, tfilename)
                    break

        # nothing in any cache yet -- place the download in our own cache dir
        if cfilename is None:
            cfilename = os.path.join(self.cachedir, tfilename)
        # if an update is needed -- download
        if update or not os.path.exists(cfilename):
            #print 'Caching file from %s' % url

            if url.startswith('svn://'):
                # export from SVN
                pysvn.Client().export(url, cfilename)
            if url.startswith('http://'):
                # download
                tempfile, ignored = urllib.urlretrieve(url)

                # decompress
                decompressor = None
                if cext == 'bz2':
                    decompressor = 'bzip2'
                elif cext == 'gz':
                    decompressor = 'gzip'
                elif cext is not None:
                    raise ValueError, \
                          "Don't know how to decompress %s files" \
                          % cext

                if decompressor is not None:
                    if subprocess.call([decompressor, '-d', '-q', '-f',
                                        tempfile]) != 0:
                        raise RuntimeError, \
                              "Something went wrong while decompressing '%s'" \
                              % tempfile

                # move decompressed file into cache
                shutil.move(os.path.splitext(tempfile)[0], cfilename)
                # XXX do we need that if explicit filename is provided?
                urllib.urlcleanup()

        # open the cached file and hand out the file handle
        fh = open(cfilename, 'r')

        return fh
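    # Usage sketch (illustrative only; the URL is a placeholder):
    #   cache = AptListsCache()
    #   fh = cache.get('http://example.org/dists/lenny/main/binary-i386/Packages.bz2')
    #   for stanza in deb822.Packages.iter_paragraphs(fh):
    #       ...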
def add_pkgfromtaskfile(db, urls):
    cache = AptListsCache()
    pkgs = []

    for url in urls:
        fh = cache.get(url)

        # loop over all stanzas
        for stanza in deb822.Packages.iter_paragraphs(fh):
            if stanza.has_key('Depends'):
                pkg = stanza['Depends']
            elif stanza.has_key('Suggests'):
                pkg = stanza['Suggests']
            else:
                continue

            # account for multiple packages per line
            if pkg.count(','):
                pkgs += [p.strip() for p in pkg.split(',')]
            else:
                pkgs.append(pkg.strip())

    for p in pkgs:
        if not db.has_key(p):
            db[p] = get_emptydbentry()

    return db
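# Note: `db` is a plain dict keyed by package name; every entry starts out with
# a 'main' section (see get_emptydbentry below) and gains further sections keyed
# by (release description, codename) tuples while repositories are imported.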
def get_emptydbentry():
    return {'main': {}}
def import_blendstask(db, url):
    cache = AptListsCache()

    fh = cache.get(url)

    # figure out blend's task page URL, since they differ from blend to blend
    urlsec = url.split('/')
    blendname = urlsec[-3]
    if blendname == 'debian-med':
        taskpage_url = 'http://debian-med.alioth.debian.org/tasks/'
    elif blendname == 'debian-science':
        taskpage_url = 'http://blends.alioth.debian.org/science/tasks/'
    else:
        raise ValueError('Unknown blend "%s"' % blendname)
    taskpage_url += urlsec[-1]
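    # For orientation (hypothetical example) -- a task file stanza provides
    # fields such as Task, Depends/Suggests, Pkg-Description, License,
    # Responsible, WNPP, Pkg-URL and Homepage, e.g.:
    #   Task: Imaging
    #   Depends: somepkg
    #   Pkg-Description: short synopsis
    #    continued long description on indented lines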
    for st in deb822.Packages.iter_paragraphs(fh):
        if st.has_key('Task'):
            task_name = st['Task']
            task = (blendname, task_name, taskpage_url)

        # skip stanzas that do not carry a package description
        if not st.has_key('Pkg-Description'):
            continue

        if st.has_key('Depends'):
            pkg = st['Depends']
        elif st.has_key('Suggests'):
            pkg = st['Suggests']
        else:
            print 'Warning: Cannot determine name of prospective package ' \
                  '... ignoring.'
            continue

        # only care for packages already selected for the db
        if not db.has_key(pkg):
            print 'Ignoring blend package "%s"' % pkg
            continue

        info = {}

        # blends info
        info['tasks'] = [task]
        if st.has_key('License'):
            info['license'] = st['License']
        if st.has_key('Responsible'):
            info['responsible'] = st['Responsible']

        # package description
        descr = st['Pkg-Description'].replace('%', '%%').split('\n')
        info['description'] = descr[0].strip()
        info['long_description'] = ' '.join([l.strip() for l in descr[1:]])

        # populate the basic property set
        db[pkg]['main']['description'] = info['description']
        db[pkg]['main']['long_description'] = info['long_description']
        if st.has_key('WNPP'):
            db[pkg]['main']['debian_itp'] = st['WNPP']
        if st.has_key('Pkg-URL'):
            db[pkg]['main']['other_pkg'] = st['Pkg-URL']
        if st.has_key('Homepage'):
            db[pkg]['main']['homepage'] = st['Homepage']

        # only store if there isn't something already
        if not db[pkg].has_key('blends'):
            db[pkg]['blends'] = info
        else:
            # just add this task's name and id
            db[pkg]['blends']['tasks'].append(task)

    return db
def get_releaseinfo(rurl):
    cache = AptListsCache()
    # root URL of the repository
    baseurl = '/'.join(rurl.split('/')[:-1])
    # get the release file from the cache
    release_file = cache.get(rurl)

    # create parser instance
    rp = deb822.Release(release_file)

    # architectures on this dist
    archs = rp['Architectures'].split()
    components = rp['Components'].split()
    # compile a new codename that also considers the repository label
    # to distinguish between official and unofficial repos.
    label = rp['Label']
    origin = rp['Origin']
    codename = rp['Codename']
    labelcode = '_'.join([rp['Label'], rp['Codename']])

    release_file.close()

    return {'baseurl': baseurl, 'archs': archs, 'components': components,
            'codename': codename, 'label': label, 'labelcode': labelcode,
            'origin': origin}
def build_pkgsurl(baseurl, component, arch):
    return '/'.join([baseurl, component, 'binary-' + arch, 'Packages.bz2'])
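# For example, build_pkgsurl('http://example.org/debian/dists/lenny', 'main', 'i386')
# yields 'http://example.org/debian/dists/lenny/main/binary-i386/Packages.bz2'
# (the base URL here is only an illustration).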
def import_release(cfg, db, rurl):
    cache = AptListsCache()

    ri = get_releaseinfo(rurl)

    # compile the list of Packages files to parse and parse them
    for c in ri['components']:
        for a in ri['archs']:
            # compile packages URL
            pkgsurl = build_pkgsurl(ri['baseurl'], c, a)

            # retrieve from cache
            packages_file = cache.get(pkgsurl)

            # parse
            for stanza in deb822.Packages.iter_paragraphs(packages_file):
                db = _store_pkg(cfg, db, stanza, ri['origin'], ri['codename'],
                                c, ri['baseurl'])

            # cleanup
            packages_file.close()

    return db
def _store_pkg(cfg, db, st, origin, codename, component, baseurl):
    pkg = st['Package']

    # only care for known packages
    if not db.has_key(pkg):
        # print 'Ignoring NeuroDebian package "%s"' % pkg
        return db

    distkey = (trans_codename(codename, cfg), 'neurodebian-' + codename)

    if db[pkg].has_key(distkey):
        info = db[pkg][distkey]
    else:
        info = {'architecture': []}

    # fill in data
    if not st['Architecture'] in info['architecture']:
        info['architecture'].append(st['Architecture'])
    info['maintainer'] = st['Maintainer']
    if st.has_key('Homepage'):
        info['homepage'] = st['Homepage']
    info['version'] = st['Version']

    # origin
    info['distribution'] = origin
    info['release'] = codename
    info['component'] = component

    # pool URL of the package
    info['poolurl'] = '/'.join([os.path.dirname(st['Filename'])])

    # package description
    descr = st['Description'].replace('%', '%%').split('\n')
    info['description'] = descr[0].strip()
    info['long_description'] = ' '.join([l.strip() for l in descr[1:]])

    db[pkg][distkey] = info

    # populate the basic property set
    db[pkg]['main']['description'] = info['description']
    db[pkg]['main']['long_description'] = info['long_description']
    if st.has_key('Homepage'):
        db[pkg]['main']['homepage'] = st['Homepage']

    return db
def trans_codename(codename, cfg):
    """Translate a known codename into a release description.

    Unknown codenames will simply be returned as is.
    """
    # if we know something, tell
    if codename in cfg.options('release codenames'):
        return cfg.get('release codenames', codename)
    else:
        return codename
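# A '[release codenames]' section in the config file provides this mapping,
# e.g. (hypothetical values):
#   [release codenames]
#   lenny = Debian GNU/Linux 5.0 (lenny)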
def create_dir(path):
    if os.path.exists(path):
        return

    ps = path.split(os.path.sep)

    for i in range(1, len(ps) + 1):
        p = os.path.sep.join(ps[:i])

        if not os.path.exists(p):
            os.mkdir(p)
def dde_get(url):
    try:
        return json.read(urllib2.urlopen(url + "?t=json").read())['r']
    except urllib2.HTTPError:
        return False
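# dde_get() queries the DDE (Debian Data Export) web service: appending
# '?t=json' requests a JSON-encoded response whose payload sits under the 'r'
# key; failed queries simply yield a false value.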
def import_dde(cfg, db):
    dists = cfg.get('dde', 'dists').split()
    query_url = cfg.get('dde', 'pkgquery_url')

    for p in db.keys():
        q = dde_get(query_url + "/packages/all/%s" % p)
        if q:
            # get latest popcon info for debian and ubuntu
            # cannot use the origin field itself, since it is None for a few
            # packages
            origin = q['drc'].split()[0]
            print 'popcon query for', p
            if origin == 'ubuntu':
                print 'have ubuntu first'
                if q.has_key('popcon'):
                    print 'ubuntu has popcon'
                    db[p]['main']['ubuntu_popcon'] = q['popcon']
                # if we have ubuntu, need to get debian
                q = dde_get(query_url + "/packages/prio-debian-sid/%s" % p)
                if q and q.has_key('popcon'):
                    print 'debian has popcon'
                    db[p]['main']['debian_popcon'] = q['popcon']
            elif origin == 'debian':
                print 'have debian first'
                if q.has_key('popcon'):
                    print 'debian has popcon'
                    db[p]['main']['debian_popcon'] = q['popcon']
                # if we have debian, need to get ubuntu
                q = dde_get(query_url + "/packages/prio-ubuntu-karmic/%s" % p)
                if q and q.has_key('popcon'):
                    print 'ubuntu has popcon'
                    db[p]['main']['ubuntu_popcon'] = q['popcon']
            else:
                print("Ignoring unknown origin '%s' for package '%s'."
                      % (origin, p))
        # now get info for the package from all releases in UDD
        q = dde_get(query_url + "/dist/p:%s" % p)
        if not q:
            continue

        # hold all info about this package per distribution release
        info = {}
        for cp in q:
            distkey = (trans_codename(cp['release'], cfg),
                       "%s-%s" % (cp['distribution'], cp['release']))
            if not info.has_key(distkey):
                info[distkey] = cp
                # turn into a list to append others later
                info[distkey]['architecture'] = [info[distkey]['architecture']]
            # accumulate data over multiple archs
            else:
                comp = apt.VersionCompare(cp['version'],
                                          info[distkey]['version'])
                # found another arch for the same version
                if comp == 0:
                    info[distkey]['architecture'].append(cp['architecture'])
                # found newer version, dump the old ones
                elif comp > 0:
                    info[distkey] = cp
                    # turn into a list to append others later
                    info[distkey]['architecture'] = [info[distkey]['architecture']]
                # simply ignore older versions

        # finally assign the new package data
        for k, v in info.iteritems():
            db[p][k] = v

    return db
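# Note on import_dde(): the "/dist/p:<pkg>" DDE query is expected to return a
# list of per-release records carrying at least 'release', 'distribution',
# 'architecture' and 'version' fields -- an observation from the code above,
# not a documented API guarantee.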
def generate_pkgpage(pkg, cfg, db, template, addenum_dir):
    # local binding for ease of use
    db = db[pkg]

    # do nothing if there is not at least the very basic stuff
    if not db['main'].has_key('description'):
        return None

    title = '**%s** -- %s' % (pkg, db['main']['description'])
    underline = '*' * (len(title) + 2)
    title = '%s\n %s\n%s' % (underline, title, underline)

    # preprocess long description
    ld = db['main']['long_description']
    ld = ' '.join([l.lstrip(' .') for l in ld.split('\n')])
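    # (note) in Debian package descriptions a line holding a single '.' marks a
    # paragraph break; the lstrip(' .') above drops those markers while the
    # long description is folded into a single line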
    page = template.render(pkg=pkg,
                           title=title,
                           long_description=ld,
                           cfg=cfg,
                           db=db)

    # the following can be replaced by something like
    # {% include "sidebar.html" ignore missing %}
    # in the template whenever jinja 2.2 becomes available
    addenum = os.path.join(os.path.abspath(addenum_dir), '%s.rst' % pkg)
    if os.path.exists(addenum):
        page += '\n\n.. include:: %s\n' % addenum

    return page
def store_db(db, filename):
    pp = PrettyPrinter(indent=2)
    f = open(filename, 'w')
    f.write(pp.pformat(db))
    f.close()
def read_db(filename):
    # counterpart to store_db(): the file holds a pretty-printed dict literal
    db = eval(open(filename).read())
    return db
def write_sourceslist(jinja_env, cfg, outdir):
    create_dir(outdir)
    create_dir(os.path.join(outdir, '_static'))

    repos = {}
    for release in cfg.options('release codenames'):
        transrel = trans_codename(release, cfg)
        repos[transrel] = []
        for mirror in cfg.options('mirrors'):
            listname = 'neurodebian.%s.%s.sources.list' % (release, mirror)
            repos[transrel].append((mirror, listname))
            lf = open(os.path.join(outdir, '_static', listname), 'w')
            aptcfg = '%s %s main contrib non-free\n' % (cfg.get('mirrors', mirror),
                                                        release)
            lf.write('deb %s' % aptcfg)
            lf.write('deb-src %s' % aptcfg)
            lf.close()

    srclist_template = jinja_env.get_template('sources_lists.rst')
    sl = open(os.path.join(outdir, 'sources_lists'), 'w')
    sl.write(srclist_template.render(repos=repos))
    sl.close()
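# Each generated *.sources.list file thus carries two lines of the form
# (mirror URL and release substituted from the config; placeholders shown):
#   deb http://<mirror> <release> main contrib non-free
#   deb-src http://<mirror> <release> main contrib non-free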
def write_pkgpages(jinja_env, cfg, db, outdir, addenum_dir):
    create_dir(outdir)
    create_dir(os.path.join(outdir, 'pkgs'))

    # generate the TOC with all packages
    toc_template = jinja_env.get_template('pkgs_toc.rst')
    toc = open(os.path.join(outdir, 'pkgs.rst'), 'w')
    toc.write(toc_template.render(pkgs=db.keys()))
    toc.close()

    # and now each individual package page
    pkg_template = jinja_env.get_template('pkg.rst')
    for p in db.keys():
        page = generate_pkgpage(p, cfg, db, pkg_template, addenum_dir)
        # when no page is available skip this package
        if page is None:
            continue
        pf = open(os.path.join(outdir, 'pkgs', p + '.rst'), 'w')
        pf.write(page)
        pf.close()
def prepOptParser(op):
    # use module docstring for help output
    op.usage = "%s [OPTIONS]\n\n" % sys.argv[0] + __doc__

    op.add_option("--db",
                  action="store", type="string", dest="db",
                  default=None,
                  help="Database file to read. Default: None")

    op.add_option("--cfg",
                  action="store", type="string", dest="cfg",
                  default=None,
                  help="Repository config file.")

    op.add_option("-o", "--outdir",
                  action="store", type="string", dest="outdir",
                  default=None,
                  help="Target directory for ReST output. Default: None")

    op.add_option("-r", "--release-url",
                  action="append", dest="release_urls")

    op.add_option("--pkgaddenum", action="store", dest="addenum_dir",
                  type="string", default=None, help="None")
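# Example invocation (sketch; script and file names are placeholders, and
# 'updatedb' is the only command visible in this code):
#   python neurodebian.py --cfg repo.cfg --db pkgs.db updatedb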
def main():
    op = OptionParser(version="%prog 0.0.2")
    prepOptParser(op)

    (opts, args) = op.parse_args()

    if len(args) != 1:
        print('There needs to be exactly one command')
        sys.exit(1)

    cmd = args[0]

    if opts.cfg is None:
        print("'--cfg' option is mandatory.")
        sys.exit(1)
    if opts.db is None:
        print("'--db' option is mandatory.")
        sys.exit(1)

    cfg = SafeConfigParser()
    cfg.read(opts.cfg)
    # build a fresh db if an update is requested, otherwise load the existing one
    if cmd == 'updatedb':
        db = {}
        if cfg.has_option('packages', 'select taskfiles'):
            db = add_pkgfromtaskfile(db, cfg.get('packages',
                                                 'select taskfiles').split())

        # add additional package names from config file
        if cfg.has_option('packages', 'select names'):
            for p in cfg.get('packages', 'select names').split():
                if not db.has_key(p):
                    db[p] = get_emptydbentry()

        # get info on prospective packages from blends task files
        if cfg.has_option('packages', 'prospective'):
            for url in cfg.get('packages', 'prospective').split():
                db = import_blendstask(db, url)

        # parse NeuroDebian repository
        if cfg.has_option('neurodebian', 'releases'):
            for rurl in cfg.get('neurodebian', 'releases').split():
                db = import_release(cfg, db, rurl)

        # collect package information from DDE
        db = import_dde(cfg, db)

        # store the updated db and exit
        store_db(db, opts.db)
        return
    # load the db from file
    db = read_db(opts.db)

    jinja_env = Environment(loader=PackageLoader('neurodebian', 'templates'))

    # generate package pages and TOC and write them to files
    write_pkgpages(jinja_env, cfg, db, opts.outdir, opts.addenum_dir)

    write_sourceslist(jinja_env, cfg, opts.outdir)
if __name__ == "__main__":
    main()