2 """Tell me who you are!
7 from debian_bundle import deb822
9 from ConfigParser import SafeConfigParser
10 from optparse import OptionParser, Option, OptionGroup, OptionConflictError
18 from jinja2 import Environment, PackageLoader
20 from pprint import PrettyPrinter
# Local download cache for APT index files (Packages/Release) fetched over
# http:// or svn://.  Cached filenames mimic APT's scheme (URL path joined
# with '_').  NOTE(review): this listing is line-sampled; several statements
# of the original implementation are not visible, comments describe only the
# visible lines.
23 class AptListsCache(object):
24 def __init__(self, cachedir='build/cache',
# writable cache directory; additional read-only cache dirs are optional
27 self.cachedir = cachedir
29 if not ro_cachedirs is None:
30 self.ro_cachedirs = ro_cachedirs
32 self.ro_cachedirs = []
# ensure the writable cache directory exists
35 create_dir(self.cachedir)
37 def get(self, url, update=False):
38 """Looks in the cache if the file is there and takes the cached one.
39 Otherwise it is downloaded first.
41 Knows how to deal with http:// and svn:// URLs.
# compressed remote files are identified by extension and cached decompressed
46 # look whether it is compressed
47 cext = url.split('.')[-1]
48 if cext in ['gz', 'bz2']:
# strip '.<ext>' (including the dot) to get the uncompressed target URL
49 target_url = url[:-1 * len(cext) -1]
51 # assume not compressed
55 # turn url into a filename -- mimik what APT does for
# drop the 'scheme://' part and join the remaining path components with '_'
57 tfilename = '_'.join(target_url.split('/')[2:])
59 # if we need to download anyway do not search
61 cfilename = os.path.join(self.cachedir, tfilename)
63 # look for the uncompressed file anywhere in the cache
# read-only caches are consulted after the writable one; first hit wins
65 for cp in [self.cachedir] + self.ro_cachedirs:
66 if os.path.exists(os.path.join(cp, tfilename)):
67 cfilename = os.path.join(cp, tfilename)
# fall back to the writable cache location (download target)
72 cfilename = os.path.join(self.cachedir, tfilename)
75 # if updated needed -- download
77 #print 'Caching file from %s' % url
79 if url.startswith('svn://'):
# svn export writes straight to the cache destination
81 pysvn.Client().export(url, cfilename)
82 if url.startswith('http://'):
# NOTE(review): 'tempfile' shadows the stdlib tempfile module name here
84 tempfile, ignored = urllib.urlretrieve(url)
91 decompressor = 'bzip2'
96 "Don't know how to decompress %s files" \
# decompress in place via external gzip/bzip2 tool (Python 2 style)
99 if not decompressor is None:
100 if subprocess.call([decompressor, '-d', '-q', '-f',
102 raise RuntimeError, \
103 "Something went wrong while decompressing '%s'" \
106 # move decompressed file into cache
107 shutil.move(os.path.splitext(tempfile)[0], cfilename)
109 # XXX do we need that if explicit filename is provided?
# hand back an open read handle on the cached file; caller closes it
113 fh = open(cfilename, 'r')
# Seed `db` with empty entries for every package named in the given Blends
# task files (Depends/Suggests fields).  NOTE(review): line-sampled listing;
# the loop over `urls` and the return statement are not visible here.
118 def add_pkgfromtaskfile(db, urls):
119 cache = AptListsCache()
125 # loop over all stanzas
126 for stanza in deb822.Packages.iter_paragraphs(fh):
# Depends takes precedence over Suggests for the package name list
127 if stanza.has_key('Depends'):
128 pkg = stanza['Depends']
129 elif stanza.has_key('Suggests'):
130 pkg = stanza['Suggests']
134 # account for multiple packages per line
# comma-separated dependency lists are split into individual names
136 pkgs += [p.strip() for p in pkg.split(',')]
138 pkgs.append(pkg.strip())
# register each package once; existing entries are left untouched
141 if not db.has_key(p):
142 db[p] = get_emptydbentry()
# Factory for a fresh, empty per-package db entry.  NOTE(review): the body is
# not visible in this sampled listing; callers (e.g. add_pkgfromtaskfile)
# store its return value under db[pkg] -- presumably a dict with a 'main'
# sub-dict, judging from later db[pkg]['main'] accesses.  Confirm upstream.
146 def get_emptydbentry():
# Merge metadata from a Debian Blends task file at `url` into `db`.
# NOTE(review): line-sampled listing; several statements (fh acquisition,
# pkg assignment, return) are not visible here.
149 def import_blendstask(db, url):
150 cache = AptListsCache()
154 # figure out blend's task page URL, since they differ from blend to blend
155 urlsec = url.split('/')
# third-from-last URL component identifies the blend
156 blendname = urlsec[-3]
157 if blendname == 'debian-med':
158 taskpage_url = 'http://debian-med.alioth.debian.org/tasks/'
159 elif blendname == 'debian-science':
160 taskpage_url = 'http://blends.alioth.debian.org/science/tasks/'
162 raise ValueError('Unknown blend "%s"' % blendname)
# task page name equals the task file's basename
163 taskpage_url += urlsec[-1]
165 for st in deb822.Packages.iter_paragraphs(fh):
# a 'Task' stanza defines the current (blend, task, url) triple for
# all following package stanzas
166 if st.has_key('Task'):
167 task_name = st['Task']
168 task = (blendname, task_name, taskpage_url)
170 # do not stop unless we have a description
171 if not st.has_key('Pkg-Description'):
174 if st.has_key('Depends'):
176 elif st.has_key('Suggests'):
# Python 2 print statement
179 print 'Warning: Cannot determine name of prospective package ' \
# only packages already seeded in db are considered
183 if not db.has_key(pkg):
184 print 'Ignoring blend package "%s"' % pkg
190 info['tasks'] = [task]
191 if st.has_key('License'):
192 info['license'] = st['License']
193 if st.has_key('Responsible'):
194 info['responsible'] = st['Responsible']
# escape '%' for later %-formatting; first line is the short description
197 descr = st['Pkg-Description'].replace('%', '%%').split('\n')
198 info['description'] = descr[0].strip()
199 info['long_description'] = ' '.join([l.strip() for l in descr[1:]])
201 # charge the basic property set
202 db[pkg]['main']['description'] = info['description']
203 db[pkg]['main']['long_description'] = info['long_description']
204 if st.has_key('WNPP'):
205 db[pkg]['main']['debian_itp'] = st['WNPP']
206 if st.has_key('Pkg-URL'):
207 db[pkg]['main']['other_pkg'] = st['Pkg-URL']
208 if st.has_key('Homepage'):
209 db[pkg]['main']['homepage'] = st['Homepage']
211 # only store if there isn't something already
212 if not db[pkg].has_key('blends'):
213 db[pkg]['blends'] = info
215 # just add this tasks name and id
216 db[pkg]['blends']['tasks'].append(task)
# Fetch and parse an APT Release file; return a dict describing the
# repository (base URL, architectures, components, codename/label keys).
# NOTE(review): line-sampled listing; the tail of the returned dict literal
# is not visible.
221 def get_releaseinfo(rurl):
222 cache = AptListsCache()
223 # root URL of the repository
224 baseurl = '/'.join(rurl.split('/')[:-1])
225 # get the release file from the cache
226 release_file = cache.get(rurl)
228 # create parser instance
229 rp = deb822.Release(release_file)
231 # architectures on this dist
232 archs = rp['Architectures'].split()
233 components = rp['Components'].split()
234 # compile a new codename that also considers the repository label
235 # to distinguish between official and unofficial repos.
237 origin = rp['Origin']
238 codename = rp['Codename']
239 labelcode = '_'.join([rp['Label'], rp['Codename']])
244 return {'baseurl': baseurl, 'archs': archs, 'components': components,
245 'codename': codename, 'label': label, 'labelcode': labelcode,
def build_pkgsurl(baseurl, component, arch):
    """Return the URL of the bzip2-compressed Packages index for the given
    repository component and architecture under `baseurl`."""
    pieces = (baseurl, component, 'binary-' + arch, 'Packages.bz2')
    return '/'.join(pieces)
# Import all package stanzas of one repository release into `db`, iterating
# over every component/architecture Packages file.  NOTE(review): line-
# sampled listing; file-open and return statements are not visible.
253 def import_release(cfg, db, rurl):
254 cache = AptListsCache()
# release metadata (components, archs, baseurl, ...) from the Release file
256 ri = get_releaseinfo(rurl)
258 # compile the list of Packages files to parse and parse them
259 for c in ri['components']:
260 for a in ri['archs']:
261 # compile packages URL
262 pkgsurl = build_pkgsurl(ri['baseurl'], c, a)
264 # retrieve from cache
265 packages_file = cache.get(pkgsurl)
# each stanza is folded into db; db is threaded through _store_pkg
268 for stanza in deb822.Packages.iter_paragraphs(packages_file):
269 db = _store_pkg(cfg, db, stanza, ri['origin'], ri['codename'], c, ri['baseurl'])
272 packages_file.close()
# Store one Packages stanza `st` in `db` under a per-distribution key.
# NOTE(review): line-sampled listing; the assignment of `pkg` (presumably
# st['Package'] -- confirm) and the return statement are not visible.
276 def _store_pkg(cfg, db, st, origin, codename, component, baseurl):
283 # only care for known packages
284 if not db.has_key(pkg):
285 # print 'Ignoring NeuroDebian package "%s"' % pkg
# key pairs a human-readable release name with a repo-qualified codename
288 distkey = (trans_codename(codename, cfg), 'neurodebian-' + codename)
290 if db[pkg].has_key(distkey):
291 info = db[pkg][distkey]
293 info = {'architecture': []}
# same version may appear for several architectures; collect them
296 if not st['Architecture'] in info['architecture']:
297 info['architecture'].append(st['Architecture'])
298 info['maintainer'] = st['Maintainer']
299 if st.has_key('Homepage'):
300 info['homepage'] = st['Homepage']
301 info['version'] = st['Version']
304 info['distribution'] = origin
305 info['release'] = codename
306 info['component'] = component
# pool directory of the .deb, relative to the repository root
309 info['poolurl'] = '/'.join([os.path.dirname(st['Filename'])])
# escape '%' for later %-formatting; first line is the short description
312 descr = st['Description'].replace('%', '%%').split('\n')
313 info['description'] = descr[0].strip()
314 info['long_description'] = ' '.join([l.strip() for l in descr[1:]])
316 db[pkg][distkey] = info
318 # charge the basic property set
319 db[pkg]['main']['description'] = info['description']
320 db[pkg]['main']['long_description'] = info['long_description']
321 if st.has_key('Homepage'):
322 db[pkg]['main']['homepage'] = st['Homepage']
def trans_codename(codename, cfg):
    """Translate a known codename into a release description.

    Unknown codenames will simply be returned as is.

    :param codename: release codename (e.g. 'lenny')
    :param cfg: ConfigParser instance with a 'release codenames' section
    :returns: the configured description, or `codename` unchanged
    """
    # if we know something, tell
    if codename in cfg.options('release codenames'):
        return cfg.get('release codenames', codename)
    # fall through: unknown codenames are passed back untranslated
    return codename
def create_dir(path):
    """Create directory `path`, including any missing parent directories.

    A no-op when `path` already exists.  Equivalent to the original
    component-by-component mkdir loop, but delegates to the standard
    library, which handles intermediate directories itself.
    """
    if not os.path.exists(path):
        os.makedirs(path)
# NOTE(review): fragment of a helper (presumably `dde_get(url)` -- its def
# line is not visible in this sampled listing).  Queries the DDE service for
# a JSON payload and unwraps the 'r' result field; HTTP errors appear to be
# handled (handler body not visible).
354 return json.read(urllib2.urlopen(url+"?t=json").read())['r']
355 except urllib2.HTTPError:
# Enrich `db` with per-distribution package data fetched from the DDE
# (Debian Distribution Explorer) web service.  NOTE(review): line-sampled
# listing; the package loop head, several conditionals and the return are
# not visible.
359 def import_dde(cfg, db):
360 dists = cfg.get('dde', 'dists').split()
361 query_url = cfg.get('dde', 'pkgquery_url')
# global (all-dists) record for package p
364 q = dde_get(query_url + "/packages/all/%s" % p)
# configured dists are '<dist>-<release>' strings
368 dist, release = d.split('-')
369 q = dde_get(query_url + "/dist/d:%s/r:%s/p:%s" % (dist, release, p))
372 # accumulate data for multiple over archs
377 # turn into a list to append others later
378 item['architecture'] = [item['architecture']]
# apt version comparison decides whether i is same/newer/older
380 comp = apt.VersionCompare(i['version'], item['version'])
381 # found another arch for the same version
383 item['architecture'].append(i['architecture'])
384 # found newer version, dump the old ones
387 # turn into a list to append others later
388 item['architecture'] = [item['architecture']]
389 # simply ignore older versions
393 # finally assign the new package data
# keyed like _store_pkg: (human-readable release name, dist-release id)
394 db[p][(trans_codename(d.split('-')[1], cfg),d)] = item
# Render the ReST page for one package from the jinja template; returns the
# page text (return statement not visible in this sampled listing) or
# nothing when the package lacks a basic description.
399 def generate_pkgpage(pkg, cfg, db, template, addenum_dir):
400 # local binding for ease of use
402 # do nothing if there is not at least the very basic stuff
403 if not db['main'].has_key('description'):
# build a ReST over/underlined title: '**pkg** -- short description'
405 title = '**%s** -- %s' % (pkg, db['main']['description'])
406 underline = '*' * (len(title) + 2)
407 title = '%s\n %s\n%s' % (underline, title, underline)
409 # preprocess long description
# join wrapped lines; lstrip removes leading spaces and ReST-style ' .'
410 ld = db['main']['long_description']
411 ld = ' '.join([l.lstrip(' .') for l in ld.split('\n')])
413 page = template.render(pkg=pkg,
418 # the following can be replaced by something like
419 # {% include "sidebar.html" ignore missing %}
420 # in the template whenever jinja 2.2 becomes available
# per-package hand-written addendum file gets appended via ReST include
421 addenum = os.path.join(os.path.abspath(addenum_dir), '%s.rst' % pkg)
422 if os.path.exists(addenum):
423 page += '\n\n.. include:: %s\n' % addenum
def store_db(db, filename):
    """Serialize the package database `db` to `filename` as a
    pretty-printed Python literal (the counterpart of read_db).

    :param db: database dictionary
    :param filename: target file path (overwritten)
    """
    pp = PrettyPrinter(indent=2)
    # context manager guarantees the handle is closed/flushed even if the
    # write fails (the original relied on an explicit close)
    with open(filename, 'w') as f:
        f.write(pp.pformat(db))
# Load a database previously written by store_db().  NOTE(review): the body
# is not visible in this sampled listing; given store_db writes a pformat'ed
# dict, this presumably evaluates the file content back into a dict --
# confirm against the full source.
434 def read_db(filename):
# Generate per-release/per-mirror APT sources.list snippets in _static/ and
# a ReST overview page listing them.  NOTE(review): line-sampled listing;
# the repos dict initialization and file-close calls are not visible.
439 def write_sourceslist(jinja_env, cfg, outdir):
441 create_dir(os.path.join(outdir, '_static'))
# one snippet per (release codename, mirror) combination
444 for release in cfg.options('release codenames'):
445 transrel = trans_codename(release, cfg)
447 for mirror in cfg.options('mirrors'):
448 listname = 'neurodebian.%s.%s.sources.list' % (release, mirror)
449 repos[transrel].append((mirror, listname))
450 lf = open(os.path.join(outdir, '_static', listname), 'w')
# common tail shared by the deb and deb-src lines
451 aptcfg = '%s %s main contrib non-free\n' % (cfg.get('mirrors', mirror),
453 lf.write('deb %s' % aptcfg)
454 lf.write('deb-src %s' % aptcfg)
# render the overview page from the collected repos mapping
457 srclist_template = jinja_env.get_template('sources_lists.rst')
458 sl = open(os.path.join(outdir, 'sources_lists'), 'w')
459 sl.write(srclist_template.render(repos=repos))
# Write the package TOC page plus one ReST page per package into outdir.
# NOTE(review): line-sampled listing; the package loop head and the skip
# condition for unavailable pages are not visible.
463 def write_pkgpages(jinja_env, cfg, db, outdir, addenum_dir):
465 create_dir(os.path.join(outdir, 'pkgs'))
467 # generate the TOC with all packages
468 toc_template = jinja_env.get_template('pkgs_toc.rst')
469 toc = open(os.path.join(outdir, 'pkgs.rst'), 'w')
470 toc.write(toc_template.render(pkgs=db.keys()))
473 # and now each individual package page
474 pkg_template = jinja_env.get_template('pkg.rst')
476 page = generate_pkgpage(p, cfg, db, pkg_template, addenum_dir)
477 # when no page is available skip this package
480 pf = open(os.path.join(outdir, 'pkgs', p + '.rst'), 'w')
# NOTE(review): generate_pkgpage() is invoked a second time here although
# its result was already bound to `page` above -- the page is rendered
# twice per package; writing `page` instead would avoid the rework
481 pf.write(generate_pkgpage(p, cfg, db, pkg_template, addenum_dir))
# Populate the OptionParser `op` with this tool's command line options.
# NOTE(review): line-sampled listing; default= arguments and the
# --release-url help text are not visible.
485 def prepOptParser(op):
486 # use module docstring for help output
487 op.usage = "%s [OPTIONS]\n\n" % sys.argv[0] + __doc__
# path of the db file to read/write
489 op.add_option("--db",
490 action="store", type="string", dest="db",
492 help="Database file to read. Default: None")
493 op.add_option("--cfg",
495 action="store", type="string", dest="cfg",
497 help="Repository config file.")
499 op.add_option("-o", "--outdir",
500 action="store", type="string", dest="outdir",
502 help="Target directory for ReST output. Default: None")
# repeatable: collects all given repository Release URLs into a list
504 op.add_option("-r", "--release-url",
505 action="append", dest="release_urls",
508 op.add_option("--pkgaddenum", action="store", dest="addenum_dir",
509 type="string", default=None, help="None")
# Entry point: parse options, then run either the 'updatedb' command
# (rebuild the package database from task files, blends, repositories and
# DDE) or the page-generation command.  NOTE(review): line-sampled listing;
# the enclosing def, command validation branches, cfg.read() and several
# exits are not visible.
513 op = OptionParser(version="%prog 0.0.2")
516 (opts, args) = op.parse_args()
# exactly one positional command is expected
519 print('There needs to be exactly one command')
# --cfg and --db are required for all commands
525 print("'--cfg' option is mandatory.")
528 print("'--db' option is mandatory.")
532 cfg = SafeConfigParser()
535 # load existing db, unless renew is requested
536 if cmd == 'updatedb':
# seed package names from configured Blends task files
538 if cfg.has_option('packages', 'select taskfiles'):
539 db = add_pkgfromtaskfile(db, cfg.get('packages',
540 'select taskfiles').split())
542 # add additional package names from config file
543 if cfg.has_option('packages', 'select names'):
544 for p in cfg.get('packages', 'select names').split():
545 if not db.has_key(p):
546 db[p] = get_emptydbentry()
548 # get info from task files
549 if cfg.has_option('packages', 'prospective'):
550 for url in cfg.get('packages', 'prospective').split():
551 db = import_blendstask(db, url)
553 # parse NeuroDebian repository
554 if cfg.has_option('neurodebian', 'releases'):
555 for rurl in cfg.get('neurodebian', 'releases').split():
556 db = import_release(cfg, db, rurl)
558 # collect package information from DDE
559 db = import_dde(cfg, db)
# persist the freshly built database
561 store_db(db, opts.db)
565 # load the db from file
566 db = read_db(opts.db)
# templates are shipped inside the 'neurodebian' package
569 jinja_env = Environment(loader=PackageLoader('neurodebian', 'templates'))
571 # generate package pages and TOC and write them to files
572 write_pkgpages(jinja_env, cfg, db, opts.outdir, opts.addenum_dir)
574 write_sourceslist(jinja_env, cfg, opts.outdir)
576 if __name__ == "__main__":