2 """Tell me who you are!
7 from debian_bundle import deb822
8 from debian_bundle import debtags
9 from ConfigParser import SafeConfigParser
15 from optparse import OptionParser, Option, OptionGroup, OptionConflictError
# Map release codenames (as used in the repository) to human-readable
# distribution descriptions, used when rendering release names in the docs.
codename2descr = {
    'apsy_etch': 'Debian GNU/Linux 4.0 (etch)',
    'apsy_lenny': 'Debian GNU/Linux 5.0 (lenny)',
    'apsy_squeeze': 'Debian testing (squeeze)',
    'apsy_sid': 'Debian unstable (sid)',
    'apsy_dapper': 'Ubuntu 6.06 LTS "Dapper Drake" (dapper)',
    'apsy_edgy': 'Ubuntu 6.10 "Edgy Eft" (edgy)',
    'apsy_feisty': 'Ubuntu 7.04 "Feisty Fawn" (feisty)',
    'apsy_gutsy': 'Ubuntu 7.10 "Gutsy Gibbon" (gutsy)',
    'apsy_hardy': 'Ubuntu 8.04 LTS "Hardy Heron" (hardy)',
    'apsy_intrepid': 'Ubuntu 8.10 "Intrepid Ibex" (intrepid)',
    'apsy_jaunty': 'Ubuntu 9.04 "Jaunty Jackalope" (jaunty)',
}
def transCodename(codename):
    """Translate a known codename into a release description.

    Unknown codenames will simply be returned as is.
    """
    # dict.get with a default replaces the former `in d.keys()` test
    # and covers the fall-through case for unknown codenames
    return codename2descr.get(codename, codename)
def createDir(path):
    """Create `path`, including any missing intermediate directories.

    Does nothing if the directory already exists.
    """
    if os.path.exists(path):
        return
    # os.makedirs creates all missing components in one call, replacing
    # the former manual split/join loop over path components
    os.makedirs(path)
class AptListsCache(object):
    """File cache for APT lists and other repository files.

    Files are stored in one writable cache directory; any number of
    additional read-only cache locations are searched as well (the
    system APT cache is always included).
    """
    def __init__(self, cachedir='cache', ro_cachedirs=None, init_db=None):
        # writable cache location
        self.cachedir = cachedir

        if ro_cachedirs is not None:
            # copy, so appending the system cache below does not mutate
            # the caller's list (the original appended in place)
            self.ro_cachedirs = list(ro_cachedirs)
        else:
            self.ro_cachedirs = []

        # always use system cache
        self.ro_cachedirs.append('/var/lib/apt/lists/')

        # make sure the writable cache location exists
        createDir(self.cachedir)
71 def get(self, url, update=False):
72 """Looks in the cache if the file is there and takes the cached one.
73 Otherwise it is downloaded first.
75 Knows how to deal with http:// and svn:// URLs.
80 # look whether it is compressed
81 cext = url.split('.')[-1]
82 if cext in ['gz', 'bz2']:
83 target_url = url[:-1 * len(cext) -1]
85 # assume not compressed
89 # turn url into a filename -- mimik what APT does for
91 tfilename = '_'.join(target_url.split('/')[2:])
93 # if we need to download anyway do not search
95 cfilename = os.path.join(self.cachedir, tfilename)
97 # look for the uncompressed file anywhere in the cache
99 for cp in [self.cachedir] + self.ro_cachedirs:
100 if os.path.exists(os.path.join(cp, tfilename)):
101 cfilename = os.path.join(cp, tfilename)
104 if cfilename is None:
106 cfilename = os.path.join(self.cachedir, tfilename)
109 # if updated needed -- download
111 print 'Caching file from %s' % url
113 if url.startswith('svn://'):
115 pysvn.Client().export(url, cfilename)
116 if url.startswith('http://'):
118 tempfile, ignored = urllib.urlretrieve(url)
123 decompressor = 'gzip'
125 decompressor = 'bzip2'
130 "Don't know how to decompress %s files" \
133 if not decompressor is None:
134 if subprocess.call([decompressor, '-d', '-q', '-f',
136 raise RuntimeError, \
137 "Something went wrong while decompressing '%s'" \
140 # move decompressed file into cache
141 shutil.move(os.path.splitext(tempfile)[0], cfilename)
143 # XXX do we need that if explicit filename is provided?
147 fh = open(cfilename, 'r')
class DebianPkgArchive(SafeConfigParser):
    """Database of packages in the archive, stored INI-style.

    Each package is a section; per-release information is kept in
    per-section options (e.g. 'releases', 'component <codename>').
    """
    def __init__(self, cache=None, init_db=None):
        """
        :Parameters:
          cache: AptListsCache or None
            File cache to use; a fresh one is created if None.
          init_db: filename or None
            Existing database file to initialize from.
        """
        SafeConfigParser.__init__(self)

        # read an existing database if provided
        # NOTE(review): reconstructed — confirm init_db is loaded via read()
        if init_db is not None:
            self.read(init_db)

        # use provided file cache or use fresh one
        if cache is not None:
            self.cache = cache
        else:
            self.cache = AptListsCache()

        # debtags database for per-package tag lookups
        self.dtags = debtags.DB()
        self.dtags.read(open('/var/lib/debtags/package-tags'))

        # init package filter
        self.pkgfilter = None

        self._updateReleases()
183 def _updateReleases(self):
186 for p in self.sections():
187 if not self.has_option(p, 'releases'):
190 # for all releases of this package
192 [rel.strip() for rel in self.get(p, 'releases').split(',')]:
194 if not self.releases.has_key(r):
195 self.releases[r] = []
198 component = self.get(p, 'component %s' % r)
200 if not component in self.releases[r]:
201 self.releases[r].append(component)
205 """Generate INI file content for current content.
207 # make adaptor to use str as file-like (needed for ConfigParser.write()
208 class file2str(object):
211 def write(self, val):
222 def save(self, filename):
223 """Write current content to a file.
225 f = open(filename, 'w')
230 def ensureUnique(self, section, option, value):
231 if not self.has_option(section, option):
232 self.set(section, option, value)
234 if not self.get(section, option) == value:
235 raise ValueError, "%s: %s is not unique (%s != %s)" \
237 self.get(section, option), value)
240 def appendUniqueCSV(self, section, option, value):
243 if not self.has_option(section, option):
244 self.set(section, option, value)
246 l = self.get(section, option).split(', ')
248 self.set(section, option, ', '.join(l + [value]))
251 def importRelease(self, rurl, force_update=False):
252 # root URL of the repository
253 baseurl = '/'.join(rurl.split('/')[:-1])
254 # get the release file from the cache
255 release_file = self.cache.get(rurl, update=force_update)
257 # create parser instance
258 rp = deb822.Release(release_file)
260 # architectures on this dist
261 archs = rp['Architectures'].split()
262 components = rp['Components'].split()
263 # compile a new codename that also considers the repository label
264 # to distinguish between official and unofficial repos.
265 codename = '_'.join([rp['Label'], rp['Codename']])
267 # compile the list of Packages files to parse and parse them
270 # compile packages URL
271 pkgsurl = '/'.join([baseurl, c, 'binary-' + a, 'Packages.bz2'])
273 # retrieve from cache
274 packages_file = self.cache.get(pkgsurl,
278 self._parsePkgsFile(packages_file, codename, c, baseurl)
281 packages_file.close()
286 self._updateReleases()
289 def _parsePkgsFile(self, fh, codename, component, baseurl):
295 Codename of the release
297 The archive component this packages file corresponds to.
299 for stanza in deb822.Packages.iter_paragraphs(fh):
300 self._storePkg(stanza, codename, component, baseurl)
303 def _storePkg(self, st, codename, component, baseurl):
310 if not self.has_section(pkg):
311 self.add_section(pkg)
313 # do nothing if package is not in filter if there is any
314 if not self.pkgfilter is None and not pkg in self.pkgfilter:
315 self.ensureUnique(pkg, 'visibility', 'shadowed')
317 self.ensureUnique(pkg, 'visibility', 'featured')
320 self.appendUniqueCSV(pkg, "releases", codename)
323 self.appendUniqueCSV(pkg, "archs %s" % codename, st['Architecture'])
326 self.ensureUnique(pkg,
327 "version %s %s" % (codename, st['Architecture']),
331 self.ensureUnique(pkg,
332 "file %s %s" % (codename, st['Architecture']),
333 '/'.join(baseurl.split('/')[:-2] + [st['Filename']]))
336 self.ensureUnique(pkg, 'component ' + codename, component)
339 self.ensureUnique(pkg, "poolurl %s" % codename,
340 '/'.join(baseurl.split('/')[:-2] \
341 + [os.path.dirname(st['Filename'])]))
344 # now the stuff where a single variant is sufficient and where we go for
345 # the latest available one
346 if self.has_option(pkg, "newest version") \
347 and apt.VersionCompare(st['Version'],
348 self.get(pkg, "newest version")) < 0:
351 # everything from here will overwrite existing ones
353 # we seems to have an updated package
354 self.set(pkg, "newest version", st['Version'])
357 self.set(pkg, "description", st['Description'].replace('%', '%%'))
360 self.set(pkg, "maintainer", st['Maintainer'])
363 if st.has_key('Homepage'):
364 self.set(pkg, 'homepage', st['Homepage'])
367 debtags = self.dtags.tagsOfPackage(pkg)
369 self.set(pkg, 'debtags', ', '.join(debtags))
372 def writeSourcesLists(self, outdir):
374 createDir(os.path.join(outdir, 'static'))
376 fl = open(os.path.join(outdir, 'sources_lists'), 'w')
377 for trans, r in sorted([(transCodename(k), k)
378 for k in self.releases.keys()]):
379 f = open(os.path.join(outdir, 'static',
380 'debneuro.%s.sources.list' % r),
382 f.write("deb http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \
383 % (r, ' '.join(self.releases[r])))
384 f.write("deb-src http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \
385 % (r, ' '.join(self.releases[r])))
386 # XXX use :download: role from sphinx 0.6 on
387 fl.write('* `%s <http://apsy.gse.uni-magdeburg.de/debian/html/_static/debneuro.%s.sources.list>`_\n' \
393 def importProspectivePkgsFromTaskFile(self, url):
394 fh = self.cache.get(url)
396 for st in deb822.Packages.iter_paragraphs(fh):
397 # do not stop unless we have a description
398 if not st.has_key('Pkg-Description'):
401 if st.has_key('Depends'):
403 elif st.has_key('Suggests'):
406 print 'Warning: Cannot determine name of prospective package ' \
411 if not self.has_section(pkg):
412 self.add_section(pkg)
414 # prospective ones are always featured
415 self.ensureUnique(pkg, 'visibility', 'featured')
418 self.set(pkg, "description",
419 st['Pkg-Description'].replace('%', '%%'))
422 if st.has_key('Homepage'):
423 self.set(pkg, 'homepage', st['Homepage'])
425 if st.has_key('Pkg-URL'):
426 self.set(pkg, 'external pkg url', st['Pkg-URL'])
428 if st.has_key('WNPP'):
429 self.set(pkg, 'wnpp debian', st['WNPP'])
431 if st.has_key('License'):
432 self.set(pkg, 'license', st['License'])
434 # treat responsible as maintainer
435 if st.has_key('Responsible'):
436 self.set(pkg, "maintainer", st['Responsible'])
439 def setPkgFilterFromTaskFile(self, urls):
443 fh = self.cache.get(task)
446 # loop over all stanzas
447 for stanza in deb822.Packages.iter_paragraphs(fh):
448 if stanza.has_key('Depends'):
449 pkg = stanza['Depends']
450 elif stanza.has_key('Suggests'):
451 pkg = stanza['Suggests']
455 # account for multiple packages per line
457 pkgs += [p.strip() for p in pkg.split(',')]
459 pkgs.append(pkg.strip())
462 self.pkgfilter = pkgs
465 def genPkgPage(db, pkg):
472 descr = db.get(pkg, 'description').split('\n')
476 # only put index markup for featured packages
477 if db.get(pkg, 'visibility') == 'featured':
478 s = '.. index:: %s, ' % pkg
481 if db.has_option(pkg, 'debtags'):
483 tags = [t for t in db.get(pkg, 'debtags').split(', ')
484 if t.split('::')[0] in ['field', 'works-with']]
486 s += '.. index:: %s\n\n' % ', '.join(tags)
488 # main ref target for this package
489 s += '.. _deb_' + pkg + ':\n'
491 # separate header from the rest
494 header = '%s -- %s' % (pkg, descr[0])
495 s += '*' * (len(header) + 2)
496 s += '\n ' + header + '\n'
497 s += '*' * (len(header) + 2) + '\n\n'
500 s += '\n'.join([l.lstrip(' .') for l in descr[1:]])
503 if db.has_option(pkg, 'homepage'):
504 s += '\n**Homepage**: %s\n' % db.get(pkg, 'homepage')
506 s += '\nBinary packages'\
507 '\n===============\n'
509 s += genMaintainerSection(db, pkg)
511 if db.has_option(pkg, 'wnpp debian'):
512 s += 'A Debian packaging effort has been officially announced. ' \
513 'Please see the corresponding `intent-to-package bug report`_ ' \
514 'for more information about its current status.\n\n' \
515 '.. _intent-to-package bug report: http://bugs.debian.org/%s\n\n' \
516 % db.get(pkg, 'wnpp debian')
518 s += genBinaryPackageSummary(db, pkg, 'DebNeuro repository')
520 # if db.has_option(pkg, 'external pkg url'):
521 # s += 'Other unofficial ressources\n' \
522 # '---------------------------\n\n'
523 # s += 'An unofficial package is available from %s\ .\n\n' \
524 # % db.get(pkg, 'external pkg url')
528 def genMaintainerSection(db, pkg):
531 if not db.has_option(pkg, 'maintainer'):
532 s += '\nCurrently, nobody seems to be responsible for creating or ' \
533 'maintaining Debian packages of this software.\n\n'
536 # there is someone responsible
537 maintainer = db.get(pkg, 'maintainer')
539 # do we have actual packages, or is it just a note
540 if not db.has_option(pkg, 'releases'):
541 s += '\nThere are currently no binary packages available. However, ' \
542 'the last known packaging effort was started by %s which ' \
543 'meanwhile might have led to an initial unofficial Debian ' \
544 'packaging.\n\n' % maintainer
547 s += '\n**Maintainer**: %s\n\n' % maintainer
549 if not maintainer.startswith('Michael Hanke'):
551 s += ' Do not contact the original package maintainer regarding ' \
552 ' bugs in this unofficial binary package. Instead, contact ' \
553 ' the repository maintainer at michael.hanke@gmail.com\ .'
558 def genBinaryPackageSummary(db, pkg, reposname):
559 # do nothing if the are no packages
560 if not db.has_option(pkg, 'releases'):
563 s = '\n%s\n%s\n' % (reposname, '-' * len(reposname))
565 s += 'The repository contains binary packages for the following ' \
566 'distribution releases and system architectures. Note, that the ' \
567 'corresponding source packages are of course available too. Please ' \
568 'click on the release name to access them.\n\n'
570 # for all releases this package is part of
571 for rel in db.get(pkg, 'releases').split(', '):
572 # write release description and component
573 s += '\n`%s <%s>`_:\n ' \
574 % (transCodename(rel),
575 db.get(pkg, 'poolurl %s' % rel))
577 s += '[%s] ' % db.get(pkg, 'component ' + rel)
579 # archs this package is available for
580 archs = db.get(pkg, 'archs ' + rel).split(', ')
582 # extract all present versions for any arch
583 versions = [db.get(pkg, 'version %s %s' % (rel, arch))
586 # if there is only a single version for all of them, simplify the list
587 single_ver = versions.count(versions[0]) == len(versions)
590 # only one version string for all
591 s += ', '.join(['`%s <%s>`_' \
592 % (arch, db.get(pkg, 'file %s %s' % (rel, arch)))
594 s += ' (%s)' % versions[0]
596 # a separate version string for each arch
597 s += ', '.join(['`%s <%s>`_ (%s)' \
599 db.get(pkg, 'file %s %s' % (rel, arch)),
600 db.get(pkg, 'version %s %s' % (rel, arch)))
def maintainer2email(maint):
    """Return the bare email address from a 'Name <email>' maintainer string."""
    fields = maint.split('<')
    address = fields[1]
    return address.rstrip('>')
611 def writePkgsBy(db, key, value2id, outdir, heading):
613 nwkey = key.replace(' ', '')
614 createDir(os.path.join(outdir, 'by%s' % nwkey))
618 # get packages by maintainer
619 for p in db.sections():
620 # only featured packages
621 if db.get(p, 'visibility') == 'shadowed':
624 if db.has_option(p, key):
627 if not collector.has_key(by):
628 collector[by] = (value2id(by), [p])
630 collector[by][1].append(p)
632 toc = open(os.path.join(outdir, 'by%s.rst' % nwkey), 'w')
633 toc.write('.. index:: Packages by %s\n.. _by%s:\n\n' % (key, key))
635 toc_heading = 'Packages by %s' % key
636 toc.write('%s\n%s\n\n' % (toc_heading, '=' * len(toc_heading)))
637 toc.write('.. toctree::\n :maxdepth: 1\n\n')
639 # summary page per maintainer
640 for by in sorted(collector.keys()):
641 toc.write(' by%s/%s\n' % (nwkey, collector[by][0]))
643 fh = open(os.path.join(outdir,
645 collector[by][0] + '.rst'), 'w')
647 fh.write('.. index:: %s\n.. _%s:\n\n' % (by, by))
649 hdr = heading.replace('<ITEM>', by)
651 fh.write('=' * len(hdr) + '\n\n')
653 # write sorted list of packages
654 for p in sorted(collector[by][1]):
655 fh.write('* :ref:`deb_%s`\n' % p)
662 def writeRst(db, outdir):
664 createDir(os.path.join(outdir, 'pkgs'))
667 toc = open(os.path.join(outdir, 'pkgs.rst'), 'w')
669 toc.write('.. _full_pkg_list:\n\n')
670 toc.write('Archive content\n===============\n\n'
671 '.. toctree::\n :maxdepth: 1\n\n')
673 for p in sorted(db.sections()):
674 print "Generating page for '%s'" % p
675 pf = open(os.path.join(outdir, 'pkgs', '%s.rst' % p), 'w')
676 pf.write(genPkgPage(db, p))
678 # check for doc addons
679 if os.path.exists(os.path.join(outdir, 'pkgs_addenum/%s.rst' % p)):
680 pf.write('\n\n.. include:: ../pkgs_addenum/%s.rst\n' %p)
682 toc.write(' pkgs/%s\n' % p)
688 def prepOptParser(op):
689 # use module docstring for help output
690 op.usage = "%s [OPTIONS]\n\n" % sys.argv[0] + __doc__
692 op.add_option("--db",
693 action="store", type="string", dest="db",
695 help="Database file to read. Default: None")
697 op.add_option("--release-url",
698 action="append", dest="release_urls",
703 op = OptionParser(version="%prog 0.0.1")
706 (opts, args) = op.parse_args()
709 dpa = DebianPkgArchive(init_db=opts.db)
711 target_dir = 'build/source'
715 # 'http://apsy.gse.uni-magdeburg.de/debian/dists/dapper/Release',
716 # 'http://apsy.gse.uni-magdeburg.de/debian/dists/gutsy/Release',
717 # 'http://apsy.gse.uni-magdeburg.de/debian/dists/hardy/Release',
718 # 'http://apsy.gse.uni-magdeburg.de/debian/dists/intrepid/Release',
719 # 'http://apsy.gse.uni-magdeburg.de/debian/dists/etch/Release',
720 # 'http://apsy.gse.uni-magdeburg.de/debian/dists/lenny/Release',
721 # 'http://apsy.gse.uni-magdeburg.de/debian/dists/squeeze/Release',
722 # 'http://apsy.gse.uni-magdeburg.de/debian/dists/sid/Release',
726 # 'svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging',
727 # 'svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging-dev',
728 # 'svn://svn.debian.org/blends/projects/science/trunk/debian-science/tasks/neuroscience-cognitive',
731 # dpa.setPkgFilterFromTaskFile(taskfiles)
732 # dpa.pkgfilter += ['fsl-doc', 'fslview-doc', 'fsl-atlases', 'fsl-possum-data',
733 # 'fsl-first-data', 'fsl-feeds']
735 # dpa.importProspectivePkgsFromTaskFile(taskfiles[0])
737 # for rurl in opts.release_urls:
738 # dpa.importRelease(rurl, force_update=False)
740 # dpa.writeSourcesLists(target_dir)
742 # writeRst(dpa, target_dir)
743 # writePkgsBy(dpa, 'maintainer', maintainer2email, target_dir,
744 # 'Packages maintained by <ITEM>')
746 # dpa.save('build/db.db')
749 if __name__ == "__main__":