2 """Tell me who you are!
7 from debian_bundle import deb822
8 from debian_bundle import debtags
9 from ConfigParser import SafeConfigParser
15 from optparse import OptionParser, Option, OptionGroup, OptionConflictError
# NOTE(review): partially recovered listing -- the leading integer on each
# line is its original line number; gaps in the numbering are lines missing
# from this view. Code kept verbatim; commentary only.
#
# transCodename(codename, cfg) -> str
# Maps a repository-qualified codename (e.g. 'apsy_lenny') to a human-readable
# release description looked up in the '[release codenames]' section of *cfg*.
# The part up to and including the first '_' is stripped first.
# The fallback 'return codename' for unknown codenames sits on the missing
# lines (>= 29) -- implied by the docstring, TODO confirm.
18 def transCodename(codename, cfg):
19 """Translate a known codename into a release description.
21 Unknown codenames will simply be returned as is.
23 # strip repository codename
24 codename = codename[codename.find('_') + 1:]
26 # if we know something, tell
27 if codename in cfg.options('release codenames'):
28 return cfg.get('release codenames', codename)
# NOTE(review): fragment of what call sites (createDir(...) below) show to be
# a 'createDir(path)' helper; its 'def' line (~31-33) and several body lines
# are missing from this elided listing. Leading integers are original line
# numbers. Behaves like a poor man's os.makedirs: walks the path components
# left to right and creates each missing prefix directory -- the os.mkdir
# call presumably sits on the missing lines 43-44, TODO confirm. The body of
# the early-exit 'if os.path.exists(path):' (lines 35-36) is also missing.
34 if os.path.exists(path):
37 ps = path.split(os.path.sep)
39 for i in range(1,len(ps) + 1):
40 p = os.path.sep.join(ps[:i])
42 if not os.path.exists(p):
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# AptListsCache: local download cache for APT index files (Release/Packages).
# __init__ parameters:
#   cachedir     -- writable cache directory, created on construction
#   ro_cachedirs -- optional list of additional read-only cache directories
#                   searched by get(); defaults to an empty list (the system
#                   /var/lib/apt/lists/ entry is commented out below)
#   init_db      -- accepted but not used in any line visible here; TODO
#                   confirm against the missing lines
46 class AptListsCache(object):
47 def __init__(self, cachedir='cache', ro_cachedirs=None, init_db=None):
48 self.cachedir = cachedir
50 if not ro_cachedirs is None:
51 self.ro_cachedirs = ro_cachedirs
53 self.ro_cachedirs = []
55 # always use system cache
56 #self.ro_cachedirs.append('/var/lib/apt/lists/')
59 createDir(self.cachedir)
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# get(url, update=False): return the cached copy of *url*, downloading it
# first when absent or when update=True. Visible logic:
#   * '.gz'/'.bz2' suffix detection; target_url/filename refer to the
#     *uncompressed* name (APT-style: URL path components joined with '_')
#   * cache search across self.cachedir plus all read-only cache dirs
#   * svn:// URLs fetched via pysvn export, http:// via urllib.urlretrieve
#   * compressed downloads piped through external 'gzip'/'bzip2 -d' via
#     subprocess.call, then shutil.move'd into the cache
#   * finally the cached file is opened 'r' (line 138); the 'return fh' is
#     on missing lines -- TODO confirm
# NOTE(review): line 109 binds the local name 'tempfile' -- if the stdlib
# 'tempfile' module is imported elsewhere in this file, that name is
# shadowed here; verify against the full source.
62 def get(self, url, update=False):
63 """Looks in the cache if the file is there and takes the cached one.
64 Otherwise it is downloaded first.
66 Knows how to deal with http:// and svn:// URLs.
71 # look whether it is compressed
72 cext = url.split('.')[-1]
73 if cext in ['gz', 'bz2']:
74 target_url = url[:-1 * len(cext) -1]
76 # assume not compressed
80 # turn url into a filename -- mimik what APT does for
82 tfilename = '_'.join(target_url.split('/')[2:])
84 # if we need to download anyway do not search
86 cfilename = os.path.join(self.cachedir, tfilename)
88 # look for the uncompressed file anywhere in the cache
90 for cp in [self.cachedir] + self.ro_cachedirs:
91 if os.path.exists(os.path.join(cp, tfilename)):
92 cfilename = os.path.join(cp, tfilename)
97 cfilename = os.path.join(self.cachedir, tfilename)
100 # if updated needed -- download
102 print 'Caching file from %s' % url
104 if url.startswith('svn://'):
106 pysvn.Client().export(url, cfilename)
107 if url.startswith('http://'):
109 tempfile, ignored = urllib.urlretrieve(url)
114 decompressor = 'gzip'
116 decompressor = 'bzip2'
121 "Don't know how to decompress %s files" \
124 if not decompressor is None:
125 if subprocess.call([decompressor, '-d', '-q', '-f',
127 raise RuntimeError, \
128 "Something went wrong while decompressing '%s'" \
131 # move decompressed file into cache
132 shutil.move(os.path.splitext(tempfile)[0], cfilename)
134 # XXX do we need that if explicit filename is provided?
138 fh = open(cfilename, 'r')
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# DebianPkgArchive: the package "database". It *is* a SafeConfigParser --
# one INI section per package, options like 'releases', 'description',
# '<codename> version <arch>' (see _storePkg below).
# __init__:
#   cache   -- optional AptListsCache; a fresh one is created when None
#   init_db -- optional path of an existing database to load (the actual
#              read call sits on missing lines after 155 -- TODO confirm)
# Reads the system debtags database from the hard-coded path
# /var/lib/debtags/package-tags (file handle is never closed here), then
# initializes self.pkgfilter = None (no filtering) and derives the
# per-release component map via _updateReleases().
145 class DebianPkgArchive(SafeConfigParser):
148 def __init__(self, cache=None, init_db=None):
152 SafeConfigParser.__init__(self)
154 # read an existing database if provided
155 if not init_db is None:
158 # use provided file cache or use fresh one
159 if not cache is None:
162 self.cache = AptListsCache()
165 self.dtags = debtags.DB()
166 self.dtags.read(open('/var/lib/debtags/package-tags'))
168 # init package filter
169 self.pkgfilter = None
171 self._updateReleases()
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# _updateReleases(): rebuild self.releases, a dict mapping each release
# codename found in any package's comma-separated 'releases' option to the
# list of archive components ('<release> component' option) that release
# carries. The initialization 'self.releases = {}' and the 'for r in'
# half of the loop header (line 184) are on missing lines -- TODO confirm.
174 def _updateReleases(self):
178 for p in self.sections():
179 # no releases, nothing to do
180 if not self.has_option(p, 'releases'):
183 # for all releases of this package
185 [rel.strip() for rel in self.get(p, 'releases').split(',')]:
187 if not self.releases.has_key(r):
188 self.releases[r] = []
191 component = self.get(p, '%s component' % r)
193 if not component in self.releases[r]:
194 self.releases[r].append(component)
# NOTE(review): fragment -- the enclosing method's 'def' line (~196-197) is
# missing from this elided listing; leading integers are original line
# numbers. Per its docstring the method renders the ConfigParser content as
# an INI string; 'file2str' is a minimal file-like adaptor (only write())
# that accumulates the text SafeConfigParser.write() emits -- the buffer
# attribute and the surrounding method's return are on missing lines.
198 """Generate INI file content for current content.
200 # make adaptor to use str as file-like (needed for ConfigParser.write()
201 class file2str(object):
204 def write(self, val):
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# save(filename): serialize the database (INI format) to *filename*.
# The actual write and the close of 'f' are on the missing lines after 218
# -- TODO confirm (a 'with open(...)' would be preferable, but Python 2
# era code; do not assume).
215 def save(self, filename):
216 """Write current content to a file.
218 f = open(filename, 'w')
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# ensureUnique(section, option, value): set *option* if absent; if already
# present with a *different* value, raise ValueError (old-style Python 2
# 'raise E, msg' syntax). Line 226 (presumably 'else:') and the first part
# of the ValueError argument tuple (line 229) are missing from this view.
223 def ensureUnique(self, section, option, value):
224 if not self.has_option(section, option):
225 self.set(section, option, value)
227 if not self.get(section, option) == value:
228 raise ValueError, "%s: %s is not unique (%s != %s)" \
230 self.get(section, option), value)
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# appendUniqueCSV(section, option, value): the option value is a
# "', '"-joined list; create it with *value* when absent, otherwise append
# *value* to the list. The duplicate check guarding the append (line 240,
# presumably 'if not value in l:') is on a missing line -- TODO confirm.
233 def appendUniqueCSV(self, section, option, value):
236 if not self.has_option(section, option):
237 self.set(section, option, value)
239 l = self.get(section, option).split(', ')
241 self.set(section, option, ', '.join(l + [value]))
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# getReleaseInfo(rurl, force_update=False) -> dict
# Fetches the APT 'Release' file at *rurl* (via the cache) and returns
# baseurl (rurl minus its last path component), the architecture and
# component lists, the plain codename, and 'labelcode' = '<Label>_<Codename>'
# used throughout to distinguish official vs. unofficial repositories.
# The returned dict also carries 'label', whose assignment (presumably
# rp['Label'], lines 261-264) is missing from this view -- TODO confirm.
244 def getReleaseInfo(self, rurl, force_update=False):
245 # root URL of the repository
246 baseurl = '/'.join(rurl.split('/')[:-1])
247 # get the release file from the cache
248 release_file = self.cache.get(rurl, update=force_update)
250 # create parser instance
251 rp = deb822.Release(release_file)
253 # architectures on this dist
254 archs = rp['Architectures'].split()
255 components = rp['Components'].split()
256 # compile a new codename that also considers the repository label
257 # to distinguish between official and unofficial repos.
259 codename = rp['Codename']
260 labelcode = '_'.join([rp['Label'], rp['Codename']])
265 return {'baseurl': baseurl, 'archs': archs, 'components': components,
266 'codename': codename, 'label': label, 'labelcode': labelcode}
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# checkOfficialRelease(rurl, force_update=False): annotate packages that are
# *already* in the database with release/component/version information from
# an official archive. Only the i386 Packages file of each component is
# consulted (per the comment: assumed most complete). The binding of 'pkg'
# (presumably st['Package'], line 282) is on a missing line -- TODO confirm.
# New sections are NOT added here (guarded by has_section).
269 def checkOfficialRelease(self, rurl, force_update=False):
270 ri = self.getReleaseInfo(rurl, force_update=force_update)
272 # try with a i386 packages file, since that should be the most common
274 # loop over all components
275 for c in ri['components']:
276 pkgsurl = self.buildPkgsURL(ri['baseurl'], c, 'i386')
277 packages_file = self.cache.get(pkgsurl,
280 # now check every package, whether we also have it in the DB already
281 for st in deb822.Packages.iter_paragraphs(packages_file):
283 if self.has_section(pkg):
284 # store the label code
285 self.appendUniqueCSV(pkg, "releases", ri['labelcode'])
286 # and the associated component
287 self.ensureUnique(pkg, "%s component" % ri['labelcode'], c)
289 self.set(pkg, "%s version" % ri['labelcode'], st['Version'])
292 packages_file.close()
def buildPkgsURL(self, baseurl, component, arch):
    """Assemble the URL of the bzip2-compressed Packages index.

    Joins *baseurl* (the dists/<release> directory of a repository),
    the archive *component* (e.g. 'main') and the architecture-specific
    'binary-<arch>' subdirectory, ending in 'Packages.bz2'.
    """
    pieces = [baseurl, component, 'binary-' + arch, 'Packages.bz2']
    return '/'.join(pieces)
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# importRelease(rurl, force_update=False): full import of one repository
# release -- for every component x architecture, fetch the Packages index
# through the cache, parse each stanza with deb822 and store it via
# _storePkg() under the release's labelcode; finally refresh the
# release->components map. Error handling around the parse loop, if any,
# sits on the missing lines (311-318) -- TODO confirm.
299 def importRelease(self, rurl, force_update=False):
301 ri = self.getReleaseInfo(rurl, force_update=force_update)
303 # compile the list of Packages files to parse and parse them
304 for c in ri['components']:
305 for a in ri['archs']:
306 # compile packages URL
307 pkgsurl = self.buildPkgsURL(ri['baseurl'], c, a)
309 # retrieve from cache
310 packages_file = self.cache.get(pkgsurl,
314 for stanza in deb822.Packages.iter_paragraphs(packages_file):
315 self._storePkg(stanza, ri['labelcode'], c, ri['baseurl'])
319 packages_file.close()
321 self._updateReleases()
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# _storePkg(st, codename, component, baseurl): record one deb822 Packages
# stanza *st* in the database under release *codename* (the labelcode).
# The binding of 'pkg' (presumably st['Package'], lines 325-330) is missing
# from this view -- TODO confirm. Visible behavior:
#   * visibility: 'shadowed' when a pkgfilter is set and the package is not
#     in it, else 'featured' (via ensureUnique, so first import wins)
#   * per-codename options: releases, '<codename> archs', per-arch version
#     and file URL, component, poolurl (file/pool URLs are rooted two path
#     components above baseurl, i.e. above 'dists/<release>')
#   * "newest version" block: skipped when the stored version already
#     compares newer via apt.VersionCompare (an 'apt' import is not visible
#     in this chunk -- verify against full source); otherwise description
#     ('%' escaped to '%%' for ConfigParser), maintainer, optional homepage
#     and the package's debtags are (over)written.
# The early 'return' ending the version guard (lines 370-371) is missing.
324 def _storePkg(self, st, codename, component, baseurl):
331 if not self.has_section(pkg):
332 self.add_section(pkg)
334 # do nothing if package is not in filter if there is any
335 if not self.pkgfilter is None and not pkg in self.pkgfilter:
336 self.ensureUnique(pkg, 'visibility', 'shadowed')
338 self.ensureUnique(pkg, 'visibility', 'featured')
341 self.appendUniqueCSV(pkg, "releases", codename)
344 self.appendUniqueCSV(pkg, "%s archs" % codename, st['Architecture'])
347 self.ensureUnique(pkg,
348 "%s version %s" % (codename, st['Architecture']),
352 self.ensureUnique(pkg,
353 "%s file %s" % (codename, st['Architecture']),
354 '/'.join(baseurl.split('/')[:-2] + [st['Filename']]))
357 self.ensureUnique(pkg, '%s component' % codename, component)
360 self.ensureUnique(pkg, "%s poolurl" % codename,
361 '/'.join(baseurl.split('/')[:-2] \
362 + [os.path.dirname(st['Filename'])]))
365 # now the stuff where a single variant is sufficient and where we go for
366 # the latest available one
367 if self.has_option(pkg, "newest version") \
368 and apt.VersionCompare(st['Version'],
369 self.get(pkg, "newest version")) < 0:
372 # everything from here will overwrite existing ones
374 # we seems to have an updated package
375 self.set(pkg, "newest version", st['Version'])
378 self.set(pkg, "description", st['Description'].replace('%', '%%'))
381 self.set(pkg, "maintainer", st['Maintainer'])
384 if st.has_key('Homepage'):
385 self.set(pkg, 'homepage', st['Homepage'])
388 debtags = self.dtags.tagsOfPackage(pkg)
390 self.set(pkg, 'debtags', ', '.join(debtags))
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# writeSourcesLists(outdir, cfg): for every known release write a ready-made
# APT sources.list snippet ('static/debneuro.<rel>.sources.list' with deb +
# deb-src lines) and collect rST download links in '<outdir>/sources_lists'.
# Releases are emitted sorted by their translated (human-readable) name.
# NOTE(review): the repository URL 'http://apsy.gse.uni-magdeburg.de/debian'
# is hard-coded here although a cfg object is passed in -- a candidate for
# parameterization, but the missing lines (394, 402, 405, 414-416, incl. the
# file close() calls) make a safe rewrite impossible from this view.
# 'r.split('_')[1]' assumes every release key contains a '_' (labelcode
# form 'label_codename') -- grounded in _storePkg storing labelcodes.
393 def writeSourcesLists(self, outdir, cfg):
395 createDir(os.path.join(outdir, 'static'))
397 fl = open(os.path.join(outdir, 'sources_lists'), 'w')
398 for trans, r in sorted([(transCodename(k, cfg), k)
399 for k in self.releases.keys()]):
400 # need to turn 'apsy_lenny' back into 'lenny'
401 debneuro_r = r.split('_')[1]
403 f = open(os.path.join(outdir, 'static',
404 'debneuro.%s.sources.list' % debneuro_r),
406 f.write("deb http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \
407 % (debneuro_r, ' '.join(self.releases[r])))
408 f.write("deb-src http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \
409 % (debneuro_r, ' '.join(self.releases[r])))
410 # XXX use :download: role from sphinx 0.6 on
411 #fl.write('* `%s <http://apsy.gse.uni-magdeburg.de/debian/html/_static/debneuro.%s.sources.list>`_\n' \
412 fl.write('* `%s <_static/debneuro.%s.sources.list>`_\n' \
413 % (trans, debneuro_r))
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# importProspectivePkgsFromTaskFile(url): import not-yet-packaged software
# from a Debian Blends task file. Stanzas without 'Pkg-Description' are
# skipped (the 'continue' lines are missing from this view). The package
# name is taken from 'Depends', falling back to 'Suggests' (the actual
# 'pkg = st[...]' assignments on lines 427/429-430 are missing -- TODO
# confirm); stanzas with neither trigger the warning and are skipped.
# Prospective packages are always 'featured'; description ('%' escaped for
# ConfigParser), homepage, external pkg url, WNPP bug number, license and
# 'Responsible' (stored as maintainer) are copied over when present.
418 def importProspectivePkgsFromTaskFile(self, url):
419 fh = self.cache.get(url)
421 for st in deb822.Packages.iter_paragraphs(fh):
422 # do not stop unless we have a description
423 if not st.has_key('Pkg-Description'):
426 if st.has_key('Depends'):
428 elif st.has_key('Suggests'):
431 print 'Warning: Cannot determine name of prospective package ' \
436 if not self.has_section(pkg):
437 self.add_section(pkg)
439 # prospective ones are always featured
440 self.ensureUnique(pkg, 'visibility', 'featured')
443 self.set(pkg, "description",
444 st['Pkg-Description'].replace('%', '%%'))
447 if st.has_key('Homepage'):
448 self.set(pkg, 'homepage', st['Homepage'])
450 if st.has_key('Pkg-URL'):
451 self.set(pkg, 'external pkg url', st['Pkg-URL'])
453 if st.has_key('WNPP'):
454 self.set(pkg, 'wnpp debian', st['WNPP'])
456 if st.has_key('License'):
457 self.set(pkg, 'license', st['License'])
459 # treat responsible as maintainer
460 if st.has_key('Responsible'):
461 self.set(pkg, "maintainer", st['Responsible'])
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# setPkgFilterFromTaskFile(urls): build self.pkgfilter (the whitelist used
# by _storePkg to decide featured vs. shadowed) from one or more task
# files. The 'pkgs = []' initialization and the loop header iterating
# *urls* (binding 'task', lines 465-467) are missing from this view --
# TODO confirm. Package names come from each stanza's 'Depends' or
# 'Suggests' field; comma-separated multi-package lines are split (the
# 'if' distinguishing the two append branches, line ~481, is missing).
464 def setPkgFilterFromTaskFile(self, urls):
468 fh = self.cache.get(task)
471 # loop over all stanzas
472 for stanza in deb822.Packages.iter_paragraphs(fh):
473 if stanza.has_key('Depends'):
474 pkg = stanza['Depends']
475 elif stanza.has_key('Suggests'):
476 pkg = stanza['Suggests']
480 # account for multiple packages per line
482 pkgs += [p.strip() for p in pkg.split(',')]
484 pkgs.append(pkg.strip())
487 self.pkgfilter = pkgs
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# genPkgPage(db, pkg, cfg) -> str: render the complete rST page for one
# package: optional index directives (only 'featured' packages, plus a
# subset of debtags restricted to the 'field' and 'works-with' facets), a
# '.. _deb_<pkg>:' ref target, an overlined/underlined title built from the
# first description line, the long description, homepage, the maintainer
# section, an optional WNPP note (the triple-quoted rST template spanning
# lines 539-546 -- do not insert anything between those lines), and the
# repository/Debian-proper summaries. The initialization of 's' for the
# non-featured branch (lines 504-505) and the final 'return s' (~552+) are
# on missing lines -- TODO confirm. 'apsy' is a hard-coded repository
# label matched against release prefixes in getReposContentSummary.
490 def genPkgPage(db, pkg, cfg):
497 descr = db.get(pkg, 'description').split('\n')
501 # only put index markup for featured packages
502 if db.get(pkg, 'visibility') == 'featured':
503 s = '.. index:: %s, ' % pkg
506 # add a subset of available debtags (if present)
507 if db.has_option(pkg, 'debtags'):
509 tags = [t for t in db.get(pkg, 'debtags').split(', ')
510 if t.split('::')[0] in ['field', 'works-with']]
512 s += '.. index:: %s\n\n' % ', '.join(tags)
514 # main ref target for this package
515 s += '.. _deb_' + pkg + ':\n'
517 # separate header from the rest
520 header = '%s -- %s' % (pkg, descr[0])
521 s += '*' * (len(header) + 2)
522 s += '\n ' + header + '\n'
523 s += '*' * (len(header) + 2) + '\n\n'
526 # XXX honour formating syntax
527 s += '\n'.join([l.lstrip(' .') for l in descr[1:]])
530 if db.has_option(pkg, 'homepage'):
531 s += '\n**Homepage**: %s\n' % db.get(pkg, 'homepage')
533 s += '\nBinary packages'\
534 '\n===============\n'
536 s += genMaintainerSection(db, pkg)
538 if db.has_option(pkg, 'wnpp debian'):
540 A Debian packaging effort has been officially announced. Please see the
541 corresponding `intent-to-package bug report`_ for more information about
544 .. _intent-to-package bug report: http://bugs.debian.org/%s
546 """ % db.get(pkg, 'wnpp debian')
548 # write repository content summary for NeuroDebian
549 s += getReposContentSummary(db, cfg, 'apsy', pkg)
550 # see if there is something about a package in Debian proper
551 s += getDebianRefs(db, cfg, pkg)
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim. Several of the lines
# below (562-563, 575-577, 588-590) are *inside* triple-quoted rST string
# templates whose delimiters sit on missing lines.
#
# genMaintainerSection(db, pkg) -> str: rST fragment about who maintains
# the package. Three visible cases: no 'maintainer' option at all; a
# maintainer but no 'releases' (packaging announced, no binaries yet); and
# actual packages, where a maintainer line is emitted plus -- for packages
# NOT maintained by the repository owner -- a warning to report bugs to
# michael.hanke@gmail.com instead of upstream. The hard-coded
# 'Michael Hanke' / gmail address couple this helper to one person;
# candidate for configuration, but the surrounding missing lines preclude
# a safe rewrite from this view. Return statements are on missing lines.
556 def genMaintainerSection(db, pkg):
559 if not db.has_option(pkg, 'maintainer'):
562 Currently, nobody seems to be responsible for creating or maintaining
563 Debian packages of this software.
568 # there is someone responsible
569 maintainer = db.get(pkg, 'maintainer')
571 # do we have actual packages, or is it just a note
572 if not db.has_option(pkg, 'releases'):
575 There are currently no binary packages available. However, the last known
576 packaging effort was started by %s which meanwhile might have led to an
577 initial unofficial Debian packaging.
582 s += '\n**Maintainer**: %s\n\n' % maintainer
584 if not maintainer.startswith('Michael Hanke'):
588 Do not contact the original package maintainer regarding
589 bugs in this unofficial binary package. Instead, contact
590 the repository maintainer at michael.hanke@gmail.com\ .
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim. Lines 609-626 are part
# of a triple-quoted rST template -- do not insert text between them.
#
# getDebianRefs(db, cfg, pkg) -> str: rST section linking to Debian-proper
# resources (packages.d.o, bugs.d.o, popcon) when the package appears in
# any release whose key starts with 'Debian'; the release list is rendered
# with per-release component and version pulled from the
# 'debian_<rel> component/version' options. The empty-debrels early return
# (~605-607) and the final template arguments (632+) are on missing lines.
# NOTE(review): the user-visible template contains the typos 'offical' and
# 'ressources' -- these are runtime strings inside an elided literal, so
# they are flagged here rather than edited.
597 def getDebianRefs(db, cfg, pkg):
598 # no release, nothing to do
599 if not db.has_option(pkg, 'releases'):
601 # which Debian release is this package part of?
602 debrels = [r.split('_')[1] for r in db.get(pkg, 'releases').split(', ')
603 if r.startswith('Debian')]
604 # do nothing if there is no package in Debian proper
609 Official Debian archive
610 -----------------------
612 This package is available from the offical Debian archive for:
616 Please see the following ressources for more information:
618 * `Debian package summary page`_
619 * `Bugreports in the Debian bug tracking system`_
620 * `Debian package popularity statistics`_
622 .. _Debian package summary page: http://packages.debian.org/%s
623 .. _Bugreports in the Debian bug tracking system: http://bugs.debian.org/%s
624 .. _Debian package popularity statistics: http://qa.debian.org/popcon.php?package=%s
626 """ % ('\n* '.join(['`%s <http://www.debian.org/releases/%s>`_ *[%s]*: %s' \
627 % (transCodename(rel, cfg),
629 db.get(pkg, 'debian_%s component' % rel),
630 db.get(pkg, 'debian_%s version' % rel))
631 for rel in debrels]),
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim. Lines 646-654 sit
# inside a triple-quoted rST template whose delimiters are missing.
#
# getReposContentSummary(db, cfg, reposlabel, pkg) -> str: rST section
# listing, for every release of *pkg* whose key prefix matches
# *reposlabel*, the component, a source (pool) link, and per-architecture
# download links -- with the version shown once when identical across all
# architectures ('single_ver'), otherwise per arch. Repository display name
# comes from the '[repository labels]' cfg section. Early return for
# packages without releases, the arch loop bound to 'versions', the
# else-branch plumbing and the final return are on missing lines -- TODO
# confirm against the full source.
637 def getReposContentSummary(db, cfg, reposlabel, pkg):
638 # do nothing if the are no packages
639 if not db.has_option(pkg, 'releases'):
642 reposname = cfg.get('repository labels', reposlabel)
643 s = '\n%s\n%s\n' % (reposname, '-' * len(reposname))
646 The repository contains binary packages for the following distribution
647 releases and system architectures. The corresponding source packages
651 Do not download this package manually if you plan to use it
652 regularly. Instead configure your package manager to use this
653 repository by following the instructions on the
654 :ref:`front page <repository_howto>`.
658 # for all releases this package is part of
659 for rel in db.get(pkg, 'releases').split(', '):
660 # ignore items associated with other repositories
661 if not rel.split('_')[0] == reposlabel:
663 # write release description and component
664 s += '\n%s *[%s]*:\n ' \
665 % (transCodename(rel, cfg),
666 db.get(pkg, '%s component' % rel))
668 s += '`source <%s>`_' % db.get(pkg, '%s poolurl' % rel)
670 # archs this package is available for
671 archs = db.get(pkg, '%s archs' % rel).split(', ')
673 # extract all present versions for any arch
674 versions = [db.get(pkg, '%s version %s' % (rel, arch))
677 # if there is only a single version for all of them, simplify the list
678 single_ver = versions.count(versions[0]) == len(versions)
682 # only one version string for all
683 s += ', '.join(['`%s <%s>`_' \
684 % (arch, db.get(pkg, '%s file %s' % (rel, arch)))
686 s += ' (%s)' % versions[0]
689 # a separate version string for each arch
690 s += ', '.join(['`%s <%s>`_ (%s)' \
692 db.get(pkg, '%s file %s' % (rel, arch)),
693 db.get(pkg, '%s version %s' % (rel, arch)))
def maintainer2email(maint):
    """Extract the bare email address from an RFC822-style maintainer string.

    'Jane Doe <jane@example.com>' -> 'jane@example.com'

    Robustness fixes over the original one-liner
    (maint.split('<')[1].rstrip('>')):
      * input without an angle-bracketed address is returned stripped of
        surrounding whitespace instead of raising IndexError
      * trailing characters after the closing '>' (e.g. whitespace) no
        longer defeat the rstrip and leak into the result
    """
    if '<' in maint:
        # take exactly what lies between the first '<' and the next '>'
        addr = maint.split('<', 1)[1]
        return addr.split('>', 1)[0].strip()
    return maint.strip()
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# writePkgsBy(db, key, value2id, outdir, heading): generate 'by<key>'
# grouping pages -- a toctree index 'by<key>.rst' plus one rST page per
# distinct value of option *key* (e.g. maintainer), each listing its
# packages as :ref:`deb_<pkg>` links. *value2id* maps the raw option value
# to a filesystem/ref id (e.g. maintainer2email). Shadowed packages are
# skipped. The 'collector = {}' init, the binding of 'by' from
# db.get(p, key) (~718-719), the heading write (~743) and the file close()
# calls are on missing lines -- TODO confirm. The inline comments saying
# "by maintainer" predate the generalization to an arbitrary *key*.
704 def writePkgsBy(db, key, value2id, outdir, heading):
706 nwkey = key.replace(' ', '')
707 createDir(os.path.join(outdir, 'by%s' % nwkey))
711 # get packages by maintainer
712 for p in db.sections():
713 # only featured packages
714 if db.get(p, 'visibility') == 'shadowed':
717 if db.has_option(p, key):
720 if not collector.has_key(by):
721 collector[by] = (value2id(by), [p])
723 collector[by][1].append(p)
725 toc = open(os.path.join(outdir, 'by%s.rst' % nwkey), 'w')
726 toc.write('.. index:: Packages by %s\n.. _by%s:\n\n' % (key, key))
728 toc_heading = 'Packages by %s' % key
729 toc.write('%s\n%s\n\n' % (toc_heading, '=' * len(toc_heading)))
730 toc.write('.. toctree::\n :maxdepth: 1\n\n')
732 # summary page per maintainer
733 for by in sorted(collector.keys()):
734 toc.write(' by%s/%s\n' % (nwkey, collector[by][0]))
736 fh = open(os.path.join(outdir,
738 collector[by][0] + '.rst'), 'w')
740 fh.write('.. index:: %s\n.. _%s:\n\n' % (by, by))
742 hdr = heading.replace('<ITEM>', by)
744 fh.write('=' * len(hdr) + '\n\n')
746 # write sorted list of packages
747 for p in sorted(collector[by][1]):
748 fh.write('* :ref:`deb_%s`\n' % p)
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines. Kept verbatim.
#
# writeRst(db, outdir, cfg, addenum_dir=None): emit one rST page per
# package section ('pkgs/<pkg>.rst', content from genPkgPage) plus the
# 'pkgs.rst' toctree. When *addenum_dir* is given and contains '<pkg>.rst',
# an '.. include::' of that addendum (absolute path) is appended to the
# page. The pf.close()/toc.close() calls are on missing lines (770, 776+)
# -- TODO confirm; Python 2 era code, so no 'with' blocks assumed.
755 def writeRst(db, outdir, cfg, addenum_dir=None):
757 createDir(os.path.join(outdir, 'pkgs'))
760 toc = open(os.path.join(outdir, 'pkgs.rst'), 'w')
762 toc.write('.. _full_pkg_list:\n\n')
763 toc.write('Archive content\n===============\n\n'
764 '.. toctree::\n :maxdepth: 1\n\n')
766 for p in sorted(db.sections()):
767 print "Generating page for '%s'" % p
768 pf = open(os.path.join(outdir, 'pkgs', '%s.rst' % p), 'w')
769 pf.write(genPkgPage(db, p, cfg))
771 # check for doc addons
772 if addenum_dir is not None:
773 addenum = os.path.join(os.path.abspath(addenum_dir), '%s.rst' % p)
774 if os.path.exists(addenum):
775 pf.write('\n\n.. include:: %s\n' % addenum)
777 toc.write(' pkgs/%s\n' % p)
# NOTE(review): elided listing -- leading integers are original line numbers,
# numbering gaps mark missing lines (probably the 'default=None,' lines of
# each add_option call: 789, 794, 799, 804-805). Kept verbatim.
#
# prepOptParser(op): register the command line interface on an optparse
# OptionParser -- database file, repository config, rST output directory,
# repeatable release URL, and per-package addendum directory. The usage
# string reuses the module docstring.
# NOTE(review): the '--pkgaddenum' option's help text is literally the
# string "None" -- almost certainly a placeholder that should describe the
# addendum directory; left unchanged here since surrounding lines are
# missing and help strings are runtime-visible text.
783 def prepOptParser(op):
784 # use module docstring for help output
785 op.usage = "%s [OPTIONS]\n\n" % sys.argv[0] + __doc__
787 op.add_option("--db",
788 action="store", type="string", dest="db",
790 help="Database file to read. Default: None")
792 op.add_option("--cfg",
793 action="store", type="string", dest="cfg",
795 help="Repository config file.")
797 op.add_option("-o", "--outdir",
798 action="store", type="string", dest="outdir",
800 help="Target directory for ReST output. Default: None")
802 op.add_option("-r", "--release-url",
803 action="append", dest="release_urls",
806 op.add_option("--pkgaddenum", action="store", dest="addenum_dir",
807 type="string", default=None, help="None")
# NOTE(review): fragment of the script entry point -- the enclosing
# 'def main():' line (~811) is missing from this elided listing; leading
# integers are original line numbers, numbering gaps mark missing lines.
# Kept verbatim.
#
# Flow (as visible): parse options, require exactly one positional command
# and a --cfg file; load or recreate the DebianPkgArchive depending on the
# 'refreshdb' command ('cmd' binding is on a missing line, presumably
# args[0]); for 'generate', write sources lists, rST pages and by-maintainer
# pages; for the refresh path, apply package filters/prospective packages
# from cfg and import all configured releases; finally save the db when
# --db was given (the dpa.save call is on a missing line after 874).
# NOTE(review): DEFECT -- the "if cfg.has_option('repositories',
# 'releases')" import loop (lines 862-864) is repeated verbatim at lines
# 866-868, so every release URL is imported twice. Looks like copy-paste
# duplication; remove one copy when editing the full source.
812 op = OptionParser(version="%prog 0.0.1")
815 (opts, args) = op.parse_args()
818 print('There needs to be exactly one command')
824 print("'--cfg' option is mandatory.")
828 cfg = SafeConfigParser()
831 # load existing db, unless renew is requested
832 if cmd == 'refreshdb':
833 dpa = DebianPkgArchive()
835 dpa = DebianPkgArchive(init_db=opts.db)
838 if cmd == 'generate':
839 if opts.outdir is None:
840 print('Not output directory specified!')
843 dpa.writeSourcesLists(opts.outdir, cfg)
844 writeRst(dpa, opts.outdir, cfg, opts.addenum_dir)
845 writePkgsBy(dpa, 'maintainer', maintainer2email, opts.outdir,
846 'Packages maintained by <ITEM>')
851 if cfg.has_option('packages', 'select taskfiles'):
852 dpa.setPkgFilterFromTaskFile(cfg.get('packages',
853 'select taskfiles').split())
855 if cfg.has_option('packages', 'select names'):
856 dpa.pkgfilter += cfg.get('packages', 'select names').split()
858 if cfg.has_option('packages', 'prospective'):
859 for p in cfg.get('packages', 'prospective').split():
860 dpa.importProspectivePkgsFromTaskFile(p)
862 if cfg.has_option('repositories', 'releases'):
863 for rurl in cfg.get('repositories', 'releases').split():
864 dpa.importRelease(rurl, force_update=False)
866 if cfg.has_option('repositories', 'releases'):
867 for rurl in cfg.get('repositories', 'releases').split():
868 dpa.importRelease(rurl, force_update=False)
870 if cfg.has_option('officials', 'releases'):
871 for rurl in cfg.get('officials', 'releases').split():
872 dpa.checkOfficialRelease(rurl, force_update=False)
874 if not opts.db is None:
878 if __name__ == "__main__":