2 """Tell me who you are!
7 from debian_bundle import deb822
8 from debian_bundle import debtags
9 from ConfigParser import SafeConfigParser
15 from optparse import OptionParser, Option, OptionGroup, OptionConflictError
def transCodename(codename, cfg):
    """Translate a known codename into a release description.

    Unknown codenames will simply be returned as is.

    :param codename: repository-qualified codename, e.g. 'apsy_lenny'
    :param cfg: ConfigParser with an optional 'release codenames' section
    :returns: human-readable release description, or the bare codename if
      no translation is configured
    """
    # strip repository codename prefix (e.g. 'apsy_lenny' -> 'lenny');
    # find() returning -1 leaves the string untouched
    codename = codename[codename.find('_') + 1:]

    # if we know something, tell
    if codename in cfg.options('release codenames'):
        return cfg.get('release codenames', codename)

    # unknown codename: hand it back unchanged
    return codename
# NOTE(review): fragment of createDir(path) -- the 'def' header (original
# lines ~30-33) is elided from this listing. The visible code walks the
# path components from the root and creates each missing directory level,
# i.e. an os.makedirs()-like behavior; the mkdir call itself is elided.
# Presumably returns early when 'path' already exists -- TODO confirm.
34 if os.path.exists(path):
37 ps = path.split(os.path.sep)
39 for i in range(1,len(ps) + 1):
# p is the partial path made of the first i components
40 p = os.path.sep.join(ps[:i])
42 if not os.path.exists(p):
# Local cache of APT index files (Release/Packages) downloaded via HTTP or
# exported from SVN, with optional additional read-only cache directories.
# NOTE(review): this listing is gappy -- several original lines (notably
# 'else:' headers and conditions) are elided, so some branches below appear
# without their guards.
46 class AptListsCache(object):
47 def __init__(self, cachedir='cache', ro_cachedirs=None, init_db=None):
# writable cache location; 'init_db' is not used in the visible code
48 self.cachedir = cachedir
# optional read-only fallback cache dirs searched by get()
50 if not ro_cachedirs is None:
51 self.ro_cachedirs = ro_cachedirs
53 self.ro_cachedirs = []
55 # always use system cache
56 #self.ro_cachedirs.append('/var/lib/apt/lists/')
# make sure the writable cache dir exists
59 createDir(self.cachedir)
62 def get(self, url, update=False):
63 """Looks in the cache if the file is there and takes the cached one.
64 Otherwise it is downloaded first.
66 Knows how to deal with http:// and svn:// URLs.
# determine whether the URL points at a compressed file; target_url is
# the name of the corresponding uncompressed file
71 # look whether it is compressed
72 cext = url.split('.')[-1]
73 if cext in ['gz', 'bz2']:
# strip '.<ext>' (extension plus the dot) from the end of the URL
74 target_url = url[:-1 * len(cext) -1]
76 # assume not compressed
80 # turn url into a filename -- mimic what APT does for
82 tfilename = '_'.join(target_url.split('/')[2:])
84 # if we need to download anyway do not search
86 cfilename = os.path.join(self.cachedir, tfilename)
88 # look for the uncompressed file anywhere in the cache
90 for cp in [self.cachedir] + self.ro_cachedirs:
91 if os.path.exists(os.path.join(cp, tfilename)):
92 cfilename = os.path.join(cp, tfilename)
# fallback: cache miss -> download target goes into the writable cache
97 cfilename = os.path.join(self.cachedir, tfilename)
100 # if updated needed -- download
102 print 'Caching file from %s' % url
104 if url.startswith('svn://'):
# export file from SVN working copy/repository straight into the cache
106 pysvn.Client().export(url, cfilename)
107 if url.startswith('http://')
109 tempfile, ignored = urllib.urlretrieve(url)
# choose decompressor by extension; the guarding conditions for these
# assignments are elided in this listing
114 decompressor = 'gzip'
116 decompressor = 'bzip2'
121 "Don't know how to decompress %s files" \
124 if not decompressor is None:
# decompress in place via external tool; nonzero exit is an error
125 if subprocess.call([decompressor, '-d', '-q', '-f',
127 raise RuntimeError, \
128 "Something went wrong while decompressing '%s'" \
131 # move decompressed file into cache
132 shutil.move(os.path.splitext(tempfile)[0], cfilename)
134 # XXX do we need that if explicit filename is provided?
# hand back an open read handle on the cached (uncompressed) file
138 fh = open(cfilename, 'r')
# INI-backed package database: one ConfigParser section per binary package,
# populated from APT Release/Packages files and Debian task files.
# NOTE(review): this listing is gappy -- loop headers, 'else:' branches and
# several statements are elided; comments below only describe what is
# visible.
145 class DebianPkgArchive(SafeConfigParser):
148 def __init__(self, cache=None, init_db=None):
# :param cache: optional AptListsCache instance; a fresh one is created
#   otherwise
# :param init_db: optional filename of an existing database to load
152 SafeConfigParser.__init__(self)
154 # read an existing database if provided
155 if not init_db is None:
158 # use provided file cache or use fresh one
159 if not cache is None:
162 self.cache = AptListsCache()
# system-wide debtags database; requires /var/lib/debtags to exist
165 self.dtags = debtags.DB()
166 self.dtags.read(open('/var/lib/debtags/package-tags'))
168 # init package filter
169 self.pkgfilter = None
171 self._updateReleases()
# Rebuild self.releases: maps release codename -> list of components in
# which at least one known package appears.
174 def _updateReleases(self):
177 for p in self.sections():
178 if not self.has_option(p, 'releases'):
181 # for all releases of this package
183 [rel.strip() for rel in self.get(p, 'releases').split(',')]:
185 if not self.releases.has_key(r):
186 self.releases[r] = []
# component this package lives in for release r (e.g. 'main')
189 component = self.get(p, 'component %s' % r)
191 if not component in self.releases[r]:
192 self.releases[r].append(component)
196 """Generate INI file content for current content.
198 # make adaptor to use str as file-like (needed for ConfigParser.write()
199 class file2str(object):
202 def write(self, val):
213 def save(self, filename):
214 """Write current content to a file.
216 f = open(filename, 'w')
# Set section/option to value, or verify an already stored value matches.
221 def ensureUnique(self, section, option, value):
222 if not self.has_option(section, option):
223 self.set(section, option, value)
225 if not self.get(section, option) == value:
226 raise ValueError, "%s: %s is not unique (%s != %s)" \
228 self.get(section, option), value)
# Append value to a comma-separated option, creating it when missing.
# NOTE(review): the visible code does not show a duplicate check before
# the append -- presumably guarded by an elided 'if value not in l'.
231 def appendUniqueCSV(self, section, option, value):
234 if not self.has_option(section, option):
235 self.set(section, option, value)
237 l = self.get(section, option).split(', ')
239 self.set(section, option, ', '.join(l + [value]))
# Import all packages of one APT repository release given the URL of its
# Release file.
242 def importRelease(self, rurl, force_update=False):
243 # root URL of the repository
244 baseurl = '/'.join(rurl.split('/')[:-1])
245 # get the release file from the cache
246 release_file = self.cache.get(rurl, update=force_update)
248 # create parser instance
249 rp = deb822.Release(release_file)
251 # architectures on this dist
252 archs = rp['Architectures'].split()
253 components = rp['Components'].split()
254 # compile a new codename that also considers the repository label
255 # to distinguish between official and unofficial repos.
256 codename = '_'.join([rp['Label'], rp['Codename']])
258 # compile the list of Packages files to parse and parse them
# NOTE(review): the loops over components 'c' and architectures 'a' are
# elided in this listing
261 # compile packages URL
262 pkgsurl = '/'.join([baseurl, c, 'binary-' + a, 'Packages.bz2'])
264 # retrieve from cache
265 packages_file = self.cache.get(pkgsurl,
269 self._parsePkgsFile(packages_file, codename, c, baseurl)
272 packages_file.close()
# refresh the per-release component summary after the import
277 self._updateReleases()
# Parse a single Packages file and store every stanza in the database.
280 def _parsePkgsFile(self, fh, codename, component, baseurl):
286 Codename of the release
288 The archive component this packages file corresponds to.
290 for stanza in deb822.Packages.iter_paragraphs(fh):
291 self._storePkg(stanza, codename, component, baseurl)
# Store one Packages stanza 'st' under its package's section.
294 def _storePkg(self, st, codename, component, baseurl):
301 if not self.has_section(pkg):
302 self.add_section(pkg)
304 # do nothing if package is not in filter if there is any
305 if not self.pkgfilter is None and not pkg in self.pkgfilter:
306 self.ensureUnique(pkg, 'visibility', 'shadowed')
308 self.ensureUnique(pkg, 'visibility', 'featured')
# per-release bookkeeping: releases, archs, version and file per arch
311 self.appendUniqueCSV(pkg, "releases", codename)
314 self.appendUniqueCSV(pkg, "archs %s" % codename, st['Architecture'])
317 self.ensureUnique(pkg,
318 "version %s %s" % (codename, st['Architecture']),
322 self.ensureUnique(pkg,
323 "file %s %s" % (codename, st['Architecture']),
324 '/'.join(baseurl.split('/')[:-2] + [st['Filename']]))
327 self.ensureUnique(pkg, 'component ' + codename, component)
# pool URL: repository root plus the directory of the .deb file
330 self.ensureUnique(pkg, "poolurl %s" % codename,
331 '/'.join(baseurl.split('/')[:-2] \
332 + [os.path.dirname(st['Filename'])]))
335 # now the stuff where a single variant is sufficient and where we go for
336 # the latest available one
337 if self.has_option(pkg, "newest version") \
338 and apt.VersionCompare(st['Version'],
339 self.get(pkg, "newest version")) < 0:
342 # everything from here will overwrite existing ones
344 # we seems to have an updated package
345 self.set(pkg, "newest version", st['Version'])
# '%' must be doubled, otherwise ConfigParser interpolation chokes
348 self.set(pkg, "description", st['Description'].replace('%', '%%'))
351 self.set(pkg, "maintainer", st['Maintainer'])
354 if st.has_key('Homepage'):
355 self.set(pkg, 'homepage', st['Homepage'])
# attach debtags for this package, if any are known
358 debtags = self.dtags.tagsOfPackage(pkg)
360 self.set(pkg, 'debtags', ', '.join(debtags))
# Write per-release sources.list snippets plus an index page fragment.
363 def writeSourcesLists(self, outdir, cfg):
365 createDir(os.path.join(outdir, 'static'))
367 fl = open(os.path.join(outdir, 'sources_lists'), 'w')
# iterate releases sorted by their translated (human-readable) name
368 for trans, r in sorted([(transCodename(k, cfg), k)
369 for k in self.releases.keys()]):
370 # need to turn 'apsy_lenny' back into 'lenny'
371 debneuro_r = r.split('_')[1]
373 f = open(os.path.join(outdir, 'static',
374 'debneuro.%s.sources.list' % debneuro_r),
376 f.write("deb http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \
377 % (debneuro_r, ' '.join(self.releases[r])))
378 f.write("deb-src http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \
379 % (debneuro_r, ' '.join(self.releases[r])))
380 # XXX use :download: role from sphinx 0.6 on
381 #fl.write('* `%s <http://apsy.gse.uni-magdeburg.de/debian/html/_static/debneuro.%s.sources.list>`_\n' \
382 fl.write('* `%s <_static/debneuro.%s.sources.list>`_\n' \
383 % (trans, debneuro_r))
# Import not-yet-packaged ("prospective") packages from a Debian task file.
388 def importProspectivePkgsFromTaskFile(self, url):
389 fh = self.cache.get(url)
391 for st in deb822.Packages.iter_paragraphs(fh):
392 # do not stop unless we have a description
393 if not st.has_key('Pkg-Description'):
# package name comes from Depends, falling back to Suggests; the
# assignments themselves are elided in this listing
396 if st.has_key('Depends'):
398 elif st.has_key('Suggests'):
401 print 'Warning: Cannot determine name of prospective package ' \
406 if not self.has_section(pkg):
407 self.add_section(pkg)
409 # prospective ones are always featured
410 self.ensureUnique(pkg, 'visibility', 'featured')
# '%' doubled for ConfigParser interpolation, as in _storePkg()
413 self.set(pkg, "description",
414 st['Pkg-Description'].replace('%', '%%'))
417 if st.has_key('Homepage'):
418 self.set(pkg, 'homepage', st['Homepage'])
420 if st.has_key('Pkg-URL'):
421 self.set(pkg, 'external pkg url', st['Pkg-URL'])
423 if st.has_key('WNPP'):
424 self.set(pkg, 'wnpp debian', st['WNPP'])
426 if st.has_key('License'):
427 self.set(pkg, 'license', st['License'])
429 # treat responsible as maintainer
430 if st.has_key('Responsible'):
431 self.set(pkg, "maintainer", st['Responsible'])
# Build self.pkgfilter from the Depends/Suggests entries of task files.
434 def setPkgFilterFromTaskFile(self, urls):
# NOTE(review): the loop over 'urls' yielding 'task' and the 'pkgs'
# accumulator initialization are elided in this listing
438 fh = self.cache.get(task)
441 # loop over all stanzas
442 for stanza in deb822.Packages.iter_paragraphs(fh):
443 if stanza.has_key('Depends'):
444 pkg = stanza['Depends']
445 elif stanza.has_key('Suggests'):
446 pkg = stanza['Suggests']
450 # account for multiple packages per line
452 pkgs += [p.strip() for p in pkg.split(',')]
454 pkgs.append(pkg.strip())
457 self.pkgfilter = pkgs
# Render one package's ReST page: index markup, ref target, title, long
# description, homepage, maintainer and binary-package sections.
# NOTE(review): listing is gappy -- the initialization of 's' for the
# non-featured branch and several joins are elided.
460 def genPkgPage(db, pkg, cfg):
# description is stored as a single option; first line is the summary
467 descr = db.get(pkg, 'description').split('\n')
471 # only put index markup for featured packages
472 if db.get(pkg, 'visibility') == 'featured':
473 s = '.. index:: %s, ' % pkg
476 if db.has_option(pkg, 'debtags'):
# restrict the index to the more meaningful tag facets
478 tags = [t for t in db.get(pkg, 'debtags').split(', ')
479 if t.split('::')[0] in ['field', 'works-with']]
481 s += '.. index:: %s\n\n' % ', '.join(tags)
483 # main ref target for this package
484 s += '.. _deb_' + pkg + ':\n'
486 # separate header from the rest
# ReST over-/underlined page title: "<pkg> -- <summary>"
489 header = '%s -- %s' % (pkg, descr[0])
490 s += '*' * (len(header) + 2)
491 s += '\n ' + header + '\n'
492 s += '*' * (len(header) + 2) + '\n\n'
# remaining description lines, stripped of Debian control-file markup
495 s += '\n'.join([l.lstrip(' .') for l in descr[1:]])
498 if db.has_option(pkg, 'homepage'):
499 s += '\n**Homepage**: %s\n' % db.get(pkg, 'homepage')
501 s += '\nBinary packages'\
502 '\n===============\n'
504 s += genMaintainerSection(db, pkg)
506 if db.has_option(pkg, 'wnpp debian'):
# boilerplate linking to the ITP bug report on the Debian BTS
508 A Debian packaging effort has been officially announced. Please see the
509 corresponding `intent-to-package bug report`_ for more information about
512 .. _intent-to-package bug report: http://bugs.debian.org/%s
514 """ % db.get(pkg, 'wnpp debian')
516 s += genBinaryPackageSummary(db, pkg, 'DebNeuro repository', cfg)
518 # if db.has_option(pkg, 'external pkg url'):
519 # s += 'Other unofficial ressources\n' \
520 # '---------------------------\n\n'
521 # s += 'An unofficial package is available from %s\ .\n\n' \
522 # % db.get(pkg, 'external pkg url')
# Render the maintainer paragraph of a package page; covers three cases:
# no maintainer known, maintainer but no binary packages yet, and regular
# maintained packages (with a disclaimer for externally maintained ones).
526 def genMaintainerSection(db, pkg):
529 if not db.has_option(pkg, 'maintainer'):
# nobody known to be responsible for this package
532 Currently, nobody seems to be responsible for creating or maintaining
533 Debian packages of this software.
538 # there is someone responsible
539 maintainer = db.get(pkg, 'maintainer')
541 # do we have actual packages, or is it just a note
542 if not db.has_option(pkg, 'releases'):
# packaging effort announced, but no binaries in the repository yet
545 There are currently no binary packages available. However, the last known
546 packaging effort was started by %s which meanwhile might have led to an
547 initial unofficial Debian packaging.
552 s += '\n**Maintainer**: %s\n\n' % maintainer
554 if not maintainer.startswith('Michael Hanke'):
# externally maintained: redirect bug reports to the repo maintainer
558 Do not contact the original package maintainer regarding
559 bugs in this unofficial binary package. Instead, contact
560 the repository maintainer at michael.hanke@gmail.com\ .
# Render the per-release binary package summary (release, component, archs,
# versions, download links) as ReST.
# NOTE(review): listing is gappy -- the list-comprehension loop variables
# and the if/else around single_ver are partially elided.
567 def genBinaryPackageSummary(db, pkg, reposname, cfg):
568 # do nothing if there are no packages
569 if not db.has_option(pkg, 'releases'):
# section heading, underlined ReST-style
572 s = '\n%s\n%s\n' % (reposname, '-' * len(reposname))
575 The repository contains binary packages for the following distribution
576 releases and system architectures. The corresponding source packages
577 are available too. Please click on the release name to access them.
580 Do not download this package manually if you plan to use it
581 regularly. Instead configure your package manager to use this
582 repository by following the instructions on the
583 :ref:`front page <repository_howto>`.
587 # for all releases this package is part of
588 for rel in db.get(pkg, 'releases').split(', '):
589 # write release description and component
590 s += '\n`%s <%s>`_:\n ' \
591 % (transCodename(rel, cfg),
592 db.get(pkg, 'poolurl %s' % rel))
594 s += '[%s] ' % db.get(pkg, 'component ' + rel)
596 # archs this package is available for
597 archs = db.get(pkg, 'archs ' + rel).split(', ')
599 # extract all present versions for any arch
600 versions = [db.get(pkg, 'version %s %s' % (rel, arch))
603 # if there is only a single version for all of them, simplify the list
604 single_ver = versions.count(versions[0]) == len(versions)
607 # only one version string for all
608 s += ', '.join(['`%s <%s>`_' \
609 % (arch, db.get(pkg, 'file %s %s' % (rel, arch)))
611 s += ' (%s)' % versions[0]
613 # a separate version string for each arch
614 s += ', '.join(['`%s <%s>`_ (%s)' \
616 db.get(pkg, 'file %s %s' % (rel, arch)),
617 db.get(pkg, 'version %s %s' % (rel, arch)))
def maintainer2email(maint):
    """Extract the email address from an RFC 822-style maintainer string.

    :param maint: maintainer entry, typically "Full Name <user@host>"
    :returns: the address without the angle brackets; if the string does
      not contain '<' at all, the stripped input is returned unchanged
      (the previous code raised IndexError on such malformed entries).
    """
    if '<' not in maint:
        # robustness: malformed maintainer entries used to crash here
        return maint.strip()
    return maint.split('<')[1].rstrip('>')
# Group packages by the value of a database option (e.g. 'maintainer') and
# write one ReST summary page per group plus a toctree index page.
# :param value2id: callable turning an option value into a filesystem id
#   (e.g. maintainer2email)
# :param heading: page heading template; '<ITEM>' is replaced per group
# NOTE(review): 'collector' initialization and the lookup of 'by' from the
# option value are elided in this listing.
628 def writePkgsBy(db, key, value2id, outdir, heading):
# option name without spaces, used in directory/file names
630 nwkey = key.replace(' ', '')
631 createDir(os.path.join(outdir, 'by%s' % nwkey))
635 # get packages by maintainer
636 for p in db.sections():
637 # only featured packages
638 if db.get(p, 'visibility') == 'shadowed':
641 if db.has_option(p, key):
# collector maps value -> (page id, [package names])
644 if not collector.has_key(by):
645 collector[by] = (value2id(by), [p])
647 collector[by][1].append(p)
# table-of-contents page listing all group pages
649 toc = open(os.path.join(outdir, 'by%s.rst' % nwkey), 'w')
650 toc.write('.. index:: Packages by %s\n.. _by%s:\n\n' % (key, key))
652 toc_heading = 'Packages by %s' % key
653 toc.write('%s\n%s\n\n' % (toc_heading, '=' * len(toc_heading)))
654 toc.write('.. toctree::\n :maxdepth: 1\n\n')
656 # summary page per maintainer
657 for by in sorted(collector.keys()):
658 toc.write(' by%s/%s\n' % (nwkey, collector[by][0]))
660 fh = open(os.path.join(outdir,
662 collector[by][0] + '.rst'), 'w')
664 fh.write('.. index:: %s\n.. _%s:\n\n' % (by, by))
666 hdr = heading.replace('<ITEM>', by)
668 fh.write('=' * len(hdr) + '\n\n')
670 # write sorted list of packages
671 for p in sorted(collector[by][1]):
672 fh.write('* :ref:`deb_%s`\n' % p)
# Generate one ReST page per package under <outdir>/pkgs plus a toctree
# index page, optionally appending per-package addenda from addenum_dir.
679 def writeRst(db, outdir, cfg, addenum_dir=None):
681 createDir(os.path.join(outdir, 'pkgs'))
# toctree page referencing every generated package page
684 toc = open(os.path.join(outdir, 'pkgs.rst'), 'w')
686 toc.write('.. _full_pkg_list:\n\n')
687 toc.write('Archive content\n===============\n\n'
688 '.. toctree::\n :maxdepth: 1\n\n')
690 for p in sorted(db.sections()):
691 print "Generating page for '%s'" % p
692 pf = open(os.path.join(outdir, 'pkgs', '%s.rst' % p), 'w')
693 pf.write(genPkgPage(db, p, cfg))
695 # check for doc addons
696 if addenum_dir is not None:
697 addenum = os.path.join(os.path.abspath(addenum_dir), '%s.rst' % p)
698 if os.path.exists(addenum):
# pull in hand-written extra documentation for this package
699 pf.write('\n\n.. include:: %s\n' % addenum)
701 toc.write(' pkgs/%s\n' % p)
# Configure the OptionParser with all command line options of this tool.
707 def prepOptParser(op):
708 # use module docstring for help output
709 op.usage = "%s [OPTIONS]\n\n" % sys.argv[0] + __doc__
711 op.add_option("--db",
712 action="store", type="string", dest="db",
714 help="Database file to read. Default: None")
716 op.add_option("--cfg",
717 action="store", type="string", dest="cfg",
719 help="Repository config file.")
721 op.add_option("-o", "--outdir",
722 action="store", type="string", dest="outdir",
724 help="Target directory for ReST output. Default: None")
# may be given multiple times; collected into a list
726 op.add_option("-r", "--release-url",
727 action="append", dest="release_urls",
730 op.add_option("--pkgaddenum", action="store", dest="addenum_dir",
731 type="string", default=None, help="None")
# NOTE(review): main program body -- the enclosing 'def main():' header and
# several statements (command extraction into 'cmd', sys.exit calls, cfg
# file reading, the final dpa.save) are elided from this listing.
736 op = OptionParser(version="%prog 0.0.1")
739 (opts, args) = op.parse_args()
# exactly one positional command ('refreshdb' or 'generate') is expected
742 print('There needs to be exactly one command')
748 print("'--cfg' option is mandatory.")
752 cfg = SafeConfigParser()
755 # load existing db, unless renew is requested
756 if cmd == 'refreshdb':
757 dpa = DebianPkgArchive()
759 dpa = DebianPkgArchive(init_db=opts.db)
762 if cmd == 'generate':
763 if opts.outdir is None:
# NOTE(review): message typo -- should read 'No output directory'
764 print('Not output directory specified!')
# generate sources.list snippets, package pages and by-maintainer pages
767 dpa.writeSourcesLists(opts.outdir, cfg)
768 writeRst(dpa, opts.outdir, cfg, opts.addenum_dir)
769 writePkgsBy(dpa, 'maintainer', maintainer2email, opts.outdir,
770 'Packages maintained by <ITEM>')
# refreshdb path: apply package filters, then import task files and
# repository releases listed in the config
775 if cfg.has_option('packages', 'select taskfiles'):
776 dpa.setPkgFilterFromTaskFile(cfg.get('packages',
777 'select taskfiles').split())
779 if cfg.has_option('packages', 'select names'):
780 dpa.pkgfilter += cfg.get('packages', 'select names').split()
782 if cfg.has_option('packages', 'prospective'):
783 for p in cfg.get('packages', 'prospective').split():
784 dpa.importProspectivePkgsFromTaskFile(p)
786 if cfg.has_option('repositories', 'releases'):
787 for rurl in cfg.get('repositories', 'releases').split():
788 dpa.importRelease(rurl, force_update=False)
# presumably persists the database via dpa.save(opts.db) -- elided
790 if not opts.db is None:
794 if __name__ == "__main__":