3 from debian_bundle import deb822
4 from debian_bundle import debtags
5 from ConfigParser import SafeConfigParser
# Destination directory for all generated output (reST pages, sources lists).
target_dir = 'build/source'

# Mapping of repository codenames (as composed from the Release file's
# Label and Codename fields, e.g. 'apsy_etch') to human-readable release
# descriptions used in the generated documentation.
'apsy_etch': 'Debian GNU/Linux 4.0 (etch)',
'apsy_lenny': 'Debian GNU/Linux 5.0 (lenny)',
'apsy_squeeze': 'Debian testing (squeeze)',
'apsy_sid': 'Debian unstable (sid)',
'apsy_dapper': 'Ubuntu 6.06 LTS "Dapper Drake" (dapper)',
'apsy_edgy': 'Ubuntu 6.10 "Edgy Eft" (edgy)',
'apsy_feisty': 'Ubuntu 7.04 "Feisty Fawn" (feisty)',
'apsy_gutsy': 'Ubuntu 7.10 "Gutsy Gibbon" (gutsy)',
'apsy_hardy': 'Ubuntu 8.04 LTS "Hardy Heron" (hardy)',
'apsy_intrepid': 'Ubuntu 8.10 "Intrepid Ibex" (intrepid)',
'apsy_jaunty': 'Ubuntu 9.04 "Jaunty Jackalope" (jaunty)',
def transCodename(codename):
    """Translate a known codename into a release description.

    Unknown codenames will simply be returned as is.
    """
    # dict.get() with the codename itself as fallback implements the
    # documented contract in a single lookup and avoids the
    # `key in d.keys()` anti-pattern.
    return codename2descr.get(codename, codename)
class AptListsCache(object):
    """File cache for APT index files (Release/Packages) and task files.

    Downloads are stored in a writable cache directory; a list of
    additional read-only cache directories (always including the system
    APT cache) is consulted before downloading.
    """
    def __init__(self, cachedir='cache', ro_cachedirs=None):
        # writable location for freshly downloaded/decompressed files
        self.cachedir = cachedir

        # optional extra caches that are only ever read, never written
        if not ro_cachedirs is None:
            self.ro_cachedirs = ro_cachedirs
            self.ro_cachedirs = []

        # always use system cache
        self.ro_cachedirs.append('/var/lib/apt/lists/')

    def get(self, url, update=False):
        """Looks in the cache if the file is there and takes the cached one.
        Otherwise it is downloaded first.

        Knows how to deal with http:// and svn:// URLs.
        """
        # look whether it is compressed
        cext = url.split('.')[-1]
        if cext in ['gz', 'bz2']:
            # strip the compression extension (and its dot) from the URL
            target_url = url[:-1 * len(cext) -1]
            # assume not compressed

        # turn url into a filename -- mimik what APT does for
        tfilename = '_'.join(target_url.split('/')[2:])

        # if we need to download anyway do not search
        cfilename = os.path.join(self.cachedir, tfilename)

        # look for the uncompressed file anywhere in the cache
        for cp in [self.cachedir] + self.ro_cachedirs:
            if os.path.exists(os.path.join(cp, tfilename)):
                cfilename = os.path.join(cp, tfilename)
        cfilename = os.path.join(self.cachedir, tfilename)

        # if updated needed -- download
        print 'Caching file from %s' % url

        if url.startswith('svn://'):
            # export a pristine copy straight from the repository
            pysvn.Client().export(url, cfilename)
        if url.startswith('http://'):
            # fetch into a temporary file first, decompress afterwards
            tempfile, ignored = urllib.urlretrieve(url)

            # pick the external decompressor matching the extension
            decompressor = 'gzip'
            decompressor = 'bzip2'
            "Don't know how to decompress %s files" \
        if not decompressor is None:
            if subprocess.call([decompressor, '-d', '-q', '-f',
                raise RuntimeError, \
                    "Something went wrong while decompressing '%s'" \

        # move decompressed file into cache
        shutil.move(os.path.splitext(tempfile)[0], cfilename)

        # XXX do we need that if explicit filename is provided?
        fh = open(cfilename, 'r')
class DebianPkgArchive(SafeConfigParser):
    """INI-style database describing packages found in APT repositories.

    Every package becomes a section; per-release and per-architecture
    facts (version, file URL, component, ...) are stored as options
    inside that section.
    """
    def __init__(self, cache=None):
        SafeConfigParser.__init__(self)

        # release codenames found in the repos
        # use provided file cache or use fresh one
        if not cache is None:
            self.cache = AptListsCache()

        # debtags database used to annotate packages with their tags
        self.dtags = debtags.DB()
        self.dtags.read(open('/var/lib/debtags/package-tags'))

        # init package filter
        self.pkgfilter = None

        """Generate INI file content for current content.
        """
        # make adaptor to use str as file-like (needed for ConfigParser.write()
        class file2str(object):
            def write(self, val):

    def save(self, filename):
        """Write current content to a file.
        """
        f = open(filename, 'w')

    def ensureUnique(self, section, option, value):
        # Set the option when unset; otherwise insist that the stored
        # value matches the new one.
        if not self.has_option(section, option):
            self.set(section, option, value)
        if not self.get(section, option) == value:
            raise ValueError, "%s: %s is not unique (%s != %s)" \
                self.get(section, option), value)

    def appendUniqueCSV(self, section, option, value):
        # Append `value` to a comma-separated list option.
        if not self.has_option(section, option):
            self.set(section, option, value)
        l = self.get(section, option).split(', ')
        self.set(section, option, ', '.join(l + [value]))

    def importRelease(self, rurl, force_update=False):
        """Import a repository given the URL of its Release file."""
        # root URL of the repository
        baseurl = '/'.join(rurl.split('/')[:-1])
        # get the release file from the cache
        release_file = self.cache.get(rurl, update=force_update)

        # create parser instance
        rp = deb822.Release(release_file)

        # architectures on this dist
        archs = rp['Architectures'].split()
        components = rp['Components'].split()
        # compile a new codename that also considers the repository label
        # to distinguish between official and unofficial repos.
        codename = '_'.join([rp['Label'], rp['Codename']])

        # store the release itself
        if not codename in self.releases.keys():
            self.releases[codename] = components

        # compile the list of Packages files to parse and parse them
        # compile packages URL
        pkgsurl = '/'.join([baseurl, c, 'binary-' + a, 'Packages.bz2'])

        # retrieve from cache
        packages_file = self.cache.get(pkgsurl,
        self._parsePkgsFile(packages_file, codename, c, baseurl)
        packages_file.close()

    def _parsePkgsFile(self, fh, codename, component, baseurl):
        """Parse a Packages index and store every stanza.

        codename:
          Codename of the release
        component:
          The archive component this packages file corresponds to.
        """
        for stanza in deb822.Packages.iter_paragraphs(fh):
            self._storePkg(stanza, codename, component, baseurl)

    def _storePkg(self, st, codename, component, baseurl):
        """Store a single package stanza in the database."""
        # do nothing if package is not in filter if there is any
        if not self.pkgfilter is None and not pkg in self.pkgfilter:
        if not self.has_section(pkg):
            self.add_section(pkg)

        # remember which releases ship this package
        self.appendUniqueCSV(pkg, "releases", codename)
        # and which architectures, per release
        self.appendUniqueCSV(pkg, "archs %s" % codename, st['Architecture'])
        self.ensureUnique(pkg,
            "version %s %s" % (codename, st['Architecture']),
        self.ensureUnique(pkg,
            "file %s %s" % (codename, st['Architecture']),
            '/'.join(baseurl.split('/')[:-2] + [st['Filename']]))
        self.ensureUnique(pkg, 'component ' + codename, component)
        self.ensureUnique(pkg, "poolurl %s" % codename,
            '/'.join(baseurl.split('/')[:-2] \
                + [os.path.dirname(st['Filename'])]))

        # now the stuff where a single variant is sufficient and where we go for
        # the latest available one
        if self.has_option(pkg, "newest version") \
            and apt.VersionCompare(st['Version'],
                self.get(pkg, "newest version")) < 0:

        # everything from here will overwrite existing ones
        # we seems to have an updated package
        self.set(pkg, "newest version", st['Version'])
        # escape '%' so ConfigParser interpolation does not choke
        self.set(pkg, "description", st['Description'].replace('%', '%%'))
        self.set(pkg, "maintainer", st['Maintainer'])
        if st.has_key('Homepage'):
            self.set(pkg, 'homepage', st['Homepage'])
        # annotate the package with its debtags
        debtags = self.dtags.tagsOfPackage(pkg)
        self.set(pkg, 'debtags', ', '.join(debtags))

    def writeSourcesLists(self):
        """Write per-release sources.list files plus a reST index of them."""
        fl = open(os.path.join(target_dir, 'sources_lists'), 'w')
        for trans, r in sorted([(transCodename(k), k)
                                for k in self.releases.keys()]):
            f = open(os.path.join(target_dir,
                     "static/debneuro.%s.sources.list" % r),
            f.write("deb http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \
                    % (r, ' '.join(self.releases[r])))
            f.write("deb-src http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \
                    % (r, ' '.join(self.releases[r])))
            # XXX use :download: role from sphinx 0.6 on
            fl.write('* `%s <http://apsy.gse.uni-magdeburg.de/debian/html/_static/debneuro.%s.sources.list>`_\n' \

    def importProspectivePkgsFromTaskFile(self, url):
        """Import prospective (not yet packaged) entries from a task file."""
        fh = dpa.cache.get(url)
        for st in deb822.Packages.iter_paragraphs(fh):
            # do not stop unless we have a description
            if not st.has_key('Pkg-Description'):
            if st.has_key('Depends'):
            elif st.has_key('Suggests'):
            print 'Warning: Cannot determine name of prospective package ' \
            if not self.has_section(pkg):
                self.add_section(pkg)
            # escape '%' so ConfigParser interpolation does not choke
            self.set(pkg, "description",
                     st['Pkg-Description'].replace('%', '%%'))
            if st.has_key('Homepage'):
                self.set(pkg, 'homepage', st['Homepage'])
            if st.has_key('Pkg-URL'):
                self.set(pkg, 'external pkg url', st['Pkg-URL'])
            if st.has_key('WNPP'):
                self.set(pkg, 'wnpp debian', st['WNPP'])
            if st.has_key('License'):
                self.set(pkg, 'license', st['License'])

            # treat responsible as maintainer
            if st.has_key('Responsible'):
                self.set(pkg, "maintainer", st['Responsible'])

    def setPkgFilterFromTaskFile(self, urls):
        """Build the package name filter from Depends/Suggests of task files."""
        for task in taskfiles:
            fh = dpa.cache.get(task)

            # loop over all stanzas
            for stanza in deb822.Packages.iter_paragraphs(fh):
                if stanza.has_key('Depends'):
                    pkg = stanza['Depends']
                elif stanza.has_key('Suggests'):
                    pkg = stanza['Suggests']

                # account for multiple packages per line
                pkgs += [p.strip() for p in pkg.split(',')]
                pkgs.append(pkg.strip())

        self.pkgfilter = pkgs
def genPkgPage(db, pkg):
    """Render the reST page content for a single package section of `db`."""
    # multi-line description; the first line is the short synopsis
    descr = db.get(pkg, 'description').split('\n')

    # index entry for the package name itself
    s = '.. index:: %s, ' % pkg
    # s += db.get(pkg, 'maintainer').split('<')[0]

    if db.has_option(pkg, 'debtags'):
        # restrict to tags from the 'field' and 'works-with' facets
        tags = [t for t in db.get(pkg, 'debtags').split(', ')
                if t.split('::')[0] in ['field', 'works-with']]
        s += '.. index:: %s\n\n' % ', '.join(tags)

    # main ref target for this package
    s += '.. _deb_' + pkg + ':\n'

    # separate header from the rest
    header = '%s -- %s' % (pkg, descr[0])
    s += '*' * (len(header) + 2)
    s += '\n ' + header + '\n'
    s += '*' * (len(header) + 2) + '\n\n'

    # long description with leading list/indent markup stripped
    s += '\n'.join([l.lstrip(' .') for l in descr[1:]])

    if db.has_option(pkg, 'homepage'):
        s += '\n**Homepage**: %s\n' % db.get(pkg, 'homepage')

    s += '\nBinary packages'\
        '\n===============\n'

    s += genMaintainerSection(db, pkg)

    if db.has_option(pkg, 'wnpp debian'):
        # link to the ITP bug report when one is on record
        s += 'A Debian packaging effort has been officially announced. ' \
             'Please see the corresponding `intent-to-package bug report`_ ' \
             'for more information about its current status.\n\n' \
             '.. _intent-to-package bug report: http://bugs.debian.org/%s\n\n' \
             % db.get(pkg, 'wnpp debian')

    s += genBinaryPackageSummary(db, pkg, 'DebNeuro repository')

    # if db.has_option(pkg, 'external pkg url'):
    #     s += 'Other unofficial ressources\n' \
    #          '---------------------------\n\n'
    #     s += 'An unofficial package is available from %s\ .\n\n' \
    #          % db.get(pkg, 'external pkg url')
def genMaintainerSection(db, pkg):
    """Render the maintainer/packaging-status paragraph for `pkg`."""
    if not db.has_option(pkg, 'maintainer'):
        s += '\nCurrently, nobody seems to be responsible for creating or ' \
             'maintaining Debian packages of this software.\n\n'

    # there is someone responsible
    maintainer = db.get(pkg, 'maintainer')

    # do we have actual packages, or is it just a note
    if not db.has_option(pkg, 'releases'):
        s += '\nThere are currently no binary packages available. However, ' \
             'the last known packaging effort was started by %s which ' \
             'meanwhile might have led to an initial unofficial Debian ' \
             'packaging.\n\n' % maintainer

    s += '\n**Maintainer**: %s\n\n' % maintainer

    if not maintainer.startswith('Michael Hanke'):
        # steer bug reports to the repository maintainer, not upstream
        s += ' Do not contact the original package maintainer regarding ' \
             ' bugs in this unofficial binary package. Instead, contact ' \
             ' the repository maintainer at michael.hanke@gmail.com\ .'
def genBinaryPackageSummary(db, pkg, reposname):
    """Render the per-release/per-architecture download listing for `pkg`."""
    # do nothing if the are no packages
    if not db.has_option(pkg, 'releases'):

    s = '\n%s\n%s\n' % (reposname, '-' * len(reposname))

    s += 'The repository contains binary packages for the following ' \
         'distribution releases and system architectures. Note, that the ' \
         'corresponding source packages are of course available too. Please ' \
         'click on the release name to access them.\n\n'

    # for all releases this package is part of
    for rel in db.get(pkg, 'releases').split(', '):
        # write release description and component
        s += '\n`%s <%s>`_:\n ' \
             % (transCodename(rel),
                db.get(pkg, 'poolurl %s' % rel))
        s += '[%s] ' % db.get(pkg, 'component ' + rel)

        # archs this package is available for
        archs = db.get(pkg, 'archs ' + rel).split(', ')

        # extract all present versions for any arch
        versions = [db.get(pkg, 'version %s %s' % (rel, arch))

        # if there is only a single version for all of them, simplify the list
        single_ver = versions.count(versions[0]) == len(versions)

        # only one version string for all
        s += ', '.join(['`%s <%s>`_' \
             % (arch, db.get(pkg, 'file %s %s' % (rel, arch)))
        s += ' (%s)' % versions[0]

        # a separate version string for each arch
        s += ', '.join(['`%s <%s>`_ (%s)' \
             db.get(pkg, 'file %s %s' % (rel, arch)),
             db.get(pkg, 'version %s %s' % (rel, arch)))
def maintainer2email(maint):
    """Return the bare email address from a 'Name <email>' maintainer string."""
    # everything after the first '<', with the closing '>' stripped off
    addr = maint.split('<')[1]
    return addr.rstrip('>')
def writePkgsBy(db, key, value2id):
    """Write a 'Packages by <key>' toctree, grouping package sections.

    value2id maps an option value (e.g. a maintainer string) to the id
    used in the generated per-group filenames.
    """
    # get packages by maintainer
    for p in db.sections():
        if db.has_option(p, key):
            if not collector.has_key(by):
                collector[by] = (value2id(by), [p])
            collector[by][1].append(p)

    # top-level index page for this grouping
    toc = open(os.path.join(target_dir, 'by%s.rst' % key), 'w')
    toc.write('.. index:: Packages by %s\n.. _by%s:\n' % (key, key))
    heading = 'Packages by %s' % key
    toc.write('%s\n%s\n\n' % (heading, '=' * len(heading)))
    toc.write('.. toctree::\n :maxdepth: 1\n\n')

    # summary page per maintainer
    for by in sorted(collector.keys()):
        toc.write(' by%s/%s\n' % (key, collector[by][0]))
# Write the archive-content TOC and one reST page per package.
toc = open(os.path.join(target_dir, 'pkgs.rst'), 'w')
toc.write('Archive content\n===============\n\n'
          '.. toctree::\n :maxdepth: 1\n\n')

for p in sorted(db.sections()):
    print "Generating page for '%s'" % p
    pf = open(os.path.join(target_dir, 'pkgs/%s.rst' % p), 'w')
    pf.write(genPkgPage(db, p))

    # check for doc addons
    if os.path.exists(os.path.join(target_dir, 'pkgs_addenum/%s.rst' % p)):
        pf.write('\n\n.. include:: ../pkgs_addenum/%s.rst\n' %p)
    toc.write(' pkgs/%s\n' % p)
# Build the package database and generate all output.
dpa = DebianPkgArchive()

# Release files of the repositories to import
'http://apsy.gse.uni-magdeburg.de/debian/dists/dapper/Release',
'http://apsy.gse.uni-magdeburg.de/debian/dists/gutsy/Release',
'http://apsy.gse.uni-magdeburg.de/debian/dists/hardy/Release',
'http://apsy.gse.uni-magdeburg.de/debian/dists/intrepid/Release',
'http://apsy.gse.uni-magdeburg.de/debian/dists/etch/Release',
'http://apsy.gse.uni-magdeburg.de/debian/dists/lenny/Release',
'http://apsy.gse.uni-magdeburg.de/debian/dists/squeeze/Release',
'http://apsy.gse.uni-magdeburg.de/debian/dists/sid/Release',

# Blends task files that define the relevant package universe
'svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging',
'svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging-dev',
'svn://svn.debian.org/blends/projects/science/trunk/debian-science/tasks/neuroscience-cognitive',

dpa.setPkgFilterFromTaskFile(taskfiles)
# additional packages accepted although not listed in the task files
dpa.pkgfilter += ['fsl-doc', 'fslview-doc', 'fsl-atlases', 'fsl-possum-data',
                  'fsl-first-data', 'fsl-feeds']

dpa.importProspectivePkgsFromTaskFile(taskfiles[0])

for rurl in release_urls:
    dpa.importRelease(rurl, force_update=False)

dpa.writeSourcesLists()

writePkgsBy(dpa, 'maintainer', maintainer2email)