]> git.donarmstrong.com Git - neurodebian.git/commitdiff
Big rewrite using DDE and jinja instead of homebrew.
authorMichael Hanke <michael.hanke@gmail.com>
Wed, 19 Aug 2009 14:32:53 +0000 (10:32 -0400)
committerMichael Hanke <michael.hanke@gmail.com>
Wed, 19 Aug 2009 14:32:53 +0000 (10:32 -0400)
13 files changed:
Makefile
debneuro.cfg [deleted file]
neurodebian.cfg [new file with mode: 0644]
neurodebian/__init__.py [new file with mode: 0644]
neurodebian/dde.py [new file with mode: 0644]
neurodebian/templates/pkg.rst [new file with mode: 0644]
neurodebian/templates/pkgs_toc.rst [new file with mode: 0644]
neurodebian/templates/sources_lists.rst [new file with mode: 0644]
pkgs/fsl.rst
sphinx/_static/neurodebian.css
sphinx/_templates/layout.html
sphinx/conf.py
sphinx/index.rst

index 2753e678b75c82887aa6ccd36344fff507aefb78..0220a20dd7c235bcc2df3ead1962698c126c6645 100644 (file)
--- a/Makefile
+++ b/Makefile
@@ -1,11 +1,9 @@
-# old location
-WWW_UPLOAD_URI = elrond:/home/hanke/public_html/archive
-# brand new fancy one
-#WWW_UPLOAD_URI = neuro.debian.net:/home/www/neuro.debian.net/www
+WWW_UPLOAD_URI = neuro.debian.net:/home/www/neuro.debian.net/www
 WWW_DIR = build/html
 
 all: html
 
+
 prep:
        if [ ! -d build ]; then mkdir build; fi
        rsync -rvlhp sphinx/ build/src
@@ -22,32 +20,40 @@ html-stamp: pics prep source
 
 
 clean:
-       -rm -rf build
        -rm html-stamp source-stamp
        $(MAKE) -C artwork clean
 
 
-
 distclean: clean
+       -rm -rf build
        -rm -rf cache
 
 
 source: source-stamp
 source-stamp: build/db.db
-       tools/reblender generate \
-               --cfg debneuro.cfg \
+       PYTHONPATH=. python neurodebian/dde.py \
+               --cfg neurodebian.cfg \
                --db build/db.db \
                --outdir build/src \
-               --pkgaddenum pkgs
+               --pkgaddenum pkgs \
+               commandisirrelevant
        rm -f html-stamp
        touch $@
 
 
+removedb:
+       -rm build/db.db
+
+
+updatedb: distclean build/db.db
+
+
 build/db.db:
        mkdir -p build
-       tools/reblender refreshdb \
-               --cfg debneuro.cfg \
-               --db build/db.db
+       PYTHONPATH=. python neurodebian/dde.py \
+               --cfg neurodebian.cfg \
+               --db build/db.db \
+               updatedb
 
 
 upload-website: html
diff --git a/debneuro.cfg b/debneuro.cfg
deleted file mode 100644 (file)
index f90b949..0000000
+++ /dev/null
@@ -1,55 +0,0 @@
-[packages]
-# Packages listed in the following taskfiles will be featured on the website
-# _if_ they are also present in the repository
-select taskfiles =
- svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging
- svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging-dev
- svn://svn.debian.org/blends/projects/science/trunk/debian-science/tasks/neuroscience-cognitive
-
-# Additional selection filter (similar to 'select taskfiles'), only listing
-# package names
-select names = fsl-doc fslview-doc fsl-atlases fsl-possum-data fsl-first-data
- fsl-feeds libnifti1
-
-# Information about prospective packages to be imported from taskfiles
-prospective =
- svn://svn.debian.org/blends/projects/science/trunk/debian-science/tasks/neuroscience-cognitive
-
-
-[repositories]
-# Release files of all repositories to be contained in the website
-releases =
- http://apsy.gse.uni-magdeburg.de/debian/dists/dapper/Release
- http://apsy.gse.uni-magdeburg.de/debian/dists/gutsy/Release
- http://apsy.gse.uni-magdeburg.de/debian/dists/hardy/Release
- http://apsy.gse.uni-magdeburg.de/debian/dists/intrepid/Release
- http://apsy.gse.uni-magdeburg.de/debian/dists/jaunty/Release
- http://apsy.gse.uni-magdeburg.de/debian/dists/etch/Release
- http://apsy.gse.uni-magdeburg.de/debian/dists/lenny/Release
- http://apsy.gse.uni-magdeburg.de/debian/dists/squeeze/Release
- http://apsy.gse.uni-magdeburg.de/debian/dists/sid/Release
- http://apsy.gse.uni-magdeburg.de/debian/dists/sid/Release
-
-[officials]
-releases =
-  http://ftp.debian.org/debian/dists/stable/Release
-  http://ftp.debian.org/debian/dists/testing/Release
-  http://ftp.debian.org/debian/dists/unstable/Release
-
-[repository labels]
-apsy = NeuroDebian repository
-Debian = Offical Debian archive
-Ubuntu = Ubuntu
-
-[release codenames]
-etch = Debian GNU/Linux 4.0 (etch)
-lenny = Debian GNU/Linux 5.0 (lenny)
-squeeze = Debian testing (squeeze)
-sid = Debian unstable (sid)
-dapper = Ubuntu 6.06 LTS "Dapper Drake" (dapper)
-edgy = Ubuntu 6.10 "Edgy Eft" (edgy)
-feisty = Ubuntu 7.04 "Feisty Fawn" (feisty)
-gutsy = Ubuntu 7.10 "Gutsy Gibbon" (gutsy)
-hardy = Ubuntu 8.04 LTS "Hardy Heron" (hardy)
-intrepid = Ubuntu 8.10 "Intrepid Ibex" (intrepid)
-jaunty = Ubuntu 9.04 "Jaunty Jackalope" (jaunty)
diff --git a/neurodebian.cfg b/neurodebian.cfg
new file mode 100644 (file)
index 0000000..199174d
--- /dev/null
@@ -0,0 +1,51 @@
+[dde]
+dists = debian-lenny debian-squeeze debian-sid ubuntu-jaunty
+pkgquery_url = http://dde.debian.net/dde/q/udd/packages
+
+[packages]
+# Packages listed in the following taskfiles will be featured on the website
+# _if_ they are also present in the repository
+select taskfiles =
+ svn://svn.debian.org/blends/projects/science/trunk/debian-science/tasks/neuroscience-cognitive
+
+# Additional selection filter (similar to 'select taskfiles'), only listing
+# package names
+select names = fsl-doc fslview-doc fsl-atlases fsl-possum-data fsl-first-data
+ fsl-feeds libnifti1
+
+# Information about prospective packages to be imported from taskfiles
+prospective =
+ svn://svn.debian.org/blends/projects/science/trunk/debian-science/tasks/neuroscience-cognitive
+ svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging
+# svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging-dev
+
+[mirrors]
+us = http://neuro.debian.net/debian
+de = http://apsy.gse.uni-magdeburg.de/debian
+
+[neurodebian]
+# Release files of all repositories to be contained in the website
+releases =
+ http://neuro.debian.net/debian/dists/dapper/Release
+ http://neuro.debian.net/debian/dists/gutsy/Release
+ http://neuro.debian.net/debian/dists/hardy/Release
+ http://neuro.debian.net/debian/dists/intrepid/Release
+ http://neuro.debian.net/debian/dists/jaunty/Release
+ http://neuro.debian.net/debian/dists/etch/Release
+ http://neuro.debian.net/debian/dists/lenny/Release
+ http://neuro.debian.net/debian/dists/squeeze/Release
+ http://neuro.debian.net/debian/dists/sid/Release
+ http://neuro.debian.net/debian/dists/sid/Release
+
+[release codenames]
+etch = Debian GNU/Linux 4.0 (etch)
+lenny = Debian GNU/Linux 5.0 (lenny)
+squeeze = Debian testing (squeeze)
+sid = Debian unstable (sid)
+dapper = Ubuntu 6.06 LTS "Dapper Drake" (dapper)
+edgy = Ubuntu 6.10 "Edgy Eft" (edgy)
+feisty = Ubuntu 7.04 "Feisty Fawn" (feisty)
+gutsy = Ubuntu 7.10 "Gutsy Gibbon" (gutsy)
+hardy = Ubuntu 8.04 LTS "Hardy Heron" (hardy)
+intrepid = Ubuntu 8.10 "Intrepid Ibex" (intrepid)
+jaunty = Ubuntu 9.04 "Jaunty Jackalope" (jaunty)
diff --git a/neurodebian/__init__.py b/neurodebian/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/neurodebian/dde.py b/neurodebian/dde.py
new file mode 100644 (file)
index 0000000..746fee1
--- /dev/null
@@ -0,0 +1,550 @@
+#!/usr/bin/env python
+"""Tell me who you are!
+"""
+
+import pysvn
+import json
+from debian_bundle import deb822
+from ConfigParser import SafeConfigParser
+from optparse import OptionParser, Option, OptionGroup, OptionConflictError
+import sys
+import os
+import shutil
+import urllib2
+import urllib
+import subprocess
+# templating
+from jinja2 import Environment, PackageLoader
+
+from pprint import PrettyPrinter
+
+
+class AptListsCache(object):
+    def __init__(self, cachedir='build/cache',
+                 ro_cachedirs=None,
+                 init_db=None):
+        self.cachedir = cachedir
+
+        if not ro_cachedirs is None:
+            self.ro_cachedirs = ro_cachedirs
+        else:
+            self.ro_cachedirs = []
+
+        # create cachedir
+        create_dir(self.cachedir)
+
+    def get(self, url, update=False):
+        """Looks in the cache if the file is there and takes the cached one.
+        Otherwise it is downloaded first.
+
+        Knows how to deal with http:// and svn:// URLs.
+
+        :Return:
+          file handler
+        """
+        # look whether it is compressed
+        cext = url.split('.')[-1]
+        if cext in ['gz', 'bz2']:
+            target_url = url[:-1 * len(cext) -1]
+        else:
+            # assume not compressed
+            target_url = url
+            cext = None
+
+        # turn url into a filename -- mimic what APT does for
+        # /var/lib/apt/lists/
+        tfilename = '_'.join(target_url.split('/')[2:])
+
+        # if we need to download anyway do not search
+        if update:
+            cfilename = os.path.join(self.cachedir, tfilename)
+        else:
+            # look for the uncompressed file anywhere in the cache
+            cfilename = None
+            for cp in [self.cachedir] + self.ro_cachedirs:
+                if os.path.exists(os.path.join(cp, tfilename)):
+                    cfilename = os.path.join(cp, tfilename)
+
+        # nothing found?
+        if cfilename is None:
+            # add cache item
+            cfilename = os.path.join(self.cachedir, tfilename)
+            update = True
+
+        # if updated needed -- download
+        if update:
+            #print 'Caching file from %s' % url
+
+            if url.startswith('svn://'):
+                # export from SVN
+                pysvn.Client().export(url, cfilename)
+            if url.startswith('http://'):
+                # download
+                tempfile, ignored = urllib.urlretrieve(url)
+
+                # decompress
+                decompressor = None
+                if cext == 'gz':
+                    decompressor = 'gzip'
+                elif cext == 'bz2':
+                    decompressor = 'bzip2'
+                elif cext == None:
+                    decompressor = None
+                else:
+                    raise ValueError, \
+                          "Don't know how to decompress %s files" \
+                          % cext
+
+                if not decompressor is None:
+                    if subprocess.call([decompressor, '-d', '-q', '-f',
+                                       tempfile]) == 1:
+                        raise RuntimeError, \
+                              "Something went wrong while decompressing '%s'" \
+                              % tempfile
+
+                # move decompressed file into cache
+                shutil.move(os.path.splitext(tempfile)[0], cfilename)
+
+                # XXX do we need that if explicit filename is provided?
+                urllib.urlcleanup()
+
+        # open cached file
+        fh = open(cfilename, 'r')
+
+        return fh
+
+
+def add_pkgfromtaskfile(db, urls):
+    cache = AptListsCache()
+    pkgs = []
+
+    for task in urls:
+        fh = cache.get(task)
+
+        # loop over all stanzas
+        for stanza in deb822.Packages.iter_paragraphs(fh):
+            if stanza.has_key('Depends'):
+                pkg = stanza['Depends']
+            elif stanza.has_key('Suggests'):
+                pkg = stanza['Suggests']
+            else:
+                continue
+
+            # account for multiple packages per line
+            if pkg.count(','):
+                pkgs += [p.strip() for p in pkg.split(',')]
+            else:
+                pkgs.append(pkg.strip())
+
+    for p in pkgs:
+        if not db.has_key(p):
+            db[p] = get_emptydbentry()
+
+    return db
+
+def get_emptydbentry():
+    return {'main': {}}
+
+def import_blendstask(db, url):
+    cache = AptListsCache()
+    fh = cache.get(url)
+    task_name = None
+
+    # figure out blend's task page URL, since they differ from blend to blend
+    urlsec = url.split('/')
+    blendname = urlsec[-3]
+    if blendname == 'debian-med':
+        taskpage_url = 'http://debian-med.alioth.debian.org/tasks/'
+    elif blendname == 'debian-science':
+        taskpage_url = 'http://blends.alioth.debian.org/science/tasks/' 
+    else:
+        raise ValueError('Unknown blend "%s"' % blendname)
+    taskpage_url += urlsec[-1]
+
+    for st in deb822.Packages.iter_paragraphs(fh):
+        if st.has_key('Task'):
+            task_name = st['Task']
+            task = (blendname, task_name, taskpage_url)
+
+        # do not stop unless we have a description
+        if not st.has_key('Pkg-Description'):
+            continue
+
+        if st.has_key('Depends'):
+            pkg = st['Depends']
+        elif st.has_key('Suggests'):
+            pkg = st['Suggests']
+        else:
+            print 'Warning: Cannot determine name of prospective package ' \
+                    '... ignoring.'
+            continue
+
+        if not db.has_key(pkg):
+            print 'Ignoring blend package "%s"' % pkg
+            continue
+
+        info = {}
+
+        # blends info
+        info['tasks'] = [task]
+        if st.has_key('License'):
+            info['license'] = st['License']
+        if st.has_key('Responsible'):
+            info['responsible'] = st['Responsible']
+
+        # pkg description
+        descr = st['Pkg-Description'].replace('%', '%%').split('\n')
+        info['description'] = descr[0].strip()
+        info['long_description'] = ' '.join([l.strip() for l in descr[1:]])
+
+        # charge the basic property set
+        db[pkg]['main']['description'] = info['description']
+        db[pkg]['main']['long_description'] = info['long_description']
+        if st.has_key('WNPP'):
+            db[pkg]['main']['debian_itp'] = st['WNPP']
+        if st.has_key('Pkg-URL'):
+            db[pkg]['main']['other_pkg'] = st['Pkg-URL']
+        if st.has_key('Homepage'):
+            db[pkg]['main']['homepage'] = st['Homepage']
+
+        # only store if there isn't something already
+        if not db[pkg].has_key('blends'):
+            db[pkg]['blends'] = info
+        else:
+            # just add this tasks name and id
+            db[pkg]['blends']['tasks'].append(task)
+
+    return db
+
+
+def get_releaseinfo(rurl):
+    cache = AptListsCache()
+    # root URL of the repository
+    baseurl = '/'.join(rurl.split('/')[:-1])
+    # get the release file from the cache
+    release_file = cache.get(rurl)
+
+    # create parser instance
+    rp = deb822.Release(release_file)
+
+    # architectures on this dist
+    archs = rp['Architectures'].split()
+    components = rp['Components'].split()
+    # compile a new codename that also considers the repository label
+    # to distinguish between official and unofficial repos.
+    label = rp['Label']
+    origin = rp['Origin']
+    codename = rp['Codename']
+    labelcode = '_'.join([rp['Label'], rp['Codename']])
+
+    # cleanup
+    release_file.close()
+
+    return {'baseurl': baseurl, 'archs': archs, 'components': components,
+            'codename': codename, 'label': label, 'labelcode': labelcode,
+            'origin': origin}
+
+
+def build_pkgsurl(baseurl, component, arch):
+    return '/'.join([baseurl, component, 'binary-' + arch, 'Packages.bz2'])
+
+
+def import_release(cfg, db, rurl):
+    cache = AptListsCache()
+
+    ri = get_releaseinfo(rurl)
+
+    # compile the list of Packages files to parse and parse them
+    for c in ri['components']:
+        for a in ri['archs']:
+            # compile packages URL
+            pkgsurl = build_pkgsurl(ri['baseurl'], c, a)
+
+            # retrieve from cache
+            packages_file = cache.get(pkgsurl)
+
+            # parse
+            for stanza in deb822.Packages.iter_paragraphs(packages_file):
+                db = _store_pkg(cfg, db, stanza, ri['origin'], ri['codename'], c, ri['baseurl'])
+
+            # cleanup
+            packages_file.close()
+
+    return db
+
+def _store_pkg(cfg, db, st, origin, codename, component, baseurl):
+    """
+    :Parameter:
+      st: Package section
+    """
+    pkg = st['Package']
+
+    # only care for known packages
+    if not db.has_key(pkg):
+#        print 'Ignoring NeuroDebian package "%s"' % pkg
+        return db
+
+    distkey = (trans_codename(codename, cfg), 'neurodebian-' + codename)
+
+    if db[pkg].has_key(distkey):
+        info = db[pkg][distkey]
+    else:
+        info = {'architecture': []}
+
+    # fill in data
+    if not st['Architecture'] in info['architecture']:
+        info['architecture'].append(st['Architecture'])
+    info['maintainer'] = st['Maintainer']
+    if st.has_key('Homepage'):
+        info['homepage'] = st['Homepage']
+    info['version'] = st['Version']
+
+    # origin
+    info['drc'] = '%s %s %s' % (origin, codename, component)
+
+    # pool url
+    info['poolurl'] = '/'.join([os.path.dirname(st['Filename'])])
+
+    # pkg description
+    descr = st['Description'].replace('%', '%%').split('\n')
+    info['description'] = descr[0].strip()
+    info['long_description'] = ' '.join([l.strip() for l in descr[1:]])
+
+    db[pkg][distkey] = info
+
+    # charge the basic property set
+    db[pkg]['main']['description'] = info['description']
+    db[pkg]['main']['long_description'] = info['long_description']
+    if st.has_key('Homepage'):
+        db[pkg]['main']['homepage'] = st['Homepage']
+
+    return db
+
+
+def trans_codename(codename, cfg):
+    """Translate a known codename into a release description.
+
+    Unknown codenames will simply be returned as is.
+    """
+    # if we know something, tell
+    if codename in cfg.options('release codenames'):
+        return cfg.get('release codenames', codename)
+    else:
+        return codename
+
+
+def create_dir(path):
+    if os.path.exists(path):
+        return
+
+    ps = path.split(os.path.sep)
+
+    for i in range(1,len(ps) + 1):
+        p = os.path.sep.join(ps[:i])
+
+        if not os.path.exists(p):
+            os.mkdir(p)
+
+
+def dde_get(url):
+    try:
+        return json.read(urllib2.urlopen(url+"?t=json").read())['r']
+    except urllib2.HTTPError:
+        return False
+
+
+def import_dde(cfg, db):
+    dists = cfg.get('dde', 'dists').split()
+    query_url = cfg.get('dde', 'pkgquery_url')
+    for p in db.keys():
+        # get freshest
+        q = dde_get(query_url + "/all/%s" % p)
+        if q:
+            db[p]['main'] = q
+        for d in dists:
+            q = dde_get(query_url + "/prio-%s/%s" % (d, p))
+            if q:
+                db[p][(trans_codename(d.split('-')[1], cfg),d)] = q
+
+    return db
+
+
+def generate_pkgpage(pkg, cfg, db, template, addenum_dir):
+    # local binding for ease of use
+    db = db[pkg]
+    # do nothing if there is not at least the very basic stuff
+    if not db['main'].has_key('description'):
+        return
+    title = '**%s** -- %s' % (pkg, db['main']['description'])
+    underline = '*' * (len(title) + 2)
+    title = '%s\n %s\n%s' % (underline, title, underline)
+
+    # preprocess long description
+    ld = db['main']['long_description']
+    ld = ' '.join([l.lstrip(' .') for l in ld.split('\n')])
+
+    page = template.render(pkg=pkg,
+                           title=title,
+                           long_description=ld,
+                           cfg=cfg,
+                           db=db)
+    # the following can be replaced by something like
+    # {% include "sidebar.html" ignore missing %}
+    # in the template whenever jinja 2.2 becomes available
+    addenum = os.path.join(os.path.abspath(addenum_dir), '%s.rst' % pkg)
+    if os.path.exists(addenum):
+        page += '\n\n.. include:: %s\n' % addenum
+    return page
+
+
+def store_db(db, filename):
+    pp = PrettyPrinter(indent=2)
+    f = open(filename, 'w')
+    f.write(pp.pformat(db))
+    f.close()
+
+
+def read_db(filename):
+    f = open(filename)
+    db = eval(f.read())
+    return db
+
+def write_sourceslist(jinja_env, cfg, outdir):
+    create_dir(outdir)
+    create_dir(os.path.join(outdir, '_static'))
+
+    repos = {}
+    for release in cfg.options('release codenames'):
+        transrel = trans_codename(release, cfg)
+        repos[transrel] = []
+        for mirror in cfg.options('mirrors'):
+            listname = 'neurodebian.%s.%s.sources.list' % (release, mirror)
+            repos[transrel].append((mirror, listname))
+            lf = open(os.path.join(outdir, '_static', listname), 'w')
+            aptcfg = '%s %s main contrib non-free\n' % (cfg.get('mirrors', mirror),
+                                                      release)
+            lf.write('deb %s' % aptcfg)
+            lf.write('deb-src %s' % aptcfg)
+            lf.close()
+
+    srclist_template = jinja_env.get_template('sources_lists.rst')
+    sl = open(os.path.join(outdir, 'sources_lists'), 'w')
+    sl.write(srclist_template.render(repos=repos))
+    sl.close()
+
+
+def write_pkgpages(jinja_env, cfg, db, outdir, addenum_dir):
+    create_dir(outdir)
+    create_dir(os.path.join(outdir, 'pkgs'))
+
+    # generate the TOC with all packages
+    toc_template = jinja_env.get_template('pkgs_toc.rst')
+    toc = open(os.path.join(outdir, 'pkgs.rst'), 'w')
+    toc.write(toc_template.render(pkgs=db.keys()))
+    toc.close()
+
+    # and now each individual package page
+    pkg_template = jinja_env.get_template('pkg.rst')
+    for p in db.keys():
+        page = generate_pkgpage(p, cfg, db, pkg_template, addenum_dir)
+        # when no page is available skip this package
+        if page is None:
+            continue
+        pf = open(os.path.join(outdir, 'pkgs', p + '.rst'), 'w')
+        pf.write(generate_pkgpage(p, cfg, db, pkg_template, addenum_dir))
+        pf.close()
+
+
+def prepOptParser(op):
+    # use module docstring for help output
+    op.usage = "%s [OPTIONS]\n\n" % sys.argv[0] + __doc__
+
+    op.add_option("--db",
+                  action="store", type="string", dest="db",
+                  default=None,
+                  help="Database file to read. Default: None")
+
+    op.add_option("--cfg",
+                  action="store", type="string", dest="cfg",
+                  default=None,
+                  help="Repository config file.")
+
+    op.add_option("-o", "--outdir",
+                  action="store", type="string", dest="outdir",
+                  default=None,
+                  help="Target directory for ReST output. Default: None")
+
+    op.add_option("-r", "--release-url",
+                  action="append", dest="release_urls",
+                  help="None")
+
+    op.add_option("--pkgaddenum", action="store", dest="addenum_dir",
+                  type="string", default=None, help="None")
+
+
+def main():
+    op = OptionParser(version="%prog 0.0.2")
+    prepOptParser(op)
+
+    (opts, args) = op.parse_args()
+
+    if len(args) != 1:
+        print('There needs to be exactly one command')
+        sys.exit(1)
+
+    cmd = args[0]
+
+    if opts.cfg is None:
+        print("'--cfg' option is mandatory.")
+        sys.exit(1)
+    if opts.db is None:
+        print("'--db' option is mandatory.")
+        sys.exit(1)
+
+
+    cfg = SafeConfigParser()
+    cfg.read(opts.cfg)
+
+    # load existing db, unless renew is requested
+    if cmd == 'updatedb':
+        db = {}
+        if cfg.has_option('packages', 'select taskfiles'):
+            db = add_pkgfromtaskfile(db, cfg.get('packages',
+                                                 'select taskfiles').split())
+
+        # add additional package names from config file
+        if cfg.has_option('packages', 'select names'):
+            for p in cfg.get('packages', 'select names').split():
+                if not db.has_key(p):
+                    db[p] = get_emptydbentry()
+
+        # get info from task files
+        if cfg.has_option('packages', 'prospective'):
+            for url in cfg.get('packages', 'prospective').split():
+                db = import_blendstask(db, url)
+
+        # parse NeuroDebian repository
+        if cfg.has_option('neurodebian', 'releases'):
+            for rurl in cfg.get('neurodebian', 'releases').split():
+                db = import_release(cfg, db, rurl)
+
+        # collect package information from DDE
+        db = import_dde(cfg, db)
+        # store the new DB
+        store_db(db, opts.db)
+        # and be done
+        return
+
+    # load the db from file
+    db = read_db(opts.db)
+
+    # fire up jinja
+    jinja_env = Environment(loader=PackageLoader('neurodebian', 'templates'))
+
+    # generate package pages and TOC and write them to files
+    write_pkgpages(jinja_env, cfg, db, opts.outdir, opts.addenum_dir)
+
+    write_sourceslist(jinja_env, cfg, opts.outdir)
+
+if __name__ == "__main__":
+    main()
diff --git a/neurodebian/templates/pkg.rst b/neurodebian/templates/pkg.rst
new file mode 100644 (file)
index 0000000..f0d0637
--- /dev/null
@@ -0,0 +1,90 @@
+
+.. _pkg_{{ pkg }}:
+
+
+{{ title }}
+
+{{ long_description | wordwrap(width=79, break_long_words=False) }}
+
+Homepage: {{ db.main.homepage }}
+
+{% if db.blends %}
+Associated `Debian Pure Blends <http://wiki.debian.org/DebianPureBlends>`_:
+{% for blend, name, url in db.blends.tasks %}
+* `{{ name }} ({{ blend }}) <{{ url }}>`_
+{% endfor %}
+{% endif %}
+
+Binary packages
+===============
+
+{% for dist, distpkg in db|dictsort if dist[1].startswith('neurodebian') %}
+{% if loop.first %}
+NeuroDebian
+-----------
+
+{% endif %}
+{{ dist[0] }} [{{ distpkg.drc.split()[2]}}]:
+  `{{distpkg.version}} <../../debian/{{ distpkg.poolurl }}>`_ [{{ ', '.join(distpkg.architecture) }}]
+
+{% if loop.last %}
+.. seealso::
+
+  - Maintainer: {{ distpkg.maintainer }}
+{% endif %}
+{% else %}
+*There are no packages in the NeuroDebian repository.*
+{% endfor %}
+
+
+{% for dist, distpkg in db|dictsort if dist[1].startswith('debian') %}
+{% if loop.first %}
+Debian
+------
+
+{% endif %}
+{{ dist[0] }} [{{ distpkg.drc.split()[2]}}]:
+  `{{distpkg.version}} <http://packages.debian.org/search?suite={{ distpkg.drc.split()[1]}}&keywords={{ pkg }}>`_ [{{ ', '.join(distpkg.architecture) }}]
+
+{% if loop.last %}
+.. seealso::
+
+  - Maintainer: {{ distpkg.maintainer }}
+  - Bug reports: `Debian bugtracking system <http://bugs.debian.org/src:{{ distpkg.sv.split()[0] }}>`_
+{% if distpkg.popcon %}
+  - Reported installations: {{ distpkg.popcon.insts }}
+{% endif %}
+{% endif %}
+{% else %}
+*There are no official Debian packages available.*
+
+{% if db.main.debian_itp %}
+However, a Debian packaging effort has been officially announced.
+Please see the corresponding
+`intent-to-package bug report <http://bugs.debian.org/{{ db.main.debian_itp }}>`_
+for more information about its current status.
+{% endif %}
+{% endfor %}
+
+
+{% for dist, distpkg in db.iteritems() if dist[1].startswith('ubuntu') %}
+{% if loop.first %}
+Ubuntu
+------
+
+{% endif %}
+{{ dist[0] }} [{{ distpkg.drc.split()[2]}}]:
+  `{{distpkg.version}} <http://packages.ubuntu.com/search?suite={{ distpkg.drc.split()[1]}}&keywords={{ pkg }}>`_ [{{ ', '.join(distpkg.architecture) }}]
+
+{% if loop.last %}
+.. seealso::
+
+  - Maintainer: {{ distpkg.maintainer }}
+  - Bug reports: `Ubuntu Launchpad <https://bugs.launchpad.net/ubuntu/+source/{{ distpkg.sv.split()[0] }}>`_
+{% if distpkg.popcon %}
+  - Reported installations: {{ distpkg.popcon.insts }}
+{% endif %}
+{% endif %}
+{% else %}
+*There are no official Ubuntu packages available.*
+{% endfor %}
diff --git a/neurodebian/templates/pkgs_toc.rst b/neurodebian/templates/pkgs_toc.rst
new file mode 100644 (file)
index 0000000..9e13be9
--- /dev/null
@@ -0,0 +1,11 @@
+.. _full_pkg_list:
+
+Package list
+============
+
+.. toctree::
+  :maxdepth: 1
+{% for p in pkgs|sort %}
+  pkgs/{{ p }}.rst
+{%- endfor %}
+
diff --git a/neurodebian/templates/sources_lists.rst b/neurodebian/templates/sources_lists.rst
new file mode 100644 (file)
index 0000000..a69c252
--- /dev/null
@@ -0,0 +1,4 @@
+{% for dist, mirrors in repos|dictsort %}
+* {{ dist }}: {% for mirror, list in mirrors|sort %}[`{{ mirror }} <_static/{{ list }}>`_] {% endfor %}
+{% endfor %}
+
index b424bc3365e44260e684bf54c20d7572cf8f91ea..b92265f09c5a3934676ae60f5a56b30e76dc1266 100644 (file)
@@ -5,37 +5,37 @@ Since FSL covers a very broad range of analysis techniques the suite is split
 into a number of separate packages to allow a more fine-grained selection of
 the functionality provided by FSL. The following related packages are available:
 
-:ref:`deb_fsl`
+:ref:`pkg_fsl`
   This packages provides the FSL binaries. This includes everything one needs
   to run a basic fMRI analysis. However, it is recommend to also at least
-  install the :ref:`fsl-atlases <deb_fsl-atlases>` package.
+  install the :ref:`fsl-atlases <pkg_fsl-atlases>` package.
 
-:ref:`deb_fslview`
+:ref:`pkg_fslview`
   Almost everybody should install this package.
 
-:ref:`deb_fsl-doc`
+:ref:`pkg_fsl-doc`
   Contains the FSL documentation in HTML format as included in the official FSL
   sources. This package should be installed to use the online help capabilities
   of FSL.
 
-:ref:`deb_fslview-doc`
+:ref:`pkg_fslview-doc`
   The FSLView documentation in HTML format. This package is necessary for the
   online help system of FSLView.
 
-:ref:`deb_fsl-atlases`
+:ref:`pkg_fsl-atlases`
   Contains the standard space brain templates and brain atlases. Almost
   everybody should install this package.
 
-:ref:`deb_fsl-possum-data`
+:ref:`pkg_fsl-possum-data`
   This package provides the templates for the MR sequence simulator POSSUM.
   If one does not use POSSUM this package is not necessary.
 
-:ref:`deb_fsl-first-data`
+:ref:`pkg_fsl-first-data`
   This package provides model data for FIRST subcortical brain segmentation.
   This package is almost 1GB! It is only required if one wants to use FIRST
   or run the FSL FEEDS suite.
 
-:ref:`deb_fsl-feeds`
+:ref:`pkg_fsl-feeds`
   This package provides the `FSL Evaluation and Example Data Suite`_. This
   package performs two functions -- it tests whether the FSL tools are working
   properly and it provides example data to try running FSL on. Additionally the
index aca3f9ce29023bb0a4ed27f85925d80989b03dfe..2ac76d792d8bda71bf3655cdf3048b7507047989 100644 (file)
@@ -489,7 +489,7 @@ table.footnote td, table.footnote th {
 
 dl {
     margin-bottom: 15px;
-    clear: both;
+/*     clear: both; */
 }
 
 dd p {
index 4e231dc7119607ca53e367074aa040575504a933..c0c3f08d07ffca64690b4ef320f0934fbf0c4fde 100644 (file)
@@ -7,7 +7,7 @@
 {% block rootrellink %}
   <li><a href="http://www.debian.org" target="_blank">Debian</a> |&nbsp;</li>
   <li><a href="{{ pathto('index') }}">Neuroscience Repository</a> &raquo;</li>
-  <li><a href="{{ pathto('genindex') }}">Package list</a></li>
+  <li><a href="pkgs.html">Package list</a></li>
 {% endblock %}
 
 {% block sidebar1 %}
index 7f5eb46b130430be1d9b4e9a49653a4f4ac0217f..644c35862083051182df769f8d7825608b4da050 100644 (file)
@@ -130,7 +130,7 @@ html_logo = os.path.join(artworkdir(), 'logo_tuned/fmri_w200.png')
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static', 'static']
+html_static_path = ['_static']
 
 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
index 128aa3254853fbbc433d032bc6f87176738c7fae..20d29d877207c13aef39dd9f0f3c3d1958ba2aea 100644 (file)
@@ -14,20 +14,8 @@ system (although that is rather unlikely). You've been warned!
 
 The repository contains both neuroscience-related packages, as well as general
 purpose software which is necessary to resolve dependencies, or such that is
-simply useful in the neuroscience context. The featured neuroscience software
-can be browsed via the repository :ref:`genindex` or through the
-:ref:`maintainer view <bymaintainer>`.
-
-All other packages are available from the :ref:`full package list
-<full_pkg_list>`.
-
-
-News
-====
-
-Due to scheduled maintenance work on the electrical grid the repository hosted
-at `apsy.gse.uni.magdeburg.de/debian` will be down on May 16 (and possibly
-May 17).
+simply useful in the neuroscience context. All featured neuroscience software
+packages are available from the :ref:`full package list <full_pkg_list>`.
 
 
 .. _repository_howto:
@@ -36,13 +24,14 @@ How to use this repository
 ==========================
 
 The easiest way to use this repository is to download an APT-configuration file
-(`sources.list`). Simply click on the name of your target distribution/release
-and save the downloaded file in the `/etc/apt/sources.list.d/` directory on
-your system (depending on the browser, you might have to right-click and choose
-'save as').  Saving files in this directory will require superuser privileges,
-therefore you should probably download the file into a temporary directory and
-subsequently move it into `/etc/apt/sources.list.d/`. APT-configurations are
-available for the following releases:
+(`sources.list`). Simply choose your target distribution/release and download
+the configuration for a mirror close to you (depending on your browser, you
+might have to right-click and choose 'save as'). Once downloaded, put the file
+in the `/etc/apt/sources.list.d/` directory on your system. Moving files in
+this directory will require superuser privileges, therefore you should probably
+download the file into a temporary directory and subsequently move it into
+`/etc/apt/sources.list.d/`. APT-configurations are available for the following
+releases and repository mirrors:
 
 .. include:: sources_lists