From: Michael Hanke Date: Sun, 5 Apr 2009 16:59:13 +0000 (+0200) Subject: Initial (crude) but somewhat functional state. X-Git-Url: https://git.donarmstrong.com/?a=commitdiff_plain;h=3b8a7885dffc32b59df2fc645fe40f86f3d657ff;p=neurodebian.git Initial (crude) but somewhat functional state. --- diff --git a/Makefile b/Makefile index 39fe377..1d79dab 100644 --- a/Makefile +++ b/Makefile @@ -7,32 +7,34 @@ SPHINXBUILD = sphinx-build PAPER = # Internal variables. +SRCDIR = build/source PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) $(SRCDIR) + + +WWW_UPLOAD_URI = elrond:/home/hanke/public_html/archive +WWW_DIR = build/html + .PHONY: help clean html web pickle htmlhelp latex changes linkcheck -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " changes to make an overview over all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" + +prep: + mkdir -p build + cp -r source build/ + clean: - -rm -rf build/* + -rm -rf build -html: +html: prep mkdir -p build/html build/doctrees $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html @echo @echo "Build finished. The HTML pages are in build/html." -pickle: +pickle: prep mkdir -p build/pickle build/doctrees $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle @echo @@ -40,20 +42,20 @@ pickle: web: pickle -json: +json: prep mkdir -p build/json build/doctrees $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json @echo @echo "Build finished; now you can process the JSON files." 
-htmlhelp: +htmlhelp: prep mkdir -p build/htmlhelp build/doctrees $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in build/htmlhelp." -latex: +latex: prep mkdir -p build/latex build/doctrees $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex @echo @@ -61,15 +63,23 @@ latex: @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." -changes: +changes: prep mkdir -p build/changes build/doctrees $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes @echo @echo "The overview file is in build/changes." -linkcheck: +linkcheck: prep mkdir -p build/linkcheck build/doctrees $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ - "or in build/linkcheck/output.txt." + "or in build/linkcheck/output.txt." + + +update-db: + python deb2rst/archive.py + + +upload-website: html + rsync -rvzlhp --delete --chmod=Dg+s,g+rw $(WWW_DIR) $(WWW_UPLOAD_URI) diff --git a/deb2rst/archive.py b/deb2rst/archive.py index 2d6df3e..62d68ad 100644 --- a/deb2rst/archive.py +++ b/deb2rst/archive.py @@ -1,26 +1,161 @@ import urllib -import apt_pkg as ap import apt +from debian_bundle import deb822 +from debian_bundle import debtags from ConfigParser import SafeConfigParser -import gzip import subprocess +import os +import shutil +import pysvn + +target_dir = 'build/source' + +codename2descr = { + 'apsy_etch': 'Debian GNU/Linux 4.0 (etch)', + 'apsy_lenny': 'Debian GNU/Linux 5.0 (lenny)', + 'apsy_squeeze': 'Debian testing (squeeze)', + 'apsy_sid': 'Debian unstable (sid)', + 'apsy_dapper': 'Ubuntu 6.06 LTS "Dapper Drake" (dapper)', + 'apsy_edgy': 'Ubuntu 6.10 "Edgy Eft" (edgy)', + 'apsy_feisty': 'Ubuntu 7.04 "Feisty Fawn" (feisty)', + 'apsy_gutsy': 'Ubuntu 7.10 "Gutsy Gibbon" (gutsy)', + 'apsy_hardy': 'Ubuntu 8.04 LTS "Hardy Heron" (hardy)', + 'apsy_intrepid': 'Ubuntu 
8.10 "Intrepid Ibex" (intrepid)', + 'apsy_jaunty': 'Ubuntu 9.04 "Jaunty Jackalope" (jaunty)', + } + +def transCodename(codename): + """Translate a known codename into a release description. + + Unknown codenames will simply be returned as is. + """ + if codename in codename2descr.keys(): + return codename2descr[codename] + else: + return codename + + + +class AptListsCache(object): + def __init__(self, cachedir='cache', ro_cachedirs=None): + self.cachedir = cachedir + + if not ro_cachedirs is None: + self.ro_cachedirs = ro_cachedirs + else: + self.ro_cachedirs = [] + + # always use system cache + self.ro_cachedirs.append('/var/lib/apt/lists/') + + + def get(self, url, update=False): + """Looks in the cache if the file is there and takes the cached one. + Otherwise it is downloaded first. + + Knows how to deal with http:// and svn:// URLs. + + :Return: + file handler + """ + # look whether it is compressed + cext = url.split('.')[-1] + if cext in ['gz', 'bz2']: + target_url = url[:-1 * len(cext) -1] + else: + # assume not compressed + target_url = url + cext = None + + # turn url into a filename -- mimik what APT does for + # /var/lib/apt/lists/ + tfilename = '_'.join(target_url.split('/')[2:]) + + # if we need to download anyway do not search + if update: + cfilename = os.path.join(self.cachedir, tfilename) + else: + # look for the uncompressed file anywhere in the cache + cfilename = None + for cp in [self.cachedir] + self.ro_cachedirs: + if os.path.exists(os.path.join(cp, tfilename)): + cfilename = os.path.join(cp, tfilename) + + # nothing found? 
+ if cfilename is None: + # add cache item + cfilename = os.path.join(self.cachedir, tfilename) + update = True + + # if updated needed -- download + if update: + print 'Caching file from %s' % url + + if url.startswith('svn://'): + # export from SVN + pysvn.Client().export(url, cfilename) + if url.startswith('http://'): + # download + tempfile, ignored = urllib.urlretrieve(url) + + # decompress + decompressor = None + if cext == 'gz': + decompressor = 'gzip' + elif cext == 'bz2': + decompressor = 'bzip2' + elif cext == None: + decompressor = None + else: + raise ValueError, \ + "Don't know how to decompress %s files" \ + % cext + + if not decompressor is None: + if subprocess.call([decompressor, '-d', '-q', '-f', + tempfile]) == 1: + raise RuntimeError, \ + "Something went wrong while decompressing '%s'" \ + % tempfile + + # move decompressed file into cache + shutil.move(os.path.splitext(tempfile)[0], cfilename) + + # XXX do we need that if explicit filename is provided? + urllib.urlcleanup() + + # open cached file + fh = open(cfilename, 'r') + + return fh + + + class DebianPkgArchive(SafeConfigParser): """ """ - def __init__(self, dists): + def __init__(self, cache=None): """ :Parameter: - dists: list - List of Release file URLs, one for each distribution in the archive. 
""" SafeConfigParser.__init__(self) - for dist in dists: - filename, ignored = urllib.urlretrieve(dist) - baseurl = '/'.join(dist.split('/')[:-1]) - self._parseDistribution(filename, baseurl) - urllib.urlcleanup() + # release codnames found in the repos + self.releases = {} + + # use provided file cache or use fresh one + if not cache is None: + self.cache = cache + else: + self.cache = AptListsCache() + + # init debtags DB + self.dtags = debtags.DB() + self.dtags.read(open('/var/lib/debtags/package-tags')) + + # init package filter + self.pkgfilter = None def __repr__(self): @@ -70,60 +205,70 @@ class DebianPkgArchive(SafeConfigParser): self.set(section, option, ', '.join(l + [value])) - def _parseDistribution(self, rfile, baseurl): - """ - :Parameter: - rfile: filename - Release file for the distribution - baseurl: str - Base URL of this distribution. This path contains the Release file. - """ + def importRelease(self, rurl, force_update=False): + # root URL of the repository + baseurl = '/'.join(rurl.split('/')[:-1]) + # get the release file from the cache + release_file = self.cache.get(rurl, update=force_update) + # create parser instance - rparser = ap.ParseTagFile(open(rfile, 'r')) - # get release section content - rparser.Step() + rp = deb822.Release(release_file) # architectures on this dist - archs = rparser.Section['Architectures'].split() - components = rparser.Section['Components'].split() - codename = rparser.Section['Codename'] + archs = rp['Architectures'].split() + components = rp['Components'].split() + # compile a new codename that also considers the repository label + # to distinguish between official and unofficial repos. 
+ codename = '_'.join([rp['Label'], rp['Codename']]) + + # store the release itself + if not codename in self.releases.keys(): + self.releases[codename] = components # compile the list of Packages files to parse and parse them for c in components: for a in archs: - # compile URL - pkgsurl = '/'.join([baseurl, c, 'binary-' + a, 'Packages.gz']) - # retrieve - filename, ignored = urllib.urlretrieve(pkgsurl) - # decompress - subprocess.call(['gzip', '-d', filename]) + # compile packages URL + pkgsurl = '/'.join([baseurl, c, 'binary-' + a, 'Packages.bz2']) + + # retrieve from cache + packages_file = self.cache.get(pkgsurl, + update=force_update) + # parse - self._parsePkgsFile(filename[:-3], codename, c) - break + self._parsePkgsFile(packages_file, codename, c, baseurl) + + # cleanup + packages_file.close() + # cleanup + release_file.close() - def _parsePkgsFile(self, pfile, codename, component): + + def _parsePkgsFile(self, fh, codename, component, baseurl): """ :Parameters: - pfile: Packages filename + fh: file handler + Packages list file codename: str Codename of the release component: str The archive component this packages file corresponds to. 
""" - pp = ap.ParseTagFile(open(pfile, 'r')) - - while pp.Step(): - sec = pp.Section - self._storePkg(sec, codename, component) + for stanza in deb822.Packages.iter_paragraphs(fh): + self._storePkg(stanza, codename, component, baseurl) - def _storePkg(self, psec, codename, component): + def _storePkg(self, st, codename, component, baseurl): """ :Parameter: - psec: apt_pkg parser section + st: Package section """ - pkg = psec['Package'] + pkg = st['Package'] + + # do nothing if package is not in filter if there is any + if not self.pkgfilter is None and not pkg in self.pkgfilter: + return if not self.has_section(pkg): self.add_section(pkg) @@ -132,31 +277,139 @@ class DebianPkgArchive(SafeConfigParser): self.appendUniqueCSV(pkg, "releases", codename) # arch listing - self.appendUniqueCSV(pkg, "archs %s" % codename, psec['Architecture']) + self.appendUniqueCSV(pkg, "archs %s" % codename, st['Architecture']) # versions self.ensureUnique(pkg, - "version %s %s" % (codename, psec['Architecture']), - psec['Version']) + "version %s %s" % (codename, st['Architecture']), + st['Version']) + + # link to .deb + self.ensureUnique(pkg, + "file %s %s" % (codename, st['Architecture']), + '/'.join(baseurl.split('/')[:-2] + [st['Filename']])) + + # component + self.ensureUnique(pkg, 'component ' + codename, component) + + # store the pool url + self.ensureUnique(pkg, "poolurl %s" % codename, + '/'.join(baseurl.split('/')[:-2] \ + + [os.path.dirname(st['Filename'])])) + # now the stuff where a single variant is sufficient and where we go for # the latest available one if self.has_option(pkg, "newest version") \ - and apt.VersionCompare(psec['Version'], + and apt.VersionCompare(st['Version'], self.get(pkg, "newest version")) < 0: return # everything from here will overwrite existing ones # we seems to have an updated package - self.set(pkg, "newest version", psec['Version']) + self.set(pkg, "newest version", st['Version']) # description - self.set(pkg, "description", 
psec['Description']) + self.set(pkg, "description", st['Description'].replace('%', '%%')) + + # maintainer + self.set(pkg, "maintainer", st['Maintainer']) # optional stuff - if psec.has_key('Homepage'): - self.set(pkg, 'homepage', psec['Homepage']) + if st.has_key('Homepage'): + self.set(pkg, 'homepage', st['Homepage']) + + # query debtags + debtags = self.dtags.tagsOfPackage(pkg) + if debtags: + self.set(pkg, 'debtags', ', '.join(debtags)) + + + def writeSourcesLists(self): + fl = open(os.path.join(target_dir, 'sources_lists'), 'w') + for trans, r in sorted([(transCodename(k), k) + for k in self.releases.keys()]): + f = open(os.path.join(target_dir, + "static/debneuro.%s.sources.list" % r), + 'w') + f.write("deb http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \ + % (r, ' '.join(self.releases[r]))) + f.write("deb-src http://apsy.gse.uni-magdeburg.de/debian %s %s\n" \ + % (r, ' '.join(self.releases[r]))) + # XXX use :download: role from sphinx 0.6 on + fl.write('* `%s `_\n' \ + % (trans, r)) + f.close() + fl.close() + + + def importProspectivePkgsFromTaskFile(self, url): + fh = dpa.cache.get(url) + + for st in deb822.Packages.iter_paragraphs(fh): + # do not stop unless we have a description + if not st.has_key('Pkg-Description'): + continue + + if st.has_key('Depends'): + pkg = st['Depends'] + elif st.has_key('Suggests'): + pkg = st['Suggests'] + else: + print 'Warning: Cannot determine name of prospective package ' \ + '... ignoring.' 
+ continue + + # store pkg info + if not self.has_section(pkg): + self.add_section(pkg) + + # pkg description + self.set(pkg, "description", + st['Pkg-Description'].replace('%', '%%')) + + # optional stuff + if st.has_key('Homepage'): + self.set(pkg, 'homepage', st['Homepage']) + + if st.has_key('Pkg-URL'): + self.set(pkg, 'external pkg url', st['Pkg-URL']) + + if st.has_key('WNPP'): + self.set(pkg, 'wnpp debian', st['WNPP']) + + if st.has_key('License'): + self.set(pkg, 'license', st['License']) + + # treat responsible as maintainer + if st.has_key('Responsible'): + self.set(pkg, "maintainer", st['Responsible']) + + + def setPkgFilterFromTaskFile(self, urls): + for task in taskfiles: + fh = dpa.cache.get(task) + + pkgs = [] + + # loop over all stanzas + for stanza in deb822.Packages.iter_paragraphs(fh): + if stanza.has_key('Depends'): + pkg = stanza['Depends'] + elif stanza.has_key('Suggests'): + pkg = stanza['Suggests'] + else: + continue + + # account for multiple packages per line + if pkg.count(','): + pkgs += [p.strip() for p in pkg.split(',')] + else: + pkgs.append(pkg.strip()) + + # activate filter + self.pkgfilter = pkgs def genPkgPage(db, pkg): @@ -166,13 +419,227 @@ def genPkgPage(db, pkg): pkg: str Package name """ - pass + descr = db.get(pkg, 'description').split('\n') + + s = '.. index:: %s, ' % pkg +# s += db.get(pkg, 'maintainer').split('<')[0] + + s += '\n' + + if db.has_option(pkg, 'debtags'): + # filter tags + tags = [t for t in db.get(pkg, 'debtags').split(', ') + if t.split('::')[0] in ['field', 'works-with']] + if len(tags): + s += '.. index:: %s\n\n' % ', '.join(tags) + + # main ref target for this package + s += '.. 
_deb_' + pkg + ':\n' + + # separate header from the rest + s += '\n\n\n' + + header = '%s -- %s' % (pkg, descr[0]) + s += '*' * (len(header) + 2) + s += '\n ' + header + '\n' + s += '*' * (len(header) + 2) + '\n\n' + + # put description + s += '\n'.join([l.lstrip(' .') for l in descr[1:]]) + s += '\n' + + if db.has_option(pkg, 'homepage'): + s += '\n**Homepage**: %s\n' % db.get(pkg, 'homepage') + + s += '\nBinary packages'\ + '\n===============\n' + + s += genMaintainerSection(db, pkg) + + if db.has_option(pkg, 'wnpp debian'): + s += 'A Debian packaging effort has been officially announced. ' \ + 'Please see the corresponding `intent-to-package bug report`_ ' \ + 'for more information about its current status.\n\n' \ + '.. _intent-to-package bug report: http://bugs.debian.org/%s\n\n' \ + % db.get(pkg, 'wnpp debian') + + s += genBinaryPackageSummary(db, pkg, 'DebNeuro repository') + +# if db.has_option(pkg, 'external pkg url'): +# s += 'Other unofficial ressources\n' \ +# '---------------------------\n\n' +# s += 'An unofficial package is available from %s\ .\n\n' \ +# % db.get(pkg, 'external pkg url') + return s + + +def genMaintainerSection(db, pkg): + s = '' + + if not db.has_option(pkg, 'maintainer'): + s += '\nCurrently, nobody seems to be responsible for creating or ' \ + 'maintaining Debian packages of this software.\n\n' + return s + + # there is someone responsible + maintainer = db.get(pkg, 'maintainer') + + # do we have actual packages, or is it just a note + if not db.has_option(pkg, 'releases'): + s += '\nThere are currently no binary packages available. However, ' \ + 'the last known packaging effort was started by %s which ' \ + 'meanwhile might have led to an initial unofficial Debian ' \ + 'packaging.\n\n' % maintainer + return s + + s += '\n**Maintainer**: %s\n\n' % maintainer + + if not maintainer.startswith('Michael Hanke'): + s += '\n.. 
note::\n' + s += ' Do not contact the original package maintainer regarding ' \ + ' bugs in this unofficial binary package. Instead, contact ' \ + ' the repository maintainer at michael.hanke@gmail.com\ .' + + return s + + +def genBinaryPackageSummary(db, pkg, reposname): + # do nothing if the are no packages + if not db.has_option(pkg, 'releases'): + return '' + + s = '\n%s\n%s\n' % (reposname, '-' * len(reposname)) + + s += 'The repository contains binary packages for the following ' \ + 'distribution releases and system architectures. Note, that the ' \ + 'corresponding source packages are of course available too. Please ' \ + 'click on the release name to access them.\n\n' + + # for all releases this package is part of + for rel in db.get(pkg, 'releases').split(', '): + # write release description and component + s += '\n`%s <%s>`_:\n ' \ + % (transCodename(rel), + db.get(pkg, 'poolurl %s' % rel)) + + s += '[%s] ' % db.get(pkg, 'component ' + rel) + + # archs this package is available for + archs = db.get(pkg, 'archs ' + rel).split(', ') + + # extract all present versions for any arch + versions = [db.get(pkg, 'version %s %s' % (rel, arch)) + for arch in archs] + + # if there is only a single version for all of them, simplify the list + single_ver = versions.count(versions[0]) == len(versions) + + if single_ver: + # only one version string for all + s += ', '.join(['`%s <%s>`_' \ + % (arch, db.get(pkg, 'file %s %s' % (rel, arch))) + for arch in archs]) + s += ' (%s)' % versions[0] + else: + # a separate version string for each arch + s += ', '.join(['`%s <%s>`_ (%s)' \ + % (arch, + db.get(pkg, 'file %s %s' % (rel, arch)), + db.get(pkg, 'version %s %s' % (rel, arch))) + for arch in archs]) + + s += '\n' + + return s + +def maintainer2email(maint): + return maint.split('<')[1].rstrip('>') + + +def writePkgsBy(db, key, value2id): + collector = {} + + # get packages by maintainer + for p in db.sections(): + if db.has_option(p, key): + by = db.get(p, key) + + if not 
collector.has_key(by): + collector[by] = (value2id(by), [p]) + else: + collector[by][1].append(p) + + toc = open(os.path.join(target_dir, 'by%s.rst' % key), 'w') + toc.write('.. index:: Packages by %s\n.. _by%s:\n' % (key, key)) + + heading = 'Packages by %s' % key + toc.write('%s\n%s\n\n' % (heading, '=' * len(heading))) + toc.write('.. toctree::\n :maxdepth: 1\n\n') + + # summary page per maintainer + for by in sorted(collector.keys()): + toc.write(' by%s/%s\n' % (key, collector[by][0])) + + toc.close() + + +def writeRst(db): + # open pkgs toctree + toc = open(os.path.join(target_dir, 'pkgs.rst'), 'w') + # write header + toc.write('Archive content\n===============\n\n' + '.. toctree::\n :maxdepth: 1\n\n') + + for p in sorted(db.sections()): + print "Generating page for '%s'" % p + pf = open(os.path.join(target_dir, 'pkgs/%s.rst' % p), 'w') + pf.write(genPkgPage(db, p)) + + # check for doc addons + if os.path.exists(os.path.join(target_dir, 'pkgs_addenum/%s.rst' % p)): + pf.write('\n\n.. 
include:: ../pkgs_addenum/%s.rst\n' %p) + pf.close() + toc.write(' pkgs/%s\n' % p) + + + toc.close() + + + +dpa = DebianPkgArchive() + + +release_urls=[ + 'http://apsy.gse.uni-magdeburg.de/debian/dists/dapper/Release', + 'http://apsy.gse.uni-magdeburg.de/debian/dists/gutsy/Release', + 'http://apsy.gse.uni-magdeburg.de/debian/dists/hardy/Release', + 'http://apsy.gse.uni-magdeburg.de/debian/dists/intrepid/Release', + 'http://apsy.gse.uni-magdeburg.de/debian/dists/etch/Release', + 'http://apsy.gse.uni-magdeburg.de/debian/dists/lenny/Release', + 'http://apsy.gse.uni-magdeburg.de/debian/dists/squeeze/Release', + 'http://apsy.gse.uni-magdeburg.de/debian/dists/sid/Release', + ] + +taskfiles = [ + 'svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging', + 'svn://svn.debian.org/blends/projects/med/trunk/debian-med/tasks/imaging-dev', + 'svn://svn.debian.org/blends/projects/science/trunk/debian-science/tasks/neuroscience-cognitive', + ] + +dpa.setPkgFilterFromTaskFile(taskfiles) +dpa.pkgfilter += ['fsl-doc', 'fslview-doc', 'fsl-atlases', 'fsl-possum-data', + 'fsl-first-data', 'fsl-feeds'] + +dpa.importProspectivePkgsFromTaskFile(taskfiles[0]) + +for rurl in release_urls: + dpa.importRelease(rurl, force_update=False) + +dpa.save('db.db') + +dpa.writeSourcesLists() +writeRst(dpa) +writePkgsBy(dpa, 'maintainer', maintainer2email) -dpa = DebianPkgArchive( - [ - 'http://elrond/debian/dists/dapper/Release', -# 'http://elrond/debian/dists/etch/Release', - ]) -print dpa diff --git a/source/.templates/layout.html b/source/.templates/layout.html new file mode 100644 index 0000000..4e231dc --- /dev/null +++ b/source/.templates/layout.html @@ -0,0 +1,19 @@ +{% extends "!layout.html" %} + +{% block extrahead %} + +{% endblock %} + +{% block rootrellink %} +
+    <li><a href="http://www.debian.org/">Debian</a> &raquo;</li>
+    <li><a href="index.html">Neuroscience Repository</a> &raquo;</li>
+    <li><a href="pkgs.html">Package list</a></li>
-#html_logo = None +html_logo = 'pics/debian-imaging.jpg' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 @@ -115,7 +115,7 @@ html_style = 'default.css' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['.static'] +html_static_path = ['static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. @@ -133,7 +133,7 @@ html_static_path = ['.static'] #html_additional_pages = {} # If false, no module index is generated. -#html_use_modindex = True +html_use_modindex = False # If false, no index is generated. #html_use_index = True @@ -142,7 +142,7 @@ html_static_path = ['.static'] #html_split_index = False # If true, the reST sources are included in the HTML build as _sources/. -#html_copy_source = True +html_copy_source = False # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the @@ -153,7 +153,7 @@ html_static_path = ['.static'] #html_file_suffix = '' # Output file base name for HTML help builder. -htmlhelp_basename = 'DebianPackageRepositorydoc' +htmlhelp_basename = 'debpkgrepo' # Options for LaTeX output diff --git a/source/gpg.rst b/source/gpg.rst new file mode 100644 index 0000000..43ec2d6 --- /dev/null +++ b/source/gpg.rst @@ -0,0 +1,33 @@ +.. _gpg_signatures: + + +Package authentication +====================== + + +When you start using this repository, you might get warning messages like this:: + + The following signatures couldn't be verified because + the public key is not available.` + +Or you will be asked questions like this over and over:: + + WARNING: The following packages cannot be authenticated! + ... 
+ Install these packages without verification [y/N]? + +This is because your APT installation does not know the GPG key that is used to +sign the release files of this repository. Making APT happy again is easy: + +1. Get the key. Either download the `repository key from here + `_ + or fetch it from *subkeys.pgp.net*. + +2. Now feed the key into APT by invoking:: + + apt-key add #file# + + Where `#file#` has to be replaced with the location of the key file you just + downloaded. You need to have superuser-privileges to do this (either do it + as root or use sudo). + diff --git a/source/index.rst b/source/index.rst index 81064f2..50b233f 100644 --- a/source/index.rst +++ b/source/index.rst @@ -1,19 +1,78 @@ -.. Debian Package Repository documentation master file, created by sphinx-quickstart on Sat Mar 28 17:08:58 2009. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. +*********************************************** + Welcome to the Debian Neuroscience Repository +*********************************************** -Welcome to Debian Package Repository's documentation! -===================================================== +This repository provides mostly neuroscience-related packages to be +used on Debian systems (or a Debian-derivates like Ubuntu). It +contains both unofficial or prospective packages which are not +available from the main Debian archive, as well backported or simply +rebuilt packages also available elsewhere. -Contents: +This service is provided "as is". There is no guarantee that a package +works as expected, so use them at your own risk. They might kill your +system (although that is rather unlikely). You've been warned! -.. toctree:: - :maxdepth: 2 +An exhaustive list of available packages is provided by the Package +:ref:`genindex`. 
-Indices and tables -================== -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` +How to use this repository +========================== +The easiest way to use this repository is to download an APT-configuration +file. Simply click on the name of your target distribution/release and save the +downloaded file in the `/etc/apt/sources.list.d/` directory on your system +(depending on the browser, you might have to right-click and choose 'save as'). +Saving files in this directory will require superuser privileges, therefore you +should probably download the file into a temporary directory and subsequently +move it into `/etc/apt/sources.list.d/`. APT-configurations are available for +the following releases: + +.. include:: sources_lists + +Once this is done, you have to update the package index. Use your favorite +package manager, e.g. synaptic, adept, or whatever you like. In the terminal +you can use :command:`aptitude` to achieve the same:: + + sudo aptitude update + +Now, you can proceed to install packages, e.g.:: + + sudo aptitude install lipsia + +.. note:: + Not every package is available for all distributions/releases. For information + about which package version is available for which release and architecture, + please have a look at the corresponding package pages. + + +Package authentication +---------------------- + +When you start using this repository, you might get warning messages +like this:: + + The following signatures couldn't be verified because + the public key is not available.` + +Or you will be asked questions like this over and over:: + + WARNING: The following packages cannot be authenticated! + ... + Install these packages without verification [y/N]? + +This is because your APT installation initially does not know the GPG +key that is used to sign the release files of this repository. Making +APT happy again is easy: + +1. Get the key. Either download the `repository key from here + `_ + or fetch it from *subkeys.pgp.net*. 
+ +2. Now feed the key into APT by invoking:: + + apt-key add #file# + + Where `#file#` has to be replaced with the location of the key file you just + downloaded. You need to have superuser-privileges to do this (either do it + as root or use sudo). diff --git a/source/pics/debian-imaging.jpg b/source/pics/debian-imaging.jpg new file mode 100644 index 0000000..bb1dcdf Binary files /dev/null and b/source/pics/debian-imaging.jpg differ diff --git a/source/pkgs_addenum/fsl.rst b/source/pkgs_addenum/fsl.rst new file mode 100644 index 0000000..440420f --- /dev/null +++ b/source/pkgs_addenum/fsl.rst @@ -0,0 +1,263 @@ +Related packages +================ + +Since FSL covers a very broad range of analysis techniques the suite is split +into a number of separate packages to allow a more fine-grained selection of +the functionality provided by FSL. The following related packages are available: + +:ref:`deb_fsl` + This packages provides the FSL binaries. This includes everything one needs + to run a basic fMRI analysis. However, it is recommend to also at least + install the :ref:`fsl-atlases ` package. + +:ref:`deb_fslview` + Almost everybody should install this package. + +:ref:`deb_fsl-doc` + Contains the FSL documentation in HTML format as included in the official FSL + sources. This package should be installed to use the online help capabilities + of FSL. + +:ref:`deb_fslview-doc` + The FSLView documentation in HTML format. This package is necessary for the + online help system of FSLView. + +:ref:`deb_fsl-atlases` + Contains the standard space brain templates and brain atlases. Almost + everybody should install this package. + +:ref:`deb_fsl-possum-data` + This package provides the templates for the MR sequence simulator POSSUM. + If one does not use POSSUM this package is not necessary. + +:ref:`deb_fsl-first-data` + This package provides model data for FIRST subcortical brain segmentation. + This package is almost 1GB! 
It is only required if one wants to use FIRST + or run the FSL FEEDS suite. + +:ref:`deb_fsl-feeds` + This package provides the `FSL Evaluation and Example Data Suite`_. This + package performs two functions -- it tests whether the FSL tools are working + properly and it provides example data to try running FSL on. Additionally the + :command:`fsl-selftest` command is avaliable. This is a little script that + runs all tests (or a selected one) in a temporary directory and reports the + results. A manpage is included. This package can be used to perform + `FSL benchmarks`_. + +.. _FSL Evaluation and Example Data Suite: http://www.fmrib.ox.ac.uk/fsl/fsl/feeds.html + + +Report bugs +=========== + +If you discover any bugs please report them. The best way to get quick and +professional help is to post to the `FSL mailing list`_. If you send a +bugreport please include detailed information about the problem. This should at +least be a description how the bug can be reproduced as well as information +concerning you environment (for example the operation system). You might also +want to have a look at the mailing list archive whether the problem has been +discovered before. + +.. _FSL mailing list: http://www.jiscmail.ac.uk/lists/fsl.html + +If you use the package on a Debian system (not Ubuntu) you can simply use the +:command:`reportbug` tool to send a bug report to the `Debian bug tracking +system`_. The bug tracker provides a public list of all reported `bugs of FSL`_ +and `bugs of FSLView`_ + +.. _bugs of FSL: http://bugs.debian.org/src:fsl +.. _bugs of FSLVIEW: http://bugs.debian.org/src:fslview +.. _Debian bug tracking system: http://bugs.debian.org + + +Additional information +====================== + +Since December 2007 the FSL package is officially part of the non-free +section of Debian. The latest package version will always be available +from http://packages.debian.org/sid/fsl in the Debian archive. 
+However, this only applies to the packages of the FSL and FSLView
+binaries. FSL data packages (first, possum, atlases and feeds) are not
+yet official Debian packages and will be available from here, as well
+as backports for Debian and recent Ubuntu releases.
+
+.. note::
+
+  Please be sure to `read the information`_ about the differences
+  between the Debian packaging and the official FSL releases.
+
+.. _read the information: http://git.debian.org/?p=pkg-exppsy/fsl.git;a=blob;f=debian/README.Debian;hb=debian


+
+Usage information
+-----------------
+
+FSL requires a config file to be sourced before it can be used. For the Debian
+packages this config file is in `/etc/fsl/fsl.sh`. Open a terminal where you
+want to start FSL and source it like this::
+
+  . /etc/fsl/fsl.sh
+
+Note the dot at the beginning. If you want to have this done automatically, you
+could add that line to e.g. your `$HOME/.bashrc` file (or a corresponding
+config file of another POSIX-compatible shell). Once you have done that, you
+can start using FSL.
+
+
+
+Upgrading from FSL 3.x
+----------------------
+
+The FSL configuration file has changed significantly. Please be sure to
+(re)source it.
+
+.. note::
+
+  There seem to be some remaining incompatibilities of FSL scripts with the
+  *dash* shell. This is the default shell on Ubuntu systems. If you discover any
+  problems, please make sure to read `a related posting on the FSL mailing
+  list`_.
+
+.. _a related posting on the FSL mailing list: http://www.jiscmail.ac.uk/cgi-bin/webadmin?A2=ind0709&L=fsl&T=0&F=&S=&P=19638
+
+
+Building binary packages yourself
+---------------------------------
+
+If no binary packages for your distribution/platform are available, you can
+still build your own. All you need to do is to add this line to your
+`/etc/apt/sources.list`::
+
+  deb-src http://apsy.gse.uni-magdeburg.de/debian #distro# main non-free
+
+Choose the value of `#distro#` as described in the binary package section. 
Be +sure to update your package list after that (Remember: :command:`aptitude +update`). To build FSL packages, do this (no superuser privileges required, +but you might have to install `apt-src` first)::
+
+  apt-src install fsl
+  apt-src build fsl
+
+Be patient as this will take some time. All packages will be created in the
+directory where the commands are executed. Please note that you might need to
+download or even compile other packages that FSL depends on.
+
+Once you are done, you can install the packages with::
+
+  dpkg -i fsl*.deb
+
+After you have repeated this procedure for the `fslview` source package, you
+should be ready to use FSL.
+
+Advanced: Arch-dependent compiler flags
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you have some experience with compiler flags and you also care for speed,
+you might want to have the FSL binaries optimized for every detail of your
+platform. To take advantage of all special features of your favorite processor
+you can specify custom compiler flags that are used when building binary
+packages from the source package of FSL. To achieve this, simply define them in
+a variable called :envvar:`DEB_ARCH_OPT_FLAGS` in the environment. In its
+simplest form, building an optimized package could be done like this::
+
+  DEB_ARCH_OPT_FLAGS="-march=opteron" apt-src build fsl
+
+Note that not all flags are available with every compiler. The above example
+does not work with the standard compiler of Debian sarge (gcc 3.3) and you
+obviously need an AMD Opteron processor.
+
+
+FSL benchmarks
+--------------
+
+Here is a list of some benchmarking results that demonstrate how fast FSL runs
+on several different platforms and distributions. The :command:`fsl-feeds`
+package is used for benchmarking FSL. The listed time for a complete
+fsl-selftest run is the user time as reported by :command:`time -p`. 
If you are +also interested in benchmarking results of the non-Debian FSL distribution, you +can visit the `FSL-FEEDS timing website`_. + +.. _FSL-FEEDS timing website: http://www.fmrib.ox.ac.uk/fsl/feeds/doc/timings.html + ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Distribution|CPU |Arch. |Memory|Compiler |Flags |Version |Time|Submitted | ++============+===============+======+======+=========+==================+========+====+==============+ +|Ubuntu |Intel Core 2 |x86_64|4GB |gcc 4.1.2| |4.0.2-1 |1377| Jiří Keller | +|gutsy |Quad Q6700 3Ghz| | | | | | | | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Debian sid |2x Dual Opteron|amd64 |12GB |gcc 4.1.1| |3.3.7-2 |1560|Yaroslav | +| |275 2.2 Ghz | | | | | | |Halchenko | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Ubuntu edgy |2x Dual Opteron|i686 |3GB |gcc 4.1.2| |3.3.8-1 |2096|Jeff | +| |275 2.2 GHz | | | | | | |Stevenson | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Debian lenny|Intel Core2 |i686 |2GB |gcc 4.3.1| |4.1.0-1 |2108|Michael | +| |E8400 3Ghz | | | | | | |Hanke | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Debian etch |Quad Opteron |amd64 |32GB |gcc 3.4.6|-O3 -m64 |3.2b-4 |2152|Antti | +| |850 2.4 GHz | | | |-march=opteron | | |Korvenoja | +| | | | | |-mfpmath=sse | | | | +| | | | | |-msse2 | | | | +| | | | | |-ffast-math | | | | +| | | | | |-funroll-all-loops| | | | +| | | | | |-fpeel-loops | | | | +| | | | | |-ftracer | | | | +| | | | | |-funswitch-loops | | | | +| | | | | |-funit-at-a-time | | | | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Debian lenny|Athlon X2 |amd64 |4GB |gcc 4.3 | |4.0.4-1 |2268|Petr | +| |4800 2.5 GHz | | | | | | 
|Hluštík | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Ubuntu |Quad Core2 |amd64 |4GB |gcc 4.1 | |4.0-1 |2500|Vincent | +|feisty |2.4 GHz | | | | | | |Ferrera | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Debian etch |Quad Opteron |amd64 |32GB |gcc 4.0.2|-O3 -m64 |3.2b-4 |2619|Antti | +| |850 2.4 GHz | | | |-march=opteron | | |Korvenoja | +| | | | | |-mfpmath=sse | | | | +| | | | | |-msse2 | | | | +| | | | | |-ffast-math | | | | +| | | | | |-funroll-all-loops| | | | +| | | | | |-fpeel-loops | | | | +| | | | | |-ftracer | | | | +| | | | | |-funswitch-loops | | | | +| | | | | |-funit-at-a-time | | | | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Debian etch |Quad Opteron |amd64 |32GB |gcc 4.0.2|-O3 |3.2b-4 |2652|Antti | +| |850 2.4 GHz | | | | | | |Korvenoja | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Debian etch |2x Opteron |amd64 |12GB |gcc 4.1.2| |4.0.2-3 |2847|Michael | +| |270 2.2 GHz | | | | | | |Hanke | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ +|Ubuntu gutsy|Athlon 64X2 |amd64 |2GB |gcc 4.1.3| |4.0.1 |3605|Nicholas P. | +| |5200+ 2.6 GHz | | | | | | |Holmes | ++------------+---------------+------+------+---------+------------------+--------+----+--------------+ + +.. Template + | | | | | | | | | | + | | | | | | | | | | + +------------+---------------+------+------+---------+------------------+--------+----+--------------+ + + +If you want to have your system included in this list, please send an email +with the logfile of the benchmark. 
You can run the benchmark (and create the +logfile) by running (fsl-feeds-3.2beta-3 or higher is required)::
+
+  (time -p fsl-selftest -c) > benchmark.log 2>&1
+
+And include the following information in your message:
+
+* Which distribution are you using?
+
+* CPU-type (as specific as possible)
+
+* How much physical memory does the machine have? If you don't know this, send the
+  output of::
+
+  free | head -n2 | tail -n1 | awk '{print $2}' -
+
+* If you compiled the binary packages yourself, which compiler did you use?
+  (hint: `gcc --version`)
+
+* Which custom compiler flags did you use when building the package (if any)?
+
+* Which version of the Debian FSL package was used? diff --git a/source/setup.rst b/source/setup.rst new file mode 100644 index 0000000..1c6e2e5 --- /dev/null +++ b/source/setup.rst @@ -0,0 +1,31 @@ +The packages are available through an APT repository. You can either browse the +archive or add this line to your `/etc/apt/sources.list`::
+
+  deb http://apsy.gse.uni-magdeburg.de/debian #distro# #section#
+
+Replace `#distro#` with ..., depending on which distribution you are using.
+Note that not every package is available for all distributions. You need to
+replace #section# with the value(s) corresponding to the package(s) you are
+interested in (get it from the table below). Multiple sections are allowed.
+Please do not forget to update your package index, e.g. by running apt-get
+update. If no binary packages are available for your distribution, you can
+still download the source packages. Simply add this to your
+`/etc/apt/sources.list` (you have to replace #distro# and #section# as
+described above)::
+
+  deb-src http://apsy.gse.uni-magdeburg.de/debian #distro# #section#
+
+After the usual apt-get update you can get and build the packages with e.g.
+apt-src or whatever you like. To build e.g. dicomnifti you can do this: apt-src
+install dicomnifti
+
+This downloads the sources and installs all build-dependencies if necessary. 
+
+You need root-privileges to install the build-dependencies, so apt-src might
+ask for a password. Now invoke: apt-src build dicomnifti
+
+This will build all binary packages, which should appear in the current
+directory (if everything worked well). You can now install the packages with
+dpkg.
+
+