X-Git-Url: https://git.donarmstrong.com/?a=blobdiff_plain;f=neurodebian%2Fdde.py;h=ab223abd446d07bc853c7674e80037bf564593a9;hb=0a4391ea43db4141e6126b411346678390b54792;hp=50eefedced5e36901bdc63e4c20a1d8f52ada8ae;hpb=1008e02cceb981e9c42b94596f7dbc3ddd0cb282;p=neurodebian.git

diff --git a/neurodebian/dde.py b/neurodebian/dde.py
index 50eefed..ab223ab 100755
--- a/neurodebian/dde.py
+++ b/neurodebian/dde.py
@@ -16,6 +16,7 @@ from ConfigParser import SafeConfigParser
 from optparse import OptionParser, Option, OptionGroup, OptionConflictError
 import sys
 import os
+import copy
 import shutil
 import urllib2
 import urllib
@@ -134,6 +135,8 @@ def add_pkgfromtaskfile(db, urls):
         for stanza in deb822.Packages.iter_paragraphs(fh):
             if stanza.has_key('Depends'):
                 pkg = stanza['Depends']
+            elif stanza.has_key('Recommends'):
+                pkg = stanza['Recommends']
             elif stanza.has_key('Suggests'):
                 pkg = stanza['Suggests']
             else:
@@ -154,7 +157,7 @@ def get_emptydbentry():
     return {'main': {}}
 
 
-def import_blendstask(db, url):
+def import_blendstask(cfg, db, url):
     cache = AptListsCache()
     fh = cache.get(url)
     task_name = None
@@ -177,6 +180,8 @@ def import_blendstask(cfg, db, url):
 
         if st.has_key('Depends'):
             pkg = st['Depends']
+        elif st.has_key('Recommends'):
+            pkg = st['Recommends']
         elif st.has_key('Suggests'):
             pkg = st['Suggests']
         else:
@@ -219,7 +224,12 @@ def import_blendstask(cfg, db, url):
 
             # Publications
             if st.has_key('Published-Title'):
-                pub = {'title': st['Published-Title']}
+                title = st['Published-Title']
+                if title[-1] == '.':
+                    # strip trailing dot -- added later
+                    pub = {'title': title[:-1]}
+                else:
+                    pub = {'title': title}
                 if st.has_key('Published-Authors'):
                     pub['authors'] = st['Published-Authors']
                 if st.has_key('Published-Year'):
@@ -232,7 +242,7 @@ def import_blendstask(cfg, db, url):
                     pub['doi'] = st['Published-DOI']
                     # need at least one URL
                     if not pub.has_key('url'):
-                        pub['url'] = st['Published-DOI']
+                        pub['url'] = "http://dx.doi.org/%s" % st['Published-DOI']
 
                 db[p]['main']['publication'] = pub
 
@@ -252,6 +262,16 @@ def import_blendstask(cfg, db, url):
             # just add this tasks name and id
             db[p]['blends']['tasks'].append(task)
 
+            # handle pkg name aliases
+            if p in cfg.options('blend package aliases'):
+                src_entry = db[p].copy()
+                # remove original entry
+                del db[p]
+                # copy the entry into all aliases
+                for alias in cfg.get('blend package aliases', p).split():
+                    print "Aliasing %s to %s" % (p, alias)
+                    db[alias] = copy.deepcopy(src_entry)
+
     return db
 
 
@@ -414,8 +434,55 @@ def dde_get(url, fail=False):
         print "NOINFO:", url
         return False
     except json.ReadException, e:
-        raise RuntimeError(
-            "No content in response -- DDE<->UDD connection probably down")
+        print "UDD-DOWN?:", url, type(e)
+        return False
+
+
+def nitrc_get(spec, fail=False):
+    nitrc_url = 'http://www.nitrc.org/export/site/projects.json.php'
+    try:
+        # change into this from python 2.6 on
+        #data = json.loads(urllib2.urlopen(nitrc_url + '?spec=%s' % spec).read())
+        data = json.read(urllib2.urlopen(nitrc_url + '?spec=%s' % spec).read())
+        print "NITRC-SUCCESS:", spec
+    except urllib2.HTTPError, e:
+        print "NITRC-NOINFO:", spec, type(e)
+        return False
+    except urllib2.URLError, e:
+        print "NITRC-URLERROR:", spec, type(e)
+        if fail:
+            print "Permanent failure"
+            return False
+        print "Try again after 30 seconds..."
+        time.sleep(30)
+        return nitrc_get(spec, fail=True)
+    return data
+
+
+def parse_nitrc(data):
+    if data is False:
+        return None
+    # simplify -- there is only one project in the data
+    project = data['projects'][0]
+    nitrc_filtered = {'downloads': 0,
+                      'id': project['id']}
+    for pkg in project['packages']:
+        for release in pkg['releases']:
+            for file in release['files']:
+                nitrc_filtered['downloads'] += file['download_count']
+    return nitrc_filtered
+
+
+def import_nitrc(cfg, db):
+    for p in db.keys():
+        if not cfg.has_option("nitrc ids", p):
+            continue
+        nitrc_spec = cfg.get("nitrc ids", p)
+        nitrc_data = nitrc_get(nitrc_spec)
+        nitrc_excerpt = parse_nitrc(nitrc_data)
+        if not nitrc_excerpt is None:
+            db[p]['nitrc'] = nitrc_excerpt
+    return db
 
 
 def import_dde(cfg, db):
@@ -442,7 +509,7 @@ def import_dde(cfg, db):
             if q.has_key('popcon'):
                 db[p]['main']['debian_popcon'] = q['popcon']
             # if we have debian, need to get ubuntu
-            q = dde_get(query_url + "/packages/prio-ubuntu-karmic/%s" % p)
+            q = dde_get(query_url + "/packages/prio-ubuntu-natty/%s" % p)
             if q and q.has_key('popcon'):
                 db[p]['main']['ubuntu_popcon'] = q['popcon']
         else:
@@ -484,6 +551,24 @@ def import_dde(cfg, db):
     return db
 
 
+def assure_unicode(s):
+    """Assure that argument is unicode
+
+    Necessary if strings do not carry the Pythonic 'u' prefix to
+    signal UTF-8 strings, but are in fact UTF-8.
+    """
+    if type(s) is unicode:
+        return s
+    elif type(s) is str:
+        # attempt regular unicode call and if fails -- just decode it
+        # into utf8
+        try:
+            return unicode(s)
+        except UnicodeDecodeError, e:
+            return s.decode('utf8')
+    else:
+        return assure_unicode(str(s))
+
 def convert_longdescr(ld):
     ld = ld.replace('% ', '%% ')
 
@@ -502,7 +587,8 @@ def convert_longdescr(ld):
     return ld
 
 
-def generate_pkgpage(pkg, cfg, db, template, addenum_dir):
+def generate_pkgpage(pkg, cfg, db, template, addenum_dir, extracts_dir):
+    print pkg
     # local binding for ease of use
     pkgdb = db[pkg]
     # do nothing if there is not at least the very basic stuff
@@ -512,13 +598,21 @@ def generate_pkgpage(pkg, cfg, db, template, addenum_dir, extracts_dir):
     underline = '*' * (len(title) + 2)
     title = '%s\n %s\n%s' % (underline, title, underline)
 
+    ex_dir = None
+    if 'sv' in pkgdb['main']:
+        ex_dir = os.path.join(extracts_dir, pkgdb['main']['sv'].split()[0])
+        if not os.path.exists(ex_dir):
+            ex_dir = None
     page = template.render(
             pkg=pkg,
             title=title,
-            long_description=convert_longdescr(pkgdb['main']['long_description']),
+            long_description=convert_longdescr(
+                assure_unicode(pkgdb['main']['long_description'])),
             cfg=cfg,
             db=pkgdb,
-            fulldb=db)
+            fulldb=db,
+            extracts_dir=ex_dir,
+            op=os.path)
     # the following can be replaced by something like
     # {% include "sidebar.html" ignore missing %}
     # in the template whenever jinja 2.2 becomes available
@@ -546,16 +640,20 @@ def write_sourceslist(jinja_env, cfg, outdir):
 
     repos = {}
     for release in cfg.options('release codenames'):
+        if release == 'data':
+            # no separate list for the data archive
+            continue
         transrel = trans_codename(release, cfg)
         repos[transrel] = []
         for mirror in cfg.options('mirrors'):
            listname = 'neurodebian.%s.%s.sources.list' % (release, mirror)
            repos[transrel].append((mirror, listname))
            lf = open(os.path.join(outdir, '_static', listname), 'w')
-           aptcfg = '%s %s main contrib non-free\n' % (cfg.get('mirrors', mirror),
-                                                       release)
-           lf.write('deb %s' % aptcfg)
-           lf.write('deb-src %s' % aptcfg)
+           for rel in ('data', release):
+               aptcfg = '%s %s main contrib non-free\n' % (cfg.get('mirrors', mirror),
+                                                           rel)
+               lf.write('deb %s' % aptcfg)
+               lf.write('#deb-src %s' % aptcfg)
            lf.close()
 
     srclist_template = jinja_env.get_template('sources_lists.rst')
@@ -564,20 +662,32 @@ def write_sourceslist(jinja_env, cfg, outdir):
     sl.close()
 
 
-def write_pkgpages(jinja_env, cfg, db, outdir, addenum_dir):
+def write_pkgpages(jinja_env, cfg, db, outdir, addenum_dir, extracts_dir):
     create_dir(outdir)
     create_dir(os.path.join(outdir, 'pkgs'))
 
     # generate the TOC with all packages
     toc_template = jinja_env.get_template('pkgs_toc.rst')
     toc = codecs.open(os.path.join(outdir, 'pkgs.rst'), 'w', 'utf-8')
-    toc.write(toc_template.render(pkgs=db.keys()))
+    # this is a fragile test
+    toc.write(toc_template.render(
+        pkgs=[k for k in db.keys()
+              if not ('Datasets', 'neurodebian-data') in db[k]]))
+    toc.close()
+    # and now only for dataset packages
+    toc_template = jinja_env.get_template('datasets_toc.rst')
+    toc = codecs.open(os.path.join(outdir, 'datasets.rst'), 'w', 'utf-8')
+    # this is a fragile test
+    toc.write(toc_template.render(
+        pkgs=[k for k in db.keys()
+              if ('Datasets', 'neurodebian-data') in db[k]]))
     toc.close()
 
+    # and now each individual package page
     pkg_template = jinja_env.get_template('pkg.rst')
     for p in db.keys():
-        page = generate_pkgpage(p, cfg, db, pkg_template, addenum_dir)
+        page = generate_pkgpage(p, cfg, db, pkg_template, addenum_dir, extracts_dir)
         # when no page is available skip this package
         if page is None:
             continue
@@ -612,6 +722,9 @@ def prepOptParser(op):
     op.add_option("--pkgaddenum", action="store", dest="addenum_dir",
                   type="string", default=None, help="None")
 
+    op.add_option("--extracts", action="store", dest="extracts_dir",
+                  type="string", default=None, help="None")
+
 
 def main():
     op = OptionParser(version="%prog 0.0.2")
@@ -652,7 +765,7 @@ def main():
         # get info from task files
         if cfg.has_option('packages', 'prospective'):
             for url in cfg.get('packages', 'prospective').split():
-                db = import_blendstask(db, url)
+                db = import_blendstask(cfg, db, url)
 
         # parse NeuroDebian repository
         if cfg.has_option('neurodebian', 'releases'):
@@ -661,6 +774,8 @@ def main():
 
         # collect package information from DDE
         db = import_dde(cfg, db)
+        # get info from NITRC
+        db = import_nitrc(cfg, db)
         # store the new DB
         store_db(db, opts.db)
         # and be done
@@ -673,7 +788,7 @@ def main():
         jinja_env = Environment(loader=PackageLoader('neurodebian', 'templates'))
 
         # generate package pages and TOC and write them to files
-        write_pkgpages(jinja_env, cfg, db, opts.outdir, opts.addenum_dir)
+        write_pkgpages(jinja_env, cfg, db, opts.outdir, opts.addenum_dir, opts.extracts_dir)
 
         write_sourceslist(jinja_env, cfg, opts.outdir)
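
The alias handling and the NITRC import introduced above both rely on configuration sections that are not part of this diff. The snippet below is a minimal sketch, in the same Python 2 style as the code above, of how such sections might look and how the new code consumes them; the section names ("blend package aliases", "nitrc ids") are taken from the diff, while the package name "somepkg" and all values are hypothetical examples.

    from ConfigParser import SafeConfigParser

    # hypothetical configuration, equivalent to an INI file containing:
    #   [blend package aliases]
    #   somepkg = somepkg somepkg-snapshot
    #   [nitrc ids]
    #   somepkg = somepkg-on-nitrc
    cfg = SafeConfigParser()
    cfg.add_section('blend package aliases')
    cfg.set('blend package aliases', 'somepkg', 'somepkg somepkg-snapshot')
    cfg.add_section('nitrc ids')
    cfg.set('nitrc ids', 'somepkg', 'somepkg-on-nitrc')

    p = 'somepkg'

    # alias expansion, as done at the end of import_blendstask()
    if p in cfg.options('blend package aliases'):
        for alias in cfg.get('blend package aliases', p).split():
            print "Aliasing %s to %s" % (p, alias)

    # lookup of the NITRC spec, as done by import_nitrc()
    if cfg.has_option('nitrc ids', p):
        print "NITRC spec for %s: %s" % (p, cfg.get('nitrc ids', p))

On the NITRC side, parse_nitrc() assumes the JSON returned by projects.json.php describes a single project and sums the download_count of every file in every release of every package into db[<pkg>]['nitrc']['downloads'].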