- binary-with-bad-dynamic-table
- usr-share-doc-symlink-without-dependency
- mknod-in-maintainer-script
+ - package-contains-info-dir-file
error:
+ - wrong-file-owner-uid-or-gid
+ - bad-relation
+ - FSSTND-dir-in-usr
- binary-in-etc
- missing-dependency-on-perlapi
- copyright-lists-upstream-authors-with-dh_make-boilerplate
generate
generate Contents-$arch.gz files
- bootstrap
- scan the debs in the existing pool and load contents in the the database
+ bootstrap_bin
+ scan the debs in the existing pool and load contents into the bin_contents table
cruft
remove files/paths which are no longer referenced by a binary
s.commit()
+ def bootstrap_bin(self):
+ """
+ scan the existing debs in the pool to populate the bin_contents table
+ """
+ pooldir = Config()[ 'Dir::Pool' ]
+
+ s = DBConn().session()
+
+ # NOTE(review): the loop over every binary appears to have been commented
+ # out for debugging; as written only the FIRST binary is ever scanned,
+ # which contradicts the docstring — confirm and restore the loop.
+ # for binary in s.query(DBBinary).all() ):
+ binary = s.query(DBBinary).first()
+ if binary:
+ filename = binary.poolfile.filename
+ # Check for existing contents
+ existingq = s.execute( "select 1 from bin_contents where binary_id=:id", {'id':binary.binary_id} );
+ if existingq.fetchone():
+ log.debug( "already imported: %s" % (filename))
+ else:
+ # We don't have existing contents so import them
+ log.debug( "scanning: %s" % (filename) )
+
+ debfile = os.path.join(pooldir, filename)
+ if os.path.exists(debfile):
+ Binary(debfile, self.reject).scan_package(binary.binary_id, True)
+ else:
+ log.error("missing .deb: %s" % filename)
+
+
+
+
def bootstrap(self):
"""
scan the existing debs in the pool to populate the contents database tables
    """
commands = {'generate' : Contents.generate,
- 'bootstrap' : Contents.bootstrap,
+ 'bootstrap_bin' : Contents.bootstrap_bin,
'cruft' : Contents.cruft,
}
# Set up checks based on mode
if Options["Mode"] == "daily":
- checks = [ "nbs", "nviu", "obsolete source" ]
+ checks = [ "nbs", "nviu", "nvit", "obsolete source" ]
elif Options["Mode"] == "full":
- checks = [ "nbs", "nviu", "obsolete source", "nfu", "dubious nbs", "bnb", "bms", "anais" ]
+ checks = [ "nbs", "nviu", "nvit", "obsolete source", "nfu", "dubious nbs", "bnb", "bms", "anais" ]
else:
utils.warn("%s is not a recognised mode - only 'full' or 'daily' are understood." % (Options["Mode"]))
usage(1)
if "nviu" in checks:
do_newer_version('unstable', 'experimental', 'NVIU', session)
+ if "nvit" in checks:
+ do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session)
+
if "nbs" in checks:
do_nbs(real_nbs)
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+Adding tables for key-based ACLs and blocks
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def do_update(self):
+ print "Adding tables for handling key-based ACLs and upload blocks"
+
+ try:
+ c = self.db.cursor()
+
+ # Fix up some older table permissions
+ c.execute("GRANT SELECT ON src_format TO public")
+ c.execute("GRANT ALL ON src_format TO ftpmaster")
+ c.execute("GRANT USAGE ON src_format_id_seq TO ftpmaster")
+
+ c.execute("GRANT SELECT ON suite_src_formats TO public")
+ c.execute("GRANT ALL ON suite_src_formats TO ftpmaster")
+
+ # Source ACLs table
+ print "Source ACLs table"
+ c.execute("""
+ CREATE TABLE source_acl (
+ id SERIAL PRIMARY KEY,
+ access_level TEXT UNIQUE NOT NULL
+ )
+ """)
+
+ ## Can upload all packages
+ c.execute("INSERT INTO source_acl (access_level) VALUES ('full')")
+ ## Can upload only packages marked as DM upload allowed
+ c.execute("INSERT INTO source_acl (access_level) VALUES ('dm')")
+
+ c.execute("GRANT SELECT ON source_acl TO public")
+ c.execute("GRANT ALL ON source_acl TO ftpmaster")
+ c.execute("GRANT USAGE ON source_acl_id_seq TO ftpmaster")
+
+ # Binary ACLs table
+ print "Binary ACLs table"
+ c.execute("""
+ CREATE TABLE binary_acl (
+ id SERIAL PRIMARY KEY,
+ access_level TEXT UNIQUE NOT NULL
+ )
+ """)
+
+ ## Can upload any architectures of binary packages
+ c.execute("INSERT INTO binary_acl (access_level) VALUES ('full')")
+ ## Can upload debs where architectures are based on the map table binary_acl_map
+ c.execute("INSERT INTO binary_acl (access_level) VALUES ('map')")
+
+ c.execute("GRANT SELECT ON binary_acl TO public")
+ c.execute("GRANT ALL ON binary_acl TO ftpmaster")
+ c.execute("GRANT USAGE ON binary_acl_id_seq TO ftpmaster")
+
+ # This is only used if binary_acl is 2 for the fingerprint concerned
+ c.execute("""
+ CREATE TABLE binary_acl_map (
+ id SERIAL PRIMARY KEY,
+ fingerprint_id INT4 REFERENCES fingerprint (id) NOT NULL,
+ architecture_id INT4 REFERENCES architecture (id) NOT NULL,
+
+ UNIQUE (fingerprint_id, architecture_id)
+ )""")
+
+ c.execute("GRANT SELECT ON binary_acl_map TO public")
+ c.execute("GRANT ALL ON binary_acl_map TO ftpmaster")
+ c.execute("GRANT USAGE ON binary_acl_map_id_seq TO ftpmaster")
+
+ ## NULL means no source upload access (i.e. any upload containing source
+ ## will be rejected)
+ c.execute("ALTER TABLE fingerprint ADD COLUMN source_acl_id INT4 REFERENCES source_acl(id) DEFAULT NULL")
+
+ ## NULL means no binary upload access
+ c.execute("ALTER TABLE fingerprint ADD COLUMN binary_acl_id INT4 REFERENCES binary_acl(id) DEFAULT NULL")
+
+ ## TRUE here means that if the person doesn't have binary upload permissions for
+ ## an architecture, we'll reject the .changes. FALSE means that we'll simply
+ ## dispose of those particular binaries
+ c.execute("ALTER TABLE fingerprint ADD COLUMN binary_reject BOOLEAN NOT NULL DEFAULT TRUE")
+
+ # Blockage table (replaces the hard coded stuff we used to have in extensions)
+ print "Adding blockage table"
+ c.execute("""
+ CREATE TABLE upload_blocks (
+ id SERIAL PRIMARY KEY,
+ source TEXT NOT NULL,
+ version TEXT DEFAULT NULL,
+ fingerprint_id INT4 REFERENCES fingerprint (id),
+ uid_id INT4 REFERENCES uid (id),
+ reason TEXT NOT NULL,
+
+ CHECK (fingerprint_id IS NOT NULL OR uid_id IS NOT NULL)
+ )""")
+
+ c.execute("GRANT SELECT ON upload_blocks TO public")
+ c.execute("GRANT ALL ON upload_blocks TO ftpmaster")
+ c.execute("GRANT USAGE ON upload_blocks_id_seq TO ftpmaster")
+
+ c.execute("ALTER TABLE keyrings ADD COLUMN default_source_acl_id INT4 REFERENCES source_acl (id) DEFAULT NULL")
+ c.execute("ALTER TABLE keyrings ADD COLUMN default_binary_acl_id INT4 REFERENCES binary_acl (id) DEFAULT NULL")
+ c.execute("ALTER TABLE keyrings ADD COLUMN default_binary_reject BOOLEAN NOT NULL DEFAULT TRUE")
+ # Set up keyring priorities
+ c.execute("ALTER TABLE keyrings ADD COLUMN priority INT4 NOT NULL DEFAULT 100")
+ # And then we don't need the DM stuff any more
+ c.execute("ALTER TABLE keyrings DROP COLUMN debian_maintainer")
+
+ # Default ACLs for keyrings
+ c.execute("""
+ CREATE TABLE keyring_acl_map (
+ id SERIAL PRIMARY KEY,
+ keyring_id INT4 REFERENCES keyrings (id) NOT NULL,
+ architecture_id INT4 REFERENCES architecture (id) NOT NULL,
+
+ UNIQUE (keyring_id, architecture_id)
+ )""")
+
+ c.execute("GRANT SELECT ON keyring_acl_map TO public")
+ c.execute("GRANT ALL ON keyring_acl_map TO ftpmaster")
+ c.execute("GRANT USAGE ON keyring_acl_map_id_seq TO ftpmaster")
+
+ # Set up some default stuff; default to old behaviour
+ print "Setting up some defaults"
+
+ c.execute("""UPDATE keyrings SET default_source_acl_id = (SELECT id FROM source_acl WHERE access_level = 'full'),
+ default_binary_acl_id = (SELECT id FROM binary_acl WHERE access_level = 'full')""")
+
+ c.execute("""UPDATE keyrings SET default_source_acl_id = (SELECT id FROM source_acl WHERE access_level = 'dm'),
+ default_binary_acl_id = (SELECT id FROM binary_acl WHERE access_level = 'full')
+ WHERE name = 'debian-maintainers.gpg'""")
+
+ c.execute("""UPDATE keyrings SET priority = 90 WHERE name = 'debian-maintainers.gpg'""")
+
+ # Initialize the existing keys
+ c.execute("""UPDATE fingerprint SET binary_acl_id = (SELECT default_binary_acl_id FROM keyrings
+ WHERE keyrings.id = fingerprint.keyring)""")
+
+ c.execute("""UPDATE fingerprint SET source_acl_id = (SELECT default_source_acl_id FROM keyrings
+ WHERE keyrings.id = fingerprint.keyring)""")
+
+ print "Updating config version"
+ c.execute("UPDATE config SET value = '16' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Unable to apply ACLs update (16), rollback issued. Error message : %s" % (str(msg))
--- /dev/null
+#!/usr/bin/env python
+# coding=utf8
+
+"""
+adding a bin_contents table to hold lists of files contained in .debs and .udebs
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Mike O'Connor <stew@debian.org>
+@license: GNU General Public License version 2 or later
+"""
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+################################################################################
+
+
+################################################################################
+
+import psycopg2
+import time
+from daklib.dak_exceptions import DBUpdateError
+
+################################################################################
+
+def do_update(self):
+
+ print "adding a bin_contents table to hold lists of files contained in .debs and .udebs"
+
+ try:
+ c = self.db.cursor()
+ c.execute("""CREATE TABLE bin_contents (
+ file text,
+ binary_id integer,
+ UNIQUE(file,binary_id))""" )
+
+ c.execute("""ALTER TABLE ONLY bin_contents
+ ADD CONSTRAINT bin_contents_bin_fkey
+ FOREIGN KEY (binary_id) REFERENCES binaries(id)
+ ON DELETE CASCADE;""")
+
+ c.execute("""CREATE INDEX ind_bin_contents_binary ON bin_contents(binary_id);""" )
+
+ c.execute("UPDATE config SET value = '17' WHERE name = 'db_revision'")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Unable to apply bin_contents update 17, rollback issued. Error message : %s" % (str(msg))
+
+
+
+++ /dev/null
-#!/usr/bin/env python
-# coding=utf8
-
-"""
-Adding table to get rid of queue/done checks
-
-@contact: Debian FTP Master <ftpmaster@debian.org>
-@copyright: 2009 Joerg Jaspert <joerg@debian.org>
-@license: GNU General Public License version 2 or later
-"""
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-################################################################################
-
-
-################################################################################
-
-import psycopg2
-import time
-import os
-import datetime
-from daklib.dak_exceptions import DBUpdateError, InvalidDscError, ChangesUnicodeError
-from daklib.config import Config
-from daklib.utils import parse_changes, warn, gpgv_get_status_output, process_gpgv_output
-
-################################################################################
-
-def check_signature (sig_filename, data_filename=""):
- keyrings = [
- "/home/joerg/keyring/keyrings/debian-keyring.gpg",
- "/home/joerg/keyring/keyrings/debian-keyring.pgp",
- "/home/joerg/keyring/keyrings/debian-maintainers.gpg",
- "/home/joerg/keyring/keyrings/debian-role-keys.gpg",
- "/home/joerg/keyring/keyrings/emeritus-keyring.pgp",
- "/home/joerg/keyring/keyrings/emeritus-keyring.gpg",
- "/home/joerg/keyring/keyrings/removed-keys.gpg",
- "/home/joerg/keyring/keyrings/removed-keys.pgp"
- ]
-
- keyringargs = " ".join(["--keyring %s" % x for x in keyrings ])
-
- # Build the command line
- status_read, status_write = os.pipe()
- cmd = "gpgv --status-fd %s %s %s" % (status_write, keyringargs, sig_filename)
-
- # Invoke gpgv on the file
- (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
-
- # Process the status-fd output
- (keywords, internal_error) = process_gpgv_output(status)
-
- # If we failed to parse the status-fd output, let's just whine and bail now
- if internal_error:
- warn("Couldn't parse signature")
- return None
-
- # usually one would check for bad things here. We, however, do not care.
-
- # Next check gpgv exited with a zero return code
- if exit_status:
- warn("Couldn't parse signature")
- return None
-
- # Sanity check the good stuff we expect
- if not keywords.has_key("VALIDSIG"):
- warn("Couldn't parse signature")
- else:
- args = keywords["VALIDSIG"]
- if len(args) < 1:
- warn("Couldn't parse signature")
- else:
- fingerprint = args[0]
-
- return fingerprint
-
-################################################################################
-
-def do_update(self):
- print "Adding known_changes table"
-
- try:
- c = self.db.cursor()
- c.execute("""
- CREATE TABLE known_changes (
- id SERIAL PRIMARY KEY,
- changesname TEXT NOT NULL,
- seen TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(),
- source TEXT NOT NULL,
- binaries TEXT NOT NULL,
- architecture TEXT NOT NULL,
- version TEXT NOT NULL,
- distribution TEXT NOT NULL,
- urgency TEXT NOT NULL,
- maintainer TEXT NOT NULL,
- fingerprint TEXT NOT NULL,
- changedby TEXT NOT NULL,
- date TEXT NOT NULL,
- UNIQUE (changesname)
- )
- """)
- c.execute("CREATE INDEX changesname_ind ON known_changes(changesname)")
- c.execute("CREATE INDEX changestimestamp_ind ON known_changes(seen)")
- c.execute("CREATE INDEX changessource_ind ON known_changes(source)")
- c.execute("CREATE INDEX changesdistribution_ind ON known_changes(distribution)")
- c.execute("CREATE INDEX changesurgency_ind ON known_changes(urgency)")
-
- print "Done. Now looking for old changes files"
- count = 0
- failure = 0
- cnf = Config()
- for directory in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
- checkdir = cnf["Dir::Queue::%s" % (directory) ]
- if os.path.exists(checkdir):
- print "Looking into %s" % (checkdir)
- for dirpath, dirnames, filenames in os.walk(checkdir, topdown=False):
- if not filenames:
- # Empty directory (or only subdirectories), next
- continue
- for changesfile in filenames:
- if not changesfile.endswith(".changes"):
- # Only interested in changes files.
- continue
- try:
- count += 1
- print "Directory %s, file %7d, failures %3d. (%s)" % (dirpath[-10:], count, failure, changesfile)
- changes = Changes()
- changes.changes_file = changesfile
- changesfile = os.path.join(dirpath, changesfile)
- changes.changes = parse_changes(changesfile, signing_rules=-1)
- changes.changes["fingerprint"], = check_signature(changesfile)
- changes.add_known_changes(directory)
- except InvalidDscError, line:
- warn("syntax error in .dsc file '%s', line %s." % (f, line))
- failure += 1
- except ChangesUnicodeError:
- warn("found invalid changes file, not properly utf-8 encoded")
- failure += 1
-
-
- c.execute("GRANT ALL ON known_changes TO ftpmaster;")
- c.execute("GRANT SELECT ON known_changes TO public;")
-
- c.execute("UPDATE config SET value = '20' WHERE name = 'db_revision'")
- self.db.commit()
-
- except psycopg2.ProgrammingError, msg:
- self.db.rollback()
- raise DBUpdateError, "Unable to apply source format update 15, rollback issued. Error message : %s" % (str(msg))
""" Imports a keyring into the database """
# Copyright (C) 2007 Anthony Towns <aj@erisian.com.au>
+# Copyright (C) 2009 Mark Hymers <mhy@debian.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
################################################################################
import sys, os, re
-import apt_pkg, ldap, email.Utils
+import apt_pkg, ldap
from daklib.config import Config
from daklib.dbconn import *
-
# Globals
Options = None
for (keyid, uid, name) in q.fetchall():
byname[uid] = (keyid, name)
byid[keyid] = (uid, name)
+
return (byname, byid)
def get_fingerprint_info(session):
################################################################################
-def get_ldap_name(entry):
- name = []
- for k in ["cn", "mn", "sn"]:
- ret = entry.get(k)
- if ret and ret[0] != "" and ret[0] != "-":
- name.append(ret[0])
- return " ".join(name)
-
-################################################################################
-
-class Keyring(object):
- gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
- " --with-colons --fingerprint --fingerprint"
- keys = {}
- fpr_lookup = {}
-
- def de_escape_gpg_str(self, str):
- esclist = re.split(r'(\\x..)', str)
- for x in range(1,len(esclist),2):
- esclist[x] = "%c" % (int(esclist[x][2:],16))
- return "".join(esclist)
-
- def __init__(self, keyring):
- self.cnf = Config()
- k = os.popen(self.gpg_invocation % keyring, "r")
- keys = self.keys
- key = None
- fpr_lookup = self.fpr_lookup
- signingkey = False
- for line in k.xreadlines():
- field = line.split(":")
- if field[0] == "pub":
- key = field[4]
- (name, addr) = email.Utils.parseaddr(field[9])
- name = re.sub(r"\s*[(].*[)]", "", name)
- if name == "" or addr == "" or "@" not in addr:
- name = field[9]
- addr = "invalid-uid"
- name = self.de_escape_gpg_str(name)
- keys[key] = {"email": addr}
- if name != "": keys[key]["name"] = name
- keys[key]["aliases"] = [name]
- keys[key]["fingerprints"] = []
- signingkey = True
- elif key and field[0] == "sub" and len(field) >= 12:
- signingkey = ("s" in field[11])
- elif key and field[0] == "uid":
- (name, addr) = email.Utils.parseaddr(field[9])
- if name and name not in keys[key]["aliases"]:
- keys[key]["aliases"].append(name)
- elif signingkey and field[0] == "fpr":
- keys[key]["fingerprints"].append(field[9])
- fpr_lookup[field[9]] = key
-
- def generate_desired_users(self):
- if Options["Generate-Users"]:
- format = Options["Generate-Users"]
- return self.generate_users_from_keyring(format)
- if Options["Import-Ldap-Users"]:
- return self.import_users_from_ldap()
- return ({}, {})
-
- def import_users_from_ldap(self):
- LDAPDn = self.cnf["Import-LDAP-Fingerprints::LDAPDn"]
- LDAPServer = self.cnf["Import-LDAP-Fingerprints::LDAPServer"]
- l = ldap.open(LDAPServer)
- l.simple_bind_s("","")
- Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
- "(&(keyfingerprint=*)(gidnumber=%s))" % (self.cnf["Import-Users-From-Passwd::ValidGID"]),
- ["uid", "keyfingerprint", "cn", "mn", "sn"])
-
- ldap_fin_uid_id = {}
-
- byuid = {}
- byname = {}
- keys = self.keys
- fpr_lookup = self.fpr_lookup
-
- for i in Attrs:
- entry = i[1]
- uid = entry["uid"][0]
- name = get_ldap_name(entry)
- fingerprints = entry["keyFingerPrint"]
- keyid = None
- for f in fingerprints:
- key = fpr_lookup.get(f, None)
- if key not in keys: continue
- keys[key]["uid"] = uid
-
- if keyid != None: continue
- keyid = get_or_set_uid(uid).uid
- byuid[keyid] = (uid, name)
- byname[uid] = (keyid, name)
-
- return (byname, byuid)
-
- def generate_users_from_keyring(self, format):
- byuid = {}
- byname = {}
- keys = self.keys
- any_invalid = False
- for x in keys.keys():
- if keys[x]["email"] == "invalid-uid":
- any_invalid = True
- keys[x]["uid"] = format % "invalid-uid"
- else:
- uid = format % keys[x]["email"]
- keyid = get_or_set_uid(uid).uid
- byuid[keyid] = (uid, keys[x]["name"])
- byname[uid] = (keyid, keys[x]["name"])
- keys[x]["uid"] = uid
- if any_invalid:
- uid = format % "invalid-uid"
- keyid = get_or_set_uid(uid).uid
- byuid[keyid] = (uid, "ungeneratable user id")
- byname[uid] = (keyid, "ungeneratable user id")
- return (byname, byuid)
-
-################################################################################
-
def usage (exit_code=0):
print """Usage: dak import-keyring [OPTION]... [KEYRING]
-h, --help show this help and exit.
### Parse options
Options = cnf.SubTree("Import-Keyring::Options")
+
if Options["Help"]:
usage()
usage(1)
### Keep track of changes made
-
changes = [] # (uid, changes strings)
### Initialise
### Parse the keyring
keyringname = keyring_names[0]
- keyring = Keyring(keyringname)
-
- is_dm = "false"
- if cnf.has_key("Import-Keyring::"+keyringname+"::Debian-Maintainer"):
- session.execute("UPDATE keyrings SET debian_maintainer = :dm WHERE name = :name",
- {'dm': cnf["Import-Keyring::"+keyringname+"::Debian-Maintainer"],
- 'name': keyringname.split("/")[-1]})
+ keyring = get_keyring(keyringname, session)
+ if not keyring:
+ print "E: Can't load keyring %s from database" % keyringname
+ sys.exit(1)
- is_dm = cnf["Import-Keyring::"+keyringname+"::Debian-Maintainer"]
-
- keyring_id = get_or_set_keyring(
- keyringname.split("/")[-1], session,
- ).keyring_id
+ keyring.load_keys(keyringname)
### Generate new uid entries if they're needed (from LDAP or the keyring)
- (desuid_byname, desuid_byid) = keyring.generate_desired_users()
+ if Options["Generate-Users"]:
+ format = Options["Generate-Users"]
+ (desuid_byname, desuid_byid) = keyring.generate_users_from_keyring(Options["Generate-Users"], session)
+ elif Options["Import-Ldap-Users"]:
+ (desuid_byname, desuid_byid) = keyring.import_users_from_ldap(session)
+ else:
+ (desuid_byname, desuid_byid) = ({}, {})
### Cache all the existing uid entries
(db_uid_byname, db_uid_byid) = get_uid_info(session)
for keyid in desuid_byid.keys():
uid = (keyid, desuid_byid[keyid][0])
name = desuid_byid[keyid][1]
- oname = db_uid_byname[keyid][1]
+ oname = db_uid_byid[keyid][1]
if name and oname != name:
changes.append((uid[1], "Full name: %s" % (name)))
session.execute("UPDATE uid SET name = :name WHERE id = :keyid",
if keyid == None:
keyid = db_fin_info.get(keyring.keys[z]["fingerprints"][0], [None])[0]
for y in keyring.keys[z]["fingerprints"]:
- fpr[y] = (keyid,keyring_id)
+ fpr[y] = (keyid, keyring.keyring_id)
# For any keys that used to be in this keyring, disassociate them.
# We don't change the uid, leaving that for historical info; if
# the id should change, it'll be set when importing another keyring.
for f,(u,fid,kr) in db_fin_info.iteritems():
- if kr != keyring_id: continue
- if f in fpr: continue
+ if kr != keyring.keyring_id:
+ continue
+
+ if f in fpr:
+ continue
+
changes.append((db_uid_byid.get(u, [None])[0], "Removed key: %s" % (f)))
- session.execute("UPDATE fingerprint SET keyring = NULL WHERE id = :fprid", {'fprid': fid})
+ session.execute("""UPDATE fingerprint
+ SET keyring = NULL,
+ source_acl_id = NULL,
+ binary_acl_id = NULL,
+ binary_reject = TRUE
+ WHERE id = :fprid""", {'fprid': fid})
+
+ session.execute("""DELETE FROM binary_acl_map WHERE fingerprint_id = :fprid""", {'fprid': fid})
# For the keys in this keyring, add/update any fingerprints that've
# changed.
for f in fpr:
newuid = fpr[f][0]
newuiduid = db_uid_byid.get(newuid, [None])[0]
+
(olduid, oldfid, oldkid) = db_fin_info.get(f, [-1,-1,-1])
- if olduid == None: olduid = -1
- if oldkid == None: oldkid = -1
+
+ if olduid == None:
+ olduid = -1
+
+ if oldkid == None:
+ oldkid = -1
+
if oldfid == -1:
changes.append((newuiduid, "Added key: %s" % (f)))
+ fp = Fingerprint()
+ fp.fingerprint = f
+ fp.keyring_id = keyring.keyring_id
if newuid:
- session.execute("""INSERT INTO fingerprint (fingerprint, uid, keyring)
- VALUES (:fpr, :uid, :keyring)""",
- {'fpr': f, 'uid': uid, 'keyring': keyring_id})
- else:
- session.execute("""INSERT INTO fingerprint (fingerprint, keyring)
- VALUES (:fpr, :keyring)""",
- {'fpr': f, 'keyring': keyring_id})
+ fp.uid_id = newuid
+
+ fp.binary_acl_id = keyring.default_binary_acl_id
+ fp.source_acl_id = keyring.default_source_acl_id
+ fp.default_binary_reject = keyring.default_binary_reject
+ session.add(fp)
+ session.flush()
+
+ for k in keyring.keyring_acl_map:
+ ba = BinaryACLMap()
+ ba.fingerprint_id = fp.fingerprint_id
+ ba.architecture_id = k.architecture_id
+ session.add(ba)
+ session.flush()
+
else:
if newuid and olduid != newuid:
if olduid != -1:
else:
changes.append((newuiduid, "Linked key: %s" % f))
changes.append((newuiduid, " (formerly unowned)"))
+
session.execute("UPDATE fingerprint SET uid = :uid WHERE id = :fpr",
{'uid': newuid, 'fpr': oldfid})
- if oldkid != keyring_id:
+ # Don't move a key from a keyring with a higher priority to a lower one
+ if oldkid != keyring.keyring_id:
+ movekey = False
+ if oldkid == -1:
+ movekey = True
+ else:
+ try:
+ oldkeyring = session.query(Keyring).filter_by(keyring_id=oldkid).one()
+ except NotFoundError:
+ print "ERROR: Cannot find old keyring with id %s" % oldkid
+ sys.exit(1)
+
+ if oldkeyring.priority < keyring.priority:
+ movekey = True
+
# Only change the keyring if it won't result in a loss of permissions
- q = session.execute("SELECT debian_maintainer FROM keyrings WHERE id = :keyring",
- {'keyring': keyring_id})
- if is_dm == "false" and not q.fetchall()[0][0]:
- session.execute("UPDATE fingerprint SET keyring = :keyring WHERE id = :fpr",
- {'keyring': keyring_id, 'fpr': oldfid})
+ if movekey:
+ session.execute("""DELETE FROM binary_acl_map WHERE fingerprint_id = :fprid""", {'fprid': oldfid})
+
+ session.execute("""UPDATE fingerprint
+ SET keyring = :keyring,
+ source_acl_id = :source_acl_id,
+ binary_acl_id = :binary_acl_id,
+ binary_reject = :binary_reject
+ WHERE id = :fpr""",
+ {'keyring': keyring.keyring_id,
+ 'source_acl_id': keyring.default_source_acl_id,
+ 'binary_acl_id': keyring.default_binary_acl_id,
+ 'binary_reject': keyring.default_binary_reject,
+ 'fpr': oldfid})
+
+ session.flush()
+
+ for k in keyring.keyring_acl_map:
+ ba = BinaryACLMap()
+ ba.fingerprint_id = oldfid
+ ba.architecture_id = k.architecture_id
+ session.add(ba)
+ session.flush()
+
else:
- print "Key %s exists in both DM and DD keyrings. Not demoting." % (f)
+ print "Key %s exists in both %s and %s keyrings. Not demoting." % (f,
+ oldkeyring.keyring_name,
+ keyring.keyring_name)
# All done!
session.commit()
+ # Print a summary
changesd = {}
for (k, v) in changes:
- if k not in changesd: changesd[k] = ""
+ if k not in changesd:
+ changesd[k] = ""
changesd[k] += " %s\n" % (v)
keys = changesd.keys()
################################################################################
def recheck(upload, session):
- files = upload.pkg.files
-
- cnf = Config()
- for f in files.keys():
- # The .orig.tar.gz can disappear out from under us is it's a
- # duplicate of one in the archive.
- if not files.has_key(f):
- continue
- # Check that the source still exists
- if files[f]["type"] == "deb":
- source_version = files[f]["source version"]
- source_package = files[f]["source package"]
- if not upload.pkg.changes["architecture"].has_key("source") \
- and not upload.source_exists(source_package, source_version, upload.pkg.changes["distribution"].keys()):
- source_epochless_version = re_no_epoch.sub('', source_version)
- dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
- found = 0
- for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
- if cnf.has_key("Dir::Queue::%s" % (q)):
- if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
- found = 1
- if not found:
- upload.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
-
- # Version and file overwrite checks
- if files[f]["type"] == "deb":
- upload.check_binary_against_db(f, session)
- elif files[f]["type"] == "dsc":
- upload.check_source_against_db(f, session)
- upload.check_dsc_against_db(f, session)
-
+ upload.recheck()
if len(upload.rejects) > 0:
answer = "XXX"
if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
answer = 'S'
- print "REJECT\n" + upload.rejects.join("\n"),
+ print "REJECT\n%s" % '\n'.join(upload.rejects)
prompt = "[R]eject, Skip, Quit ?"
while prompt.find(answer) == -1:
answer = answer[:1].upper()
if answer == 'R':
- upload.do_reject(manual=0, reject_message=upload.rejects.join("\n"))
+ upload.do_reject(manual=0, reject_message='\n'.join(upload.rejects))
os.unlink(upload.pkg.changes_file[:-8]+".dak")
return 0
elif answer == 'S':
print """Usage: dak process-new [OPTION]... [CHANGES]...
-a, --automatic automatic run
-h, --help show this help and exit.
- -C, --comments-dir=DIR use DIR as comments-dir, for [o-]p-u-new
-m, --manual-reject=MSG manual reject with `msg'
-n, --no-action don't do anything
-t, --trainee FTP Trainee mode
finally:
os.unlink(path)
-# def move_to_dir (upload, dest, perms=0660, changesperms=0664):
-# utils.move (upload.pkg.changes_file, dest, perms=changesperms)
-# file_keys = upload.pkg.files.keys()
-# for f in file_keys:
-# utils.move (f, dest, perms=perms)
-
-# def is_source_in_queue_dir(qdir):
-# entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
-# and x.endswith(".changes") ]
-# for entry in entries:
-# # read the .dak
-# u = queue.Upload(Cnf)
-# u.pkg.changes_file = os.path.join(qdir, entry)
-# u.update_vars()
-# if not u.pkg.changes["architecture"].has_key("source"):
-# # another binary upload, ignore
-# continue
-# if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
-# # another version, ignore
-# continue
-# # found it!
-# return True
-# return False
-
-# def move_to_holding(suite, queue_dir):
-# print "Moving to %s holding area." % (suite.upper(),)
-# if Options["No-Action"]:
-# return
-# Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
-# Upload.dump_vars(queue_dir)
-# move_to_dir(queue_dir, perms=0664)
-# os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
def _accept(upload):
    # Accept the upload into the newstage queue and remove the on-disk
    # .dak state file for the .changes.
    # NOTE(review): `summary` and `short_summary` are free variables here
    # (not parameters) -- they are locals of do_accept(); presumably they
    # exist as module globals when this is called. Verify against the full
    # file, otherwise this raises NameError.
    if Options["No-Action"]:
        return
    upload.accept(summary, short_summary, targetdir=Config()["Dir::Queue::Newstage"])
    os.unlink(upload.pkg.changes_file[:-8]+".dak")
-# def do_accept_stableupdate(upload,suite, q):
-# cnf = Config()
-# queue_dir = cnf["Dir::Queue::%s" % (q,)]
-# if not upload.pkg.changes["architecture"].has_key("source"):
-# # It is not a sourceful upload. So its source may be either in p-u
-# # holding, in new, in accepted or already installed.
-# if is_source_in_queue_dir(queue_dir):
-# # It's in p-u holding, so move it there.
-# print "Binary-only upload, source in %s." % (q,)
-# move_to_holding(suite, queue_dir)
-# elif Upload.source_exists(Upload.pkg.changes["source"],
-# Upload.pkg.changes["version"]):
-# # dak tells us that there is source available. At time of
-# # writing this means that it is installed, so put it into
-# # accepted.
-# print "Binary-only upload, source installed."
-# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-# _accept()
-# elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
-# # The source is in accepted, the binary cleared NEW: accept it.
-# print "Binary-only upload, source in accepted."
-# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-# _accept()
-# elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
-# # It's in NEW. We expect the source to land in p-u holding
-# # pretty soon.
-# print "Binary-only upload, source in new."
-# move_to_holding(suite, queue_dir)
-# elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
-# # It's in newstage. Accept into the holding area
-# print "Binary-only upload, source in newstage."
-# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-# _accept()
-# else:
-# # No case applicable. Bail out. Return will cause the upload
-# # to be skipped.
-# print "ERROR"
-# print "Stable update failed. Source not found."
-# return
-# else:
-# # We are handling a sourceful upload. Move to accepted if currently
-# # in p-u holding and to p-u holding otherwise.
-# if is_source_in_queue_dir(queue_dir):
-# print "Sourceful upload in %s, accepting." % (q,)
-# _accept()
-# else:
-# move_to_holding(suite, queue_dir)
-
def do_accept(upload):
    # Accept an upload: announce the verdict, build the mail summaries,
    # then either divert it to the embargoed security queue or install it
    # normally via _accept().
    print "ACCEPT"
    cnf = Config()
    if not Options["No-Action"]:
        (summary, short_summary) = upload.build_summaries()

        if cnf.FindB("Dinstall::SecurityQueueHandling"):
            # Security handling: stash the upload state and move the files
            # into the embargoed queue instead of accepting directly.
            upload.dump_vars(cnf["Dir::Queue::Embargoed"])
            upload.move_to_dir(cnf["Dir::Queue::Embargoed"])
            upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
            # Check for override disparities
            upload.Subst["__SUMMARY__"] = summary
        else:
            # Just a normal upload, accept it...
            _accept(upload)
def do_pkg(changes_file, session):
u = Upload()
################################################################################
-# def do_comments(dir, opref, npref, line, fn):
-# for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
-# lines = open("%s/%s" % (dir, comm)).readlines()
-# if len(lines) == 0 or lines[0] != line + "\n": continue
-# changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
-# and x.endswith(".changes") ]
-# changes_files = sort_changes(changes_files)
-# for f in changes_files:
-# f = utils.validate_changes_file_arg(f, 0)
-# if not f: continue
-# print "\n" + f
-# fn(f, "".join(lines[1:]))
-
-# if opref != npref and not Options["No-Action"]:
-# newcomm = npref + comm[len(opref):]
-# os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
-
-# ################################################################################
-
-# def comment_accept(changes_file, comments):
-# Upload.pkg.changes_file = changes_file
-# Upload.init_vars()
-# Upload.update_vars()
-# Upload.update_subst()
-# files = Upload.pkg.files
-
-# if not recheck():
-# return # dak wants to REJECT, crap
-
-# (new, byhand) = check_status(files)
-# if not new and not byhand:
-# do_accept()
-
-# ################################################################################
-
-# def comment_reject(changes_file, comments):
-# Upload.pkg.changes_file = changes_file
-# Upload.init_vars()
-# Upload.update_vars()
-# Upload.update_subst()
-
-# if not recheck():
-# pass # dak has its own reasons to reject as well, which is fine
-
-# reject(comments)
-# print "REJECT\n" + reject_message,
-# if not Options["No-Action"]:
-# Upload.do_reject(0, reject_message)
-# os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-################################################################################
-
def main():
global Options, Logger, Sections, Priorities
Arguments = [('a',"automatic","Process-New::Options::Automatic"),
('h',"help","Process-New::Options::Help"),
- ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
('t',"trainee","Process-New::Options::Trainee"),
('n',"no-action","Process-New::Options::No-Action")]
- for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
+ for i in ["automatic", "help", "manual-reject", "no-action", "version", "trainee"]:
if not cnf.has_key("Process-New::Options::%s" % (i)):
cnf["Process-New::Options::%s" % (i)] = ""
changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv)
- if len(changes_files) == 0 and not cnf.get("Process-New::Options::Comments-Dir",""):
+ if len(changes_files) == 0:
changes_files = utils.get_changes_files(cnf["Dir::Queue::New"])
Options = cnf.SubTree("Process-New::Options")
# Kill me now? **FIXME**
cnf["Dinstall::Options::No-Mail"] = ""
-# commentsdir = cnf.get("Process-New::Options::Comments-Dir","")
-# if commentsdir:
-# if changes_files != []:
-# sys.stderr.write("Can't specify any changes files if working with comments-dir")
-# sys.exit(1)
-# do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
-# do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
-# else:
- if True:
- for changes_file in changes_files:
- changes_file = utils.validate_changes_file_arg(changes_file, 0)
- if not changes_file:
- continue
- print "\n" + changes_file
-
- do_pkg (changes_file, session)
+ for changes_file in changes_files:
+ changes_file = utils.validate_changes_file_arg(changes_file, 0)
+ if not changes_file:
+ continue
+ print "\n" + changes_file
+
+ do_pkg (changes_file, session)
end()
################################################################################
-default_config = "/etc/dak/dak.conf"
+default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
def which_conf_file():
if os.getenv("DAK_CONFIG"):
################################################################################
import os
+import re
import psycopg2
import traceback
################################################################################
class BinContents(object):
    # ORM class for rows of the bin_contents table -- one (file, binary)
    # pair per row; attributes are presumably filled in by an SQLAlchemy
    # mapper (not visible in this excerpt -- verify against __setupmappers).
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)
+
+__all__.append('BinContents')
+
+################################################################################
+
class DBBinary(object):
def __init__(self, *args, **kwargs):
pass
################################################################################
class BinaryACL(object):
    # ORM class for the binary_acl table; attributes (binary_acl_id, ...)
    # are filled in by the SQLAlchemy mapper in __setupmappers().
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
+
+__all__.append('BinaryACL')
+
+################################################################################
+
class BinaryACLMap(object):
    # ORM class for the binary_acl_map table; the mapper adds
    # binary_acl_map_id plus `fingerprint` and `architecture` relations.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
+
+__all__.append('BinaryACLMap')
+
+################################################################################
+
class Component(object):
def __init__(self, *args, **kwargs):
pass
################################################################################
-class ContentFilename(object):
- def __init__(self, *args, **kwargs):
- pass
-
- def __repr__(self):
- return '<ContentFilename %s>' % self.filename
-
-__all__.append('ContentFilename')
-
@session_wrapper
def get_or_set_contents_file_id(filename, session=None):
"""
# Insert paths
pathcache = {}
for fullpath in fullpaths:
- # Get the necessary IDs ...
- (path, file) = os.path.split(fullpath)
-
- filepath_id = get_or_set_contents_path_id(path, session)
- filename_id = get_or_set_contents_file_id(file, session)
+ if fullpath.startswith( './' ):
+ fullpath = fullpath[2:]
- pathcache[fullpath] = (filepath_id, filename_id)
-
- for fullpath, dat in pathcache.items():
- ca = ContentAssociation()
- ca.binary_id = binary_id
- ca.filepath_id = dat[0]
- ca.filename_id = dat[1]
- session.add(ca)
+ session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )", { 'filename': fullpath, 'id': binary_id} )
- # Only commit if we set up the session ourself
+ session.commit()
if privatetrans:
- session.commit()
session.close()
- else:
- session.flush()
-
return True
except:
__all__.append('Fingerprint')
@session_wrapper
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    query = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return query.one()
    except NoResultFound:
        # No such fingerprint in the database.
        return None
+
+__all__.append('get_fingerprint')
+
@session_wrapper
def get_or_set_fingerprint(fpr, session=None):
"""
################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from an LDAP entry's cn/mn/sn attributes,
    skipping attributes that are missing, empty or just "-"."""
    parts = []
    for attr in ("cn", "mn", "sn"):
        values = entry.get(attr)
        if values and values[0] not in ("", "-"):
            parts.append(values[0])
    return " ".join(parts)
+
+################################################################################
+
class Keyring(object):
    # ORM class for the keyrings table; keyring_name / keyring_id come from
    # the SQLAlchemy mapper. Also carries in-memory key data populated by
    # load_keys() and consumed by the import/generate helpers below.

    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
        " --with-colons --fingerprint --fingerprint"

    # NOTE(review): these are class-level attributes, so every Keyring
    # instance shares the same dictionaries -- verify this is intentional.
    keys = {}
    fpr_lookup = {}

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Keyring %s>' % self.keyring_name

    def de_escape_gpg_str(self, str):
        # Undo gpg's \xNN escaping in uid strings.
        # (NOTE: the parameter shadows the builtin `str`.)
        esclist = re.split(r'(\\x..)', str)
        for x in range(1,len(esclist),2):
            esclist[x] = "%c" % (int(esclist[x][2:],16))
        return "".join(esclist)

    def load_keys(self, keyring):
        # Parse `gpg --with-colons` output for the given keyring file and
        # populate self.keys (keyed by short key id) and self.fpr_lookup
        # (fingerprint -> key id, signing-capable keys only).
        import email.Utils

        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        key = None
        signingkey = False

        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # Primary key record: remember the key id, parse its uid.
                key = field[4]
                (name, addr) = email.Utils.parseaddr(field[9])
                name = re.sub(r"\s*[(].*[)]", "", name)
                if name == "" or addr == "" or "@" not in addr:
                    # Unparseable uid -- keep the raw string as the name.
                    name = field[9]
                    addr = "invalid-uid"
                name = self.de_escape_gpg_str(name)
                self.keys[key] = {"email": addr}
                if name != "":
                    # NOTE(review): "name" is only set when non-empty;
                    # generate_users_from_keyring() indexes it
                    # unconditionally -- verify that cannot KeyError.
                    self.keys[key]["name"] = name
                self.keys[key]["aliases"] = [name]
                self.keys[key]["fingerprints"] = []
                signingkey = True
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey: record fingerprints only for signing-capable keys.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = email.Utils.parseaddr(field[9])
                if name and name not in self.keys[key]["aliases"]:
                    self.keys[key]["aliases"].append(name)
            elif signingkey and field[0] == "fpr":
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        # Match keyring fingerprints against LDAP accounts and attach the
        # LDAP uid to each matched key. Returns (byname, byuid) mappings
        # of database uid ids <-> account names.
        import ldap
        cnf = Config()

        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]

        l = ldap.open(LDAPServer)
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        # NOTE(review): ldap_fin_uid_id appears unused below.
        ldap_fin_uid_id = {}

        byuid = {}
        byname = {}

        for i in Attrs:
            entry = i[1]
            uid = entry["uid"][0]
            name = get_ldap_name(entry)
            fingerprints = entry["keyFingerPrint"]
            keyid = None
            for f in fingerprints:
                key = self.fpr_lookup.get(f, None)
                if key not in self.keys:
                    continue
                self.keys[key]["uid"] = uid

                # Only resolve the database uid id once per LDAP entry.
                if keyid != None:
                    continue
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, name)
                byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        # Create/fetch database uid entries for every key, naming them
        # `format % email`. Keys whose uid could not be parsed are all
        # collapsed onto a single `format % "invalid-uid"` entry.
        byuid = {}
        byname = {}
        any_invalid = False
        for x in self.keys.keys():
            if self.keys[x]["email"] == "invalid-uid":
                any_invalid = True
                self.keys[x]["uid"] = format % "invalid-uid"
            else:
                uid = format % self.keys[x]["email"]
                keyid = get_or_set_uid(uid, session).uid_id
                byuid[keyid] = (uid, self.keys[x]["name"])
                byname[uid] = (keyid, self.keys[x]["name"])
                self.keys[x]["uid"] = uid

        if any_invalid:
            uid = format % "invalid-uid"
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, "ungeneratable user id")
            byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)
__all__.append('Keyring')
@session_wrapper
-def get_or_set_keyring(keyring, session=None):
+def get_keyring(keyring, session=None):
"""
- If C{keyring} does not have an entry in the C{keyrings} table yet, create one
- and return the new Keyring
+ If C{keyring} does not have an entry in the C{keyrings} table yet, return None
If C{keyring} already has an entry, simply return the existing Keyring
@type keyring: string
try:
return q.one()
except NoResultFound:
- obj = Keyring(keyring_name=keyring)
- session.add(obj)
- session.commit_or_flush()
- return obj
+ return None
+
+__all__.append('get_keyring')
+
+################################################################################
+
class KeyringACLMap(object):
    # ORM class for the keyring_acl_map table; the mapper adds
    # keyring_acl_map_id plus `keyring` and `architecture` relations.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
-__all__.append('get_or_set_keyring')
+__all__.append('KeyringACLMap')
################################################################################
################################################################################
class SourceACL(object):
    # ORM class for the source_acl table; source_acl_id is filled in by
    # the SQLAlchemy mapper in __setupmappers().
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
+
+__all__.append('SourceACL')
+
+################################################################################
+
class SrcAssociation(object):
def __init__(self, *args, **kwargs):
pass
################################################################################
class UploadBlock(object):
    # ORM class for the upload_blocks table; the mapper adds
    # upload_block_id plus `fingerprint` and `uid` relations.
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
+
+__all__.append('UploadBlock')
+
+################################################################################
+
class DBConn(Singleton):
"""
database module init.
self.tbl_archive = Table('archive', self.db_meta, autoload=True)
self.tbl_bin_associations = Table('bin_associations', self.db_meta, autoload=True)
self.tbl_binaries = Table('binaries', self.db_meta, autoload=True)
+ self.tbl_binary_acl = Table('binary_acl', self.db_meta, autoload=True)
+ self.tbl_binary_acl_map = Table('binary_acl_map', self.db_meta, autoload=True)
self.tbl_component = Table('component', self.db_meta, autoload=True)
self.tbl_config = Table('config', self.db_meta, autoload=True)
self.tbl_content_associations = Table('content_associations', self.db_meta, autoload=True)
self.tbl_fingerprint = Table('fingerprint', self.db_meta, autoload=True)
self.tbl_keyrings = Table('keyrings', self.db_meta, autoload=True)
self.tbl_known_changes = Table('known_changes', self.db_meta, autoload=True)
+ self.tbl_keyring_acl_map = Table('keyring_acl_map', self.db_meta, autoload=True)
self.tbl_location = Table('location', self.db_meta, autoload=True)
self.tbl_maintainer = Table('maintainer', self.db_meta, autoload=True)
self.tbl_new_comments = Table('new_comments', self.db_meta, autoload=True)
self.tbl_queue_build = Table('queue_build', self.db_meta, autoload=True)
self.tbl_section = Table('section', self.db_meta, autoload=True)
self.tbl_source = Table('source', self.db_meta, autoload=True)
+ self.tbl_source_acl = Table('source_acl', self.db_meta, autoload=True)
self.tbl_src_associations = Table('src_associations', self.db_meta, autoload=True)
self.tbl_src_format = Table('src_format', self.db_meta, autoload=True)
self.tbl_src_uploaders = Table('src_uploaders', self.db_meta, autoload=True)
self.tbl_suite_architectures = Table('suite_architectures', self.db_meta, autoload=True)
self.tbl_suite_src_formats = Table('suite_src_formats', self.db_meta, autoload=True)
self.tbl_uid = Table('uid', self.db_meta, autoload=True)
+ self.tbl_upload_blocks = Table('upload_blocks', self.db_meta, autoload=True)
def __setupmappers(self):
mapper(Architecture, self.tbl_architecture,
binary_id = self.tbl_bin_associations.c.bin,
binary = relation(DBBinary)))
+
mapper(DBBinary, self.tbl_binaries,
properties = dict(binary_id = self.tbl_binaries.c.id,
package = self.tbl_binaries.c.package,
binassociations = relation(BinAssociation,
primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
+ mapper(BinaryACL, self.tbl_binary_acl,
+ properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
+
+ mapper(BinaryACLMap, self.tbl_binary_acl_map,
+ properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
+ fingerprint = relation(Fingerprint, backref="binary_acl_map"),
+ architecture = relation(Architecture)))
+
mapper(Component, self.tbl_component,
properties = dict(component_id = self.tbl_component.c.id,
component_name = self.tbl_component.c.name))
mapper(DBConfig, self.tbl_config,
properties = dict(config_id = self.tbl_config.c.id))
- mapper(ContentAssociation, self.tbl_content_associations,
- properties = dict(ca_id = self.tbl_content_associations.c.id,
- filename_id = self.tbl_content_associations.c.filename,
- filename = relation(ContentFilename),
- filepath_id = self.tbl_content_associations.c.filepath,
- filepath = relation(ContentFilepath),
- binary_id = self.tbl_content_associations.c.binary_pkg,
- binary = relation(DBBinary)))
-
-
- mapper(ContentFilename, self.tbl_content_file_names,
- properties = dict(cafilename_id = self.tbl_content_file_names.c.id,
- filename = self.tbl_content_file_names.c.file))
-
- mapper(ContentFilepath, self.tbl_content_file_paths,
- properties = dict(cafilepath_id = self.tbl_content_file_paths.c.id,
- filepath = self.tbl_content_file_paths.c.path))
-
mapper(DSCFile, self.tbl_dsc_files,
properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
source_id = self.tbl_dsc_files.c.source,
uid_id = self.tbl_fingerprint.c.uid,
uid = relation(Uid),
keyring_id = self.tbl_fingerprint.c.keyring,
- keyring = relation(Keyring)))
+ keyring = relation(Keyring),
+ source_acl = relation(SourceACL),
+ binary_acl = relation(BinaryACL)))
mapper(Keyring, self.tbl_keyrings,
properties = dict(keyring_name = self.tbl_keyrings.c.name,
mapper(KnownChange, self.tbl_known_changes,
properties = dict(known_change_id = self.tbl_known_changes.c.id))
+ mapper(KeyringACLMap, self.tbl_keyring_acl_map,
+ properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
+ keyring = relation(Keyring, backref="keyring_acl_map"),
+ architecture = relation(Architecture)))
+
mapper(Location, self.tbl_location,
properties = dict(location_id = self.tbl_location.c.id,
component_id = self.tbl_location.c.component,
properties = dict(overridetype = self.tbl_override_type.c.type,
overridetype_id = self.tbl_override_type.c.id))
- mapper(PendingContentAssociation, self.tbl_pending_content_associations,
- properties = dict(pca_id = self.tbl_pending_content_associations.c.id,
- filepath_id = self.tbl_pending_content_associations.c.filepath,
- filepath = relation(ContentFilepath),
- filename_id = self.tbl_pending_content_associations.c.filename,
- filename = relation(ContentFilename)))
-
mapper(Priority, self.tbl_priority,
properties = dict(priority_id = self.tbl_priority.c.id))
srcfiles = relation(DSCFile,
primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
srcassociations = relation(SrcAssociation,
- primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source))))
+ primaryjoin=(self.tbl_source.c.id==self.tbl_src_associations.c.source)),
+ srcuploaders = relation(SrcUploader)))
+
+ mapper(SourceACL, self.tbl_source_acl,
+ properties = dict(source_acl_id = self.tbl_source_acl.c.id))
mapper(SrcAssociation, self.tbl_src_associations,
properties = dict(sa_id = self.tbl_src_associations.c.id,
properties = dict(uid_id = self.tbl_uid.c.id,
fingerprint = relation(Fingerprint)))
+ mapper(UploadBlock, self.tbl_upload_blocks,
+ properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
+ fingerprint = relation(Fingerprint, backref="uploadblocks"),
+ uid = relation(Uid, backref="uploadblocks")))
+
## Connection functions
def __createconn(self):
from config import Config
###############################################################################
-def lookup_uid_from_fingerprint(fpr, session):
- uid = None
- uid_name = ""
- # This is a stupid default, but see the comments below
- is_dm = False
-
- user = get_uid_from_fingerprint(fpr, session)
-
- if user is not None:
- uid = user.uid
- if user.name is None:
- uid_name = ''
- else:
- uid_name = user.name
-
- # Check the relevant fingerprint (which we have to have)
- for f in user.fingerprint:
- if f.fingerprint == fpr:
- is_dm = f.keyring.debian_maintainer
- break
-
- return (uid, uid_name, is_dm)
def check_status(files):
    """Return (new, byhand) 0/1 flags for an upload's files dict.

    `byhand` is set if any file entry has type "byhand"; `new` is set if
    any non-byhand entry carries a "new" key.
    """
    new = byhand = 0
    for attrs in files.values():
        if attrs["type"] == "byhand":
            byhand = 1
        # `in` instead of the Python-2-only dict.has_key()
        elif "new" in attrs:
            new = 1
    return (new, byhand)
###############################################################################
self.pkg.reset()
def package_info(self):
    """
    Format various messages from this Upload to send to the maintainer.
    """

    sections = (
        ('Reject Reasons', self.rejects),
        ('Warnings', self.warnings),
        ('Notes', self.notes),
    )

    # Emit each non-empty section as a titled, newline-joined paragraph.
    parts = []
    for title, messages in sections:
        if messages:
            parts.append('\n\n%s:\n%s' % (title, '\n'.join(messages)))
    return ''.join(parts)
'OldProposedUpdates', 'Embargoed', 'Unembargoed')
for queue in queues:
- if 'Dir::Queue::%s' % directory not in cnf:
+ if not cnf.get('Dir::Queue::%s' % queue):
continue
queuefile_path = os.path.join(
- cnf['Dir::Queue::%s' % directory], filename
+ cnf['Dir::Queue::%s' % queue], filename
)
if not os.path.exists(queuefile_path):
def check_lintian(self):
cnf = Config()
+ # Don't reject binary uploads
+ if not self.pkg.changes['architecture'].has_key('source'):
+ return
+
# Only check some distributions
valid_dist = False
for dist in ('unstable', 'experimental'):
elif etag in lintiantags['error']:
# The tag is overriden - but is not allowed to be
self.rejects.append("%s: Overriden tag %s found, but this tag may not be overwritten." % (epackage, etag))
- log("overidden tag is overridden", etag)
+ log("ftpmaster does not allow tag to be overridable", etag)
else:
# Tag is known, it is not overriden, direct reject.
self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
- log("auto rejecting", etag)
# Now tell if they *might* override it.
if etag in lintiantags['warning']:
+ log("auto rejecting", "overridable", etag)
self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
+ else:
+ log("auto rejecting", "not overridable", etag)
###########################################################################
def check_urgency(self):
except:
self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
def check_if_upload_is_sponsored(self, uid_email, uid_name):
    # Decide whether this upload is sponsored, i.e. whether the signing
    # key's uid does NOT match the Maintainer/Changed-By fields. As a side
    # effect, record "sponsoremail" in the changes when a sourceful upload
    # is signed by an email alias not covering those fields.
    changes = self.pkg.changes
    known_emails = [changes["maintaineremail"], changes["changedbyemail"]]
    known_names = [changes["maintainername"], changes["changedbyname"]]

    if uid_email in known_emails:
        sponsored = False
    elif uid_name in known_names:
        # An empty-name match is no evidence at all.
        sponsored = uid_name == ""
    else:
        sponsored = True

    if ("source" in changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
        sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
        if (changes["maintaineremail"] not in sponsor_addresses and
            changes["changedbyemail"] not in sponsor_addresses):
            changes["sponsoremail"] = uid_email

    return sponsored
+
+
###########################################################################
+ # check_signed_by_key checks
+ ###########################################################################
+
def check_signed_by_key(self):
    """Ensure the .changes is signed by an authorized uploader."""
    session = DBConn().session()

    try:
        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)
    finally:
        # BUGFIX: the session was previously leaked when the early-return
        # reject paths above were taken; always close it.
        session.close()
+
+
def check_upload_permissions(self, fpr, session):
    """Check that `fpr` is allowed to make this upload, appending to
    self.rejects otherwise: one-off blocks, source ACL (with DM as a
    special case) and per-architecture binary ACL."""
    # Check any one-off upload blocks
    self.check_upload_blocks(fpr, session)

    # Start with DM as a special case
    # DM is a special case unfortunately, so we check it first
    # (keys with no source access get more access than DMs in one
    # way; DMs can only upload for their packages whether source
    # or binary, whereas keys with no access might be able to
    # upload some binaries)
    if fpr.source_acl.access_level == 'dm':
        self.check_dm_source_upload(fpr, session)
    else:
        # Check source-based permissions for other types
        if "source" in self.pkg.changes["architecture"]:
            if fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
                return
            else:
                # If not a DM, we allow full upload rights
                uid_email = "%s@debian.org" % (fpr.uid.uid)
                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

    # Check binary upload permissions
    # By this point we know that DMs can't have got here unless they
    # are allowed to deal with the package concerned so just apply
    # normal checks
    if fpr.binary_acl.access_level == 'full':
        return

    # Otherwise we're in the map case: any architecture not explicitly
    # granted via binary_acl_map is not permitted.
    tmparches = self.pkg.changes["architecture"].copy()
    tmparches.pop('source', None)

    for bam in fpr.binary_acl_map:
        tmparches.pop(bam.architecture.arch_string, None)

    if len(tmparches.keys()) > 0:
        if fpr.binary_reject:
            rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
            # BUGFIX: the original did `rej += "...: ", ",".join(...)`,
            # which adds a tuple to a str and raises TypeError at runtime.
            rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
            self.rejects.append(rej)
        else:
            # TODO: This is where we'll implement reject vs throw away binaries later
            rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
            rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
            # BUGFIX: same tuple-concatenation bug as above.
            rej += "\nFingerprint: %s" % (fpr.fingerprint)
            self.rejects.append(rej)
+
+
def check_upload_blocks(self, fpr, session):
    """Check whether any upload blocks apply to this source, source
    version, uid / fpr combination"""

    def block_rej_template(fb):
        rej = 'Manual upload block in place for package %s' % fb.source
        if fb.version is not None:
            rej += ', version %s' % fb.version
        return rej

    blocks = session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all()
    for fb in blocks:
        # version is None if the block applies to all versions
        if fb.version is not None and fb.version != self.pkg.changes['version']:
            continue
        # Check both fpr and uid - either is enough to cause a reject
        if fb.fpr is not None and fb.fpr.fingerprint == fpr.fingerprint:
            self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
        if fb.uid is not None and fb.uid == fpr.uid:
            self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
+
+
def check_dm_upload(self, fpr, session):
    """Apply the DM upload restrictions from GR 2007-003
    (http://www.debian.org/vote/2007/vote_003), appending to self.rejects
    on any violation."""
    # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
    ## none of the uploaded packages are NEW
    rej = False
    for f in self.pkg.files.keys():
        # BUGFIX: the original interpolated an undefined name `uid` into
        # these two messages (NameError); use fpr.uid.uid instead.
        if "byhand" in self.pkg.files[f]:
            self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
            rej = True
        if "new" in self.pkg.files[f]:
            self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
            rej = True

    if rej:
        return

    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
    q = q.join(SrcAssociation)
    q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
    q = q.order_by(desc('source.version')).limit(1)

    r = q.all()

    if len(r) != 1:
        rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
        self.rejects.append(rej)
        return

    r = r[0]
    if not r.dm_upload_allowed:
        rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
        self.rejects.append(rej)
        return

    ## the Maintainer: field of the uploaded .changes file corresponds with
    ## the owner of the key used (ie, non-developer maintainers may not sponsor
    ## uploads)
    if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
        self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

    ## the most recent version of the package uploaded to unstable or
    ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
    ## non-developer maintainers cannot NMU or hijack packages)

    # srcuploaders includes the maintainer
    accept = False
    for sup in r.srcuploaders:
        (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
        # Eww - I hope we never have two people with the same name in Debian
        if email == fpr.uid.uid or name == fpr.uid.name:
            accept = True
            break

    if not accept:
        self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
        return

    ## none of the packages are being taken over from other source packages
    for b in self.pkg.changes["binary"].keys():
        for suite in self.pkg.changes["distribution"].keys():
            q = session.query(DBSource)
            q = q.join(DBBinary).filter_by(package=b)
            q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

            for s in q.all():
                if s.source != self.pkg.changes["source"]:
                    self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
+
+
+
def check_transition(self, session):
cnf = Config()
return
###########################################################################
- def check_signed_by_key(self):
- """Ensure the .changes is signed by an authorized uploader."""
- session = DBConn().session()
-
- self.check_transition(session)
-
- (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
-
- # match claimed name with actual name:
- if uid is None:
- # This is fundamentally broken but need us to refactor how we get
- # the UIDs/Fingerprints in order for us to fix it properly
- uid, uid_email = self.pkg.changes["fingerprint"], uid
- may_nmu, may_sponsor = 1, 1
- # XXX by default new dds don't have a fingerprint/uid in the db atm,
- # and can't get one in there if we don't allow nmu/sponsorship
- elif is_dm is False:
- # If is_dm is False, we allow full upload rights
- uid_email = "%s@debian.org" % (uid)
- may_nmu, may_sponsor = 1, 1
- else:
- # Assume limited upload rights unless we've discovered otherwise
- uid_email = uid
- may_nmu, may_sponsor = 0, 0
-
- if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
- sponsored = 0
- elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
- sponsored = 0
- if uid_name == "": sponsored = 1
- else:
- sponsored = 1
- if ("source" in self.pkg.changes["architecture"] and
- uid_email and utils.is_email_alias(uid_email)):
- sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
- if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
- self.pkg.changes["changedbyemail"] not in sponsor_addresses):
- self.pkg.changes["sponsoremail"] = uid_email
-
- if sponsored and not may_sponsor:
- self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
-
- if not sponsored and not may_nmu:
- should_reject = True
- highest_sid, highest_version = None, None
-
- # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
- # It ignores higher versions with the dm_upload_allowed flag set to false
- # I'm keeping the existing behaviour for now until I've gone back and
- # checked exactly what the GR says - mhy
- for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
- if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
- highest_sid = si.source_id
- highest_version = si.version
-
- if highest_sid is None:
- self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
- else:
- for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
- (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
- if email == uid_email or name == uid_name:
- should_reject = False
- break
-
- if should_reject is True:
- self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
-
- for b in self.pkg.changes["binary"].keys():
- for suite in self.pkg.changes["distribution"].keys():
- q = session.query(DBSource)
- q = q.join(DBBinary).filter_by(package=b)
- q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
-
- for s in q.all():
- if s.source != self.pkg.changes["source"]:
- self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
-
- for f in self.pkg.files.keys():
- if self.pkg.files[f].has_key("byhand"):
- self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
- if self.pkg.files[f].has_key("new"):
- self.rejects.append("%s may not upload NEW file %s" % (uid, f))
-
- session.close()
-
+ # End check_signed_by_key checks
###########################################################################
+
def build_summaries(self):
""" Build a summary of changes the upload introduces. """
if actual_size != int(dsc_entry["size"]):
self.rejects.append("size for %s doesn't match %s." % (found, file))
+ ################################################################################
+ # This is used by process-new and process-holding to recheck a changes file
+ # at the time we're running. It mainly wraps various other internal functions
+ # and is similar to accepted_checks - these should probably be tidied up
+ # and combined
+ def recheck(self, session):
+ cnf = Config()
+ for f in self.pkg.files.keys():
+ # The .orig.tar.gz can disappear out from under us is it's a
+ # duplicate of one in the archive.
+ if not self.pkg.files.has_key(f):
+ continue
+
+ entry = self.pkg.files[f]
+
+ # Check that the source still exists
+ if entry["type"] == "deb":
+ source_version = entry["source version"]
+ source_package = entry["source package"]
+ if not self.pkg.changes["architecture"].has_key("source") \
+ and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+ source_epochless_version = re_no_epoch.sub('', source_version)
+ dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+ found = False
+ for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
+ if cnf.has_key("Dir::Queue::%s" % (q)):
+ if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+ found = True
+ if not found:
+ self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
+
+ # Version and file overwrite checks
+ if entry["type"] == "deb":
+ self.check_binary_against_db(f, session)
+ elif entry["type"] == "dsc":
+ self.check_source_against_db(f, session)
+ self.check_dsc_against_db(f, session)
+
################################################################################
def accepted_checks(self, overwrite_checks, session):
# Recheck anything that relies on the database; since that's not
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
re_multi_line_field, re_srchasver, re_taint_free, \
- re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
+ re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
+ re_is_orig_source
from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
try:
try:
file_handle = open_file(f)
-
+
# Check for the hash entry, to not trigger a KeyError.
if not files[f].has_key(hash_key(hashname)):
rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
where))
continue
-
+
# Actually check the hash for correctness.
if hashfunc(file_handle) != files[f][hash_key(hashname)]:
rejmsg.append("%s: %s check failed in %s" % (f, hashname,