X-Git-Url: https://git.donarmstrong.com/?a=blobdiff_plain;f=daklib%2Futils.py;h=3e299abc51709a4b4b3b3c308039068048195cc9;hb=34853ea46fbd8c389cc8e2cc93ce449a3c277762;hp=0896d57839eb335f30f375a602519cb727deb15c;hpb=ce828cf5a3557613771eab8f0ad59586bbbcecbb;p=dak.git

diff --git a/daklib/utils.py b/daklib/utils.py
index 0896d578..3e299abc 100755
--- a/daklib/utils.py
+++ b/daklib/utils.py
@@ -23,6 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
 import commands
+import datetime
 import email.Header
 import os
 import pwd
@@ -33,14 +34,19 @@ import sys
 import tempfile
 import traceback
 import stat
+import apt_inst
 import apt_pkg
 import time
 import re
 import email as modemail
 import subprocess
 
-from dbconn import DBConn, get_architecture, get_component, get_suite
+from dbconn import DBConn, get_architecture, get_component, get_suite, \
+                   get_override_type, Keyring, session_wrapper, \
+                   get_active_keyring_paths, get_primary_keyring_path
+from sqlalchemy import desc
 from dak_exceptions import *
+from gpg import SignedFile
 from textutils import fix_maintainer
 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
                     re_multi_line_field, re_srchasver, re_taint_free, \
@@ -70,7 +76,9 @@ def dak_getstatusoutput(cmd):
     pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
 
-    output = "".join(pipe.stdout.readlines())
+    output = pipe.stdout.read()
+
+    pipe.wait()
 
     if output[-1:] == '\n':
         output = output[:-1]
@@ -109,7 +117,7 @@
     try:
         f = open(filename, mode)
     except IOError:
-        raise CantOpenError, filename
+        raise CantOpenError(filename)
     return f
 
 ################################################################################
@@ -132,7 +140,7 @@ def our_raw_input(prompt=""):
 
 ################################################################################
 
-def extract_component_from_section(section):
+def extract_component_from_section(section, session=None):
     component = ""
 
     if section.find('/') != -1:
@@ -140,16 +148,25 @@
 
     # Expand default component
     if component == "":
-        if Cnf.has_key("Component::%s" % section):
-            component = section
-        else:
+        comp = get_component(section, session)
+        if comp is None:
             component = "main"
+        else:
+            component = comp.component_name
 
     return (section, component)
 
 ################################################################################
 
-def parse_deb822(contents, signing_rules=0):
+def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
+    require_signature = True
+    if keyrings == None:
+        keyrings = []
+        require_signature = False
+
+    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
+    contents = signed_file.contents
+
     error = ""
     changes = {}
 
@@ -157,7 +174,7 @@
     lines = contents.splitlines(True)
 
     if len(lines) == 0:
-        raise ParseChangesError, "[Empty changes file]"
+        raise ParseChangesError("[Empty changes file]")
 
     # Reindex by line number so we can easily verify the format of
     # .dsc files...
@@ -167,38 +184,16 @@ def parse_deb822(contents, signing_rules=0):
         index += 1
         indexed_lines[index] = line[:-1]
 
-    inside_signature = 0
-
     num_of_lines = len(indexed_lines.keys())
     index = 0
     first = -1
     while index < num_of_lines:
         index += 1
         line = indexed_lines[index]
-        if line == "":
-            if signing_rules == 1:
-                index += 1
-                if index > num_of_lines:
-                    raise InvalidDscError, index
-                line = indexed_lines[index]
-                if not line.startswith("-----BEGIN PGP SIGNATURE"):
-                    raise InvalidDscError, index
-                inside_signature = 0
-                break
-            else:
-                continue
-        if line.startswith("-----BEGIN PGP SIGNATURE"):
+        if line == "" and signing_rules == 1:
+            if index != num_of_lines:
+                raise InvalidDscError(index)
             break
-        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
-            inside_signature = 1
-            if signing_rules == 1:
-                while index < num_of_lines and line != "":
-                    index += 1
-                    line = indexed_lines[index]
-            continue
-        # If we're not inside the signed data, don't process anything
-        if signing_rules >= 0 and not inside_signature:
-            continue
         slf = re_single_line_field.match(line)
         if slf:
             field = slf.groups()[0].lower()
@@ -211,7 +206,7 @@
         mlf = re_multi_line_field.match(line)
         if mlf:
             if first == -1:
-                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
+                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
             if first == 1 and changes[field] != "":
                 changes[field] += '\n'
             first = 0
@@ -219,10 +214,7 @@
             continue
         error += line
 
-    if signing_rules == 1 and inside_signature:
-        raise InvalidDscError, index
-
-    changes["filecontents"] = "".join(lines)
+    changes["filecontents"] = armored_contents
 
     if changes.has_key("source"):
         # Strip the source version in brackets from the source field,
@@ -233,13 +225,13 @@
             changes["source-version"] = srcver.group(2)
 
     if error:
-        raise ParseChangesError, error
+        raise ParseChangesError(error)
 
     return changes
 
 ################################################################################
 
-def parse_changes(filename, signing_rules=0, dsc_file=0):
+def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
     """
     Parses a changes file and returns a dictionary where each field is a
     key.  The mandatory first argument is the filename of the .changes
@@ -267,8 +259,8 @@
     try:
         unicode(content, 'utf-8')
     except UnicodeError:
-        raise ChangesUnicodeError, "Changes file not proper utf-8"
-    changes = parse_deb822(content, signing_rules)
+        raise ChangesUnicodeError("Changes file not proper utf-8")
+    changes = parse_deb822(content, signing_rules, keyrings=keyrings)
 
 
     if not dsc_file:
@@ -282,7 +274,7 @@
                 missingfields.append(keyword)
 
         if len(missingfields):
-            raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)
+            raise ParseChangesError("Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields))
 
     return changes
 
@@ -360,7 +352,7 @@ def check_size(where, files):
     for f in files.keys():
         try:
             entry = os.stat(f)
-        except OSError, exc:
+        except OSError as exc:
             if exc.errno == 2:
                 # TODO: This happens when the file is in the pool.
                 continue
@@ -409,10 +401,10 @@ def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
         (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
         (r'diff.gz', ('debian_diff',)),
         (r'tar.gz', ('native_tar_gz', 'native_tar')),
-        (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
-        (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
-        (r'tar\.(gz|bz2)', ('native_tar',)),
-        (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
+        (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
+        (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
+        (r'tar\.(gz|bz2|xz)', ('native_tar',)),
+        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
     )
 
     for f in dsc_files.keys():
@@ -536,8 +528,7 @@
         files[checkfile][hash_key(hashname)] = checksum
     for f in files.keys():
         if not files[f].has_key(hash_key(hashname)):
-            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
-                hashname, where))
+            rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
     return rejmsg
 
 ################################################################################
@@ -569,7 +560,7 @@
             else:
                 (md5, size, name) = s
         except ValueError:
-            raise ParseChangesError, i
+            raise ParseChangesError(i)
 
         if section == "":
             section = "-"
@@ -586,9 +577,46 @@
 
 ################################################################################
 
+# see http://bugs.debian.org/619131
+def build_package_list(dsc, session = None):
+    if not dsc.has_key("package-list"):
+        return {}
+
+    packages = {}
+
+    for line in dsc["package-list"].split("\n"):
+        if not line:
+            break
+
+        fields = line.split()
+        name = fields[0]
+        package_type = fields[1]
+        (section, component) = extract_component_from_section(fields[2])
+        priority = fields[3]
+
+        # Validate type if we have a session
+        if session and get_override_type(package_type, session) is None:
+            # Maybe just warn and ignore? exit(1) might be a bit hard...
+            utils.fubar("invalid type (%s) in Package-List." % (package_type))
+
+        if name not in packages or packages[name]["type"] == "dsc":
+            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
+
+    return packages
+
+################################################################################
+
 def send_mail (message, filename=""):
     """sendmail wrapper, takes _either_ a message string or a file as arguments"""
 
+    maildir = Cnf.get('Dir::Mail')
+    if maildir:
+        path = os.path.join(maildir, datetime.datetime.now().isoformat())
+        path = find_next_free(path)
+        fh = open(path, 'w')
+        print >>fh, message,
+        fh.close()
+
     # Check whether we're supposed to be sending mail
     if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
         return
@@ -657,14 +685,14 @@
             os.unlink (filename);
             return;
 
-        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
+        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
         os.write (fd, message_raw.as_string(True));
         os.close (fd);
 
     # Invoke sendmail
     (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
     if (result != 0):
-        raise SendmailFailedError, output
+        raise SendmailFailedError(output)
 
     # Clean up any temporary files
     if message:
@@ -682,14 +710,14 @@ def poolify (source, component):
 
 ################################################################################
 
-def move (src, dest, overwrite = 0, perms = 0664):
+def move (src, dest, overwrite = 0, perms = 0o664):
     if os.path.exists(dest) and os.path.isdir(dest):
         dest_dir = dest
     else:
         dest_dir = os.path.dirname(dest)
     if not os.path.exists(dest_dir):
         umask = os.umask(00000)
-        os.makedirs(dest_dir, 02775)
+        os.makedirs(dest_dir, 0o2775)
         os.umask(umask)
     #print "Moving %s to %s..." % (src, dest)
     if os.path.exists(dest) and os.path.isdir(dest):
@@ -705,14 +733,14 @@ def move (src, dest, overwrite = 0, perms = 0664):
     os.chmod(dest, perms)
     os.unlink(src)
 
-def copy (src, dest, overwrite = 0, perms = 0664):
+def copy (src, dest, overwrite = 0, perms = 0o664):
     if os.path.exists(dest) and os.path.isdir(dest):
         dest_dir = dest
     else:
         dest_dir = os.path.dirname(dest)
     if not os.path.exists(dest_dir):
         umask = os.umask(00000)
-        os.makedirs(dest_dir, 02775)
+        os.makedirs(dest_dir, 0o2775)
         os.umask(umask)
     #print "Copying %s to %s..." % (src, dest)
     if os.path.exists(dest) and os.path.isdir(dest):
@@ -743,11 +771,11 @@ def which_conf_file ():
     res = socket.getfqdn()
 
     # In case we allow local config files per user, try if one exists
-    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
+    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
-           apt_pkg.ReadConfigFileISC(Cnf,default_config)
+           apt_pkg.ReadConfigFileISC(Cnf,confpath)
 
     # We are still in here, so there is no local config file or we do
     # not allow local files. Do the normal stuff.
@@ -759,7 +787,7 @@ def which_conf_file ():
 def which_apt_conf_file ():
     res = socket.getfqdn()
     # In case we allow local config files per user, try if one exists
-    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
+    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
@@ -855,7 +883,7 @@ def changes_compare (a, b):
     # Sort by source version
     a_version = a_changes.get("version", "0")
     b_version = b_changes.get("version", "0")
-    q = apt_pkg.VersionCompare(a_version, b_version)
+    q = apt_pkg.version_compare(a_version, b_version)
     if q:
         return q
 
@@ -992,8 +1020,8 @@ def parse_args(Options):
         suite_ids_list = []
         for suitename in split_args(Options["Suite"]):
             suite = get_suite(suitename, session=session)
-            if suite.suite_id is None:
-                warn("suite '%s' not recognised." % (suite.suite_name))
+            if not suite or suite.suite_id is None:
+                warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
             else:
                 suite_ids_list.append(suite.suite_id)
         if suite_ids_list:
@@ -1045,43 +1073,6 @@
 
 ################################################################################
 
-# Inspired(tm) by Bryn Keller's print_exc_plus (See
-# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
-
-def print_exc():
-    tb = sys.exc_info()[2]
-    while tb.tb_next:
-        tb = tb.tb_next
-    stack = []
-    frame = tb.tb_frame
-    while frame:
-        stack.append(frame)
-        frame = frame.f_back
-    stack.reverse()
-    traceback.print_exc()
-    for frame in stack:
-        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
-                                               frame.f_code.co_filename,
-                                               frame.f_lineno)
-        for key, value in frame.f_locals.items():
-            print "\t%20s = " % key,
-            try:
-                print value
-            except:
-                print ""
-
-################################################################################
-
-def try_with_debug(function):
-    try:
-        function()
-    except SystemExit:
-        raise
-    except:
-        print_exc()
-
-################################################################################
-
 def arch_compare_sw (a, b):
     """
     Function for use in sorting lists of architectures.
@@ -1223,7 +1214,7 @@ def retrieve_key (filename, keyserver=None, keyring=None):
     if not keyserver:
         keyserver = Cnf["Dinstall::KeyServer"]
     if not keyring:
-        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
+        keyring = get_primary_keyring_path()
 
     # Ensure the filename contains no shell meta-characters or other badness
     if not re_taint_free.match(filename):
@@ -1260,13 +1251,13 @@
 def gpg_keyring_args(keyrings=None):
     if not keyrings:
-        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
+        keyrings = get_active_keyring_paths()
 
     return " ".join(["--keyring %s" % x for x in keyrings])
 
 ################################################################################
 
-
-def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
+@session_wrapper
+def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
     """
     Check the signature of a file and return the fingerprint if the
     signature is valid or 'None' if it's not.  The first argument is the
@@ -1294,7 +1285,7 @@
         return (None, rejects)
 
     if not keyrings:
-        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
+        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]
 
     # Autofetch the signing key if that's enabled
     if autofetch == None:
@@ -1405,7 +1396,7 @@ def gpg_get_key_addresses(fingerprint):
     addresses = key_uid_email_cache.get(fingerprint)
     if addresses != None:
         return addresses
-    addresses = set()
+    addresses = list()
     cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
     (result, output) = commands.getstatusoutput(cmd)
@@ -1413,45 +1404,12 @@ def gpg_get_key_addresses(fingerprint):
     for l in output.split('\n'):
         m = re_gpg_uid.match(l)
         if m:
-            addresses.add(m.group(1))
+            addresses.append(m.group(1))
     key_uid_email_cache[fingerprint] = addresses
     return addresses
 
 ################################################################################
 
-# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
-
-def wrap(paragraph, max_length, prefix=""):
-    line = ""
-    s = ""
-    have_started = 0
-    words = paragraph.split()
-
-    for word in words:
-        word_size = len(word)
-        if word_size > max_length:
-            if have_started:
-                s += line + '\n' + prefix
-            s += word + '\n' + prefix
-        else:
-            if have_started:
-                new_length = len(line) + word_size + 1
-                if new_length > max_length:
-                    s += line + '\n' + prefix
-                    line = word
-                else:
-                    line += ' ' + word
-            else:
-                line = word
-                have_started = 1
-
-    if have_started:
-        s += line
-
-    return s
-
-################################################################################
-
 def clean_symlink (src, dest, root):
     """
     Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
@@ -1519,7 +1477,7 @@ def get_changes_files(from_dir):
         # Much of the rest of p-u/p-a depends on being in the right place
         os.chdir(from_dir)
         changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
-    except OSError, e:
+    except OSError as e:
         fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
 
     return changes_files
@@ -1528,9 +1486,112 @@
 
 apt_pkg.init()
 
-Cnf = apt_pkg.newConfiguration()
+Cnf = apt_pkg.Configuration()
 if not os.getenv("DAK_TEST"):
-    apt_pkg.ReadConfigFileISC(Cnf,default_config)
+    apt_pkg.read_config_file_isc(Cnf,default_config)
 
 if which_conf_file() != default_config:
-    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
+    apt_pkg.read_config_file_isc(Cnf,which_conf_file())
+
+################################################################################
+
+def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
+    """
+    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
+    Well, actually it parsed a local copy, but let's document the source
+    somewhere ;)
+
+    returns a dict associating source package name with a list of open wnpp
+    bugs (Yes, there might be more than one)
+    """
+
+    line = []
+    try:
+        f = open(file)
+        lines = f.readlines()
+    except IOError as e:
+        print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
+        lines = []
+    wnpp = {}
+
+    for line in lines:
+        splited_line = line.split(": ", 1)
+        if len(splited_line) > 1:
+            wnpp[splited_line[0]] = splited_line[1].split("|")
+
+    for source in wnpp.keys():
+        bugs = []
+        for wnpp_bug in wnpp[source]:
+            bug_no = re.search("(\d)+", wnpp_bug).group()
+            if bug_no:
+                bugs.append(bug_no)
+        wnpp[source] = bugs
+    return wnpp
+
+################################################################################
+
+def get_packages_from_ftp(root, suite, component, architecture):
+    """
+    Returns an object containing apt_pkg-parseable data collected by
+    aggregating Packages.gz files gathered for each architecture.
+
+    @type root: string
+    @param root: path to ftp archive root directory
+
+    @type suite: string
+    @param suite: suite to extract files from
+
+    @type component: string
+    @param component: component to extract files from
+
+    @type architecture: string
+    @param architecture: architecture to extract files from
+
+    @rtype: TagFile
+    @return: apt_pkg class containing package data
+
+    """
+    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
+    (fd, temp_file) = temp_filename()
+    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
+    if (result != 0):
+        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
+    filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
+    if os.path.exists(filename):
+        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
+        if (result != 0):
+            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
+    packages = open_file(temp_file)
+    Packages = apt_pkg.ParseTagFile(packages)
+    os.unlink(temp_file)
+    return Packages
+
+################################################################################
+
+def deb_extract_control(fh):
+    """extract DEBIAN/control from a binary package"""
+    return apt_inst.DebFile(fh).control.extractdata("control")
+
+################################################################################
+
+def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
+    """Mail addresses to contact for an upload
+
+    Args:
+       maintainer (str): Maintainer field of the changes file
+       changed_by (str): Changed-By field of the changes file
+       fingerprint (str): Fingerprint of the PGP key used to sign the upload
+
+    Returns:
+       List of RFC 2047-encoded mail addresses to contact regarding this upload
+    """
+    addresses = [maintainer]
+    if changed_by != maintainer:
+        addresses.append(changed_by)
+
+    fpr_addresses = gpg_get_key_addresses(fingerprint)
+    if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
+        addresses.append(fpr_addresses[0])
+
+    encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
+    return encoded_addresses
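
A minimal standalone sketch of the contact-selection rule introduced by the new mail_addresses_for_upload() helper above: the maintainer and Changed-By always get mail, and the signing key's first UID is added only for sponsored uploads. The name pick_upload_contacts and the key_addresses parameter are hypothetical; the fix_maintainer()-based address normalisation and RFC 2047 encoding performed by the real helper are deliberately omitted.

# Hypothetical sketch -- not part of the patch above. It mirrors the address
# selection in mail_addresses_for_upload(), but compares plain strings instead
# of fix_maintainer()-normalised addresses and skips RFC 2047 encoding.

def pick_upload_contacts(maintainer, changed_by, key_addresses):
    addresses = [maintainer]
    if changed_by != maintainer:
        addresses.append(changed_by)
    # Add the key holder only when the signer is neither Maintainer nor
    # Changed-By (a sponsored upload); the first UID on the key is used.
    if key_addresses and changed_by not in key_addresses and maintainer not in key_addresses:
        addresses.append(key_addresses[0])
    return addresses

if __name__ == '__main__':
    # Sponsored upload: all three parties get mail.
    print pick_upload_contacts("maint@example.org", "uploader@example.org",
                               ["sponsor@example.org"])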