2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite, get_override_type, Keyring, session_wrapper
43 from dak_exceptions import *
44 from gpg import SignedFile
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
51 from formats import parse_format, validate_changes_format
52 from srcformats import get_format_from_string
53 from collections import defaultdict
55 ################################################################################
# Module-level defaults and caches shared across the daklib utility helpers.
57 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
58 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
60 alias_cache = None #: Cache for email alias checks
61 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
# Each entry is (hashname, hashing function, earliest .changes Format version
# that carries the corresponding Checksums-* field).
63 # (hashname, function, earliest_changes_version)
64 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
65 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
67 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
68 # code in lenny's Python. This also affects commands.getoutput and
# Replacement for commands.getstatusoutput: runs `cmd` through a shell and
# captures stdout+stderr combined.
70 def dak_getstatusoutput(cmd):
71 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
72 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
74 output = pipe.stdout.read()
# NOTE(review): the excerpt is missing lines here (process wait, trailing
# newline strip, and the (status, output) return) — see the full source.
78 if output[-1:] == '\n':
# Install the replacement so all later commands.getstatusoutput callers in
# this module get the fixed exit-code behaviour.
86 commands.getstatusoutput = dak_getstatusoutput
88 ################################################################################
# NOTE(review): the enclosing "def html_escape(s):" line is missing from this
# excerpt; the visible body replaces characters matched by re_html_escaping
# with their entities from the html_escaping map.
91 """ Escape html chars """
92 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
94 ################################################################################
# Thin wrapper around open() that converts IOError into CantOpenError so
# callers have a single dak-specific exception to catch.
96 def open_file(filename, mode='r'):
98 Open C{file}, return fileobject.
100 @type filename: string
101 @param filename: path/filename to open
104 @param mode: open mode
107 @return: open fileobject
109 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
# NOTE(review): the try/except IOError lines and the "return f" are missing
# from this excerpt.
113 f = open(filename, mode)
115 raise CantOpenError, filename
118 ################################################################################
# Prompted raw_input() wrapper; prints the prompt itself and (per the visible
# EOF message) treats ^D as a user interrupt. Interior lines are missing from
# this excerpt.
120 def our_raw_input(prompt=""):
124 sys.stdout.write(prompt)
133 sys.stderr.write("\nUser interrupt (^D).\n")
136 ################################################################################
# Split a Debian "Section" value into (section, component). A "comp/sect"
# value yields component "comp"; otherwise the component may be expanded from
# the Cnf "Component::<section>" mapping (default-component lines are missing
# from this excerpt).
138 def extract_component_from_section(section):
141 if section.find('/') != -1:
142 component = section.split('/')[0]
144 # Expand default component
146 if Cnf.has_key("Component::%s" % section):
151 return (section, component)
153 ################################################################################
# Parse RFC822-style (deb822) content — optionally PGP-signed — into a dict
# of lower-cased field name -> value. signing_rules == -1 disables the
# signature requirement; == 1 enables strict .dsc layout checks. When no
# keyrings are given they are loaded from the active Keyring rows via
# `session`. Several interior lines are missing from this excerpt.
156 def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
158 keyrings = [ k.keyring_name for k in session.query(Keyring).filter(Keyring.active == True).all() ]
159 require_signature = True
160 if signing_rules == -1:
161 require_signature = False
# SignedFile both verifies the signature (when required) and strips the
# armor, leaving just the payload in .contents.
163 signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
164 contents = signed_file.contents
169 # Split the lines in the input, keeping the linebreaks.
170 lines = contents.splitlines(True)
173 raise ParseChangesError, "[Empty changes file]"
175 # Reindex by line number so we can easily verify the format of
181 indexed_lines[index] = line[:-1]
183 num_of_lines = len(indexed_lines.keys())
186 while index < num_of_lines:
188 line = indexed_lines[index]
# In strict (.dsc) mode a blank line is only legal as the very last line.
189 if line == "" and signing_rules == 1:
190 if index != num_of_lines:
191 raise InvalidDscError, index
# Single-line "Field: value" entries.
193 slf = re_single_line_field.match(line)
195 field = slf.groups()[0].lower()
196 changes[field] = slf.groups()[1]
200 changes[field] += '\n'
# Continuation lines of a multi-line field must follow some field header.
202 mlf = re_multi_line_field.match(line)
205 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
206 if first == 1 and changes[field] != "":
207 changes[field] += '\n'
209 changes[field] += mlf.groups()[0] + '\n'
# Keep the raw (still armored) input around for later re-emission.
213 changes["filecontents"] = armored_contents
215 if changes.has_key("source"):
216 # Strip the source version in brackets from the source field,
217 # put it in the "source-version" field instead.
218 srcver = re_srchasver.search(changes["source"])
220 changes["source"] = srcver.group(1)
221 changes["source-version"] = srcver.group(2)
224 raise ParseChangesError, error
228 ################################################################################
# Parse a .changes file: validate UTF-8, delegate field parsing to
# parse_deb822(), then (for .changes, not .dsc) enforce the mandatory field
# set from Debian Policy 5.5.
231 def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None, session=None):
233 Parses a changes file and returns a dictionary where each field is a
234 key. The mandatory first argument is the filename of the .changes
237 signing_rules is an optional argument:
239 - If signing_rules == -1, no signature is required.
240 - If signing_rules == 0 (the default), a signature is required.
241 - If signing_rules == 1, it turns on the same strict format checking
244 The rules for (signing_rules == 1)-mode are:
246 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
247 followed by any PGP header data and must end with a blank line.
249 - The data section must end with a blank line and must be followed by
250 "-----BEGIN PGP SIGNATURE-----".
253 changes_in = open_file(filename)
254 content = changes_in.read()
# unicode() is called purely for its side effect of raising on invalid UTF-8.
257 unicode(content, 'utf-8')
259 raise ChangesUnicodeError, "Changes file not proper utf-8"
260 changes = parse_deb822(content, signing_rules, keyrings=keyrings, session=session)
264 # Finally ensure that everything needed for .changes is there
265 must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
266 'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
269 for keyword in must_keywords:
270 if not changes.has_key(keyword.lower()):
271 missingfields.append(keyword)
273 if len(missingfields):
# NOTE(review): "mandantory" is a typo for "mandatory" in this user-visible
# error string; fixing it changes runtime output, so it is only flagged here.
274 raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)
278 ################################################################################
def hash_key(hashname):
    """Return the key under which `hashname`'s digest is stored in a files dict.

    E.g. "md5" -> "md5sum", "sha256" -> "sha256sum".
    """
    return '{0}sum'.format(hashname)
283 ################################################################################
# Compute `hashname` digests (via `hashfunc`) for every file in `files` and
# store each under hash_key(hashname); unopenable files are collected into
# the returned reject-message list. Interior lines (rejmsg init, file close,
# return) are missing from this excerpt.
285 def create_hash(where, files, hashname, hashfunc):
287 create_hash extends the passed files dict with the given hash by
288 iterating over all files on disk and passing them to the hashing
293 for f in files.keys():
295 file_handle = open_file(f)
296 except CantOpenError:
297 rejmsg.append("Could not open file %s for checksumming" % (f))
300 files[f][hash_key(hashname)] = hashfunc(file_handle)
305 ################################################################################
# Recompute `hashname` for each file on disk and compare against the value
# already present in `files`; returns reject messages, never mutates input.
307 def check_hash(where, files, hashname, hashfunc):
309 check_hash checks the given hash in the files dict against the actual
310 files on disk. The hash values need to be present consistently in
311 all file entries. It does not modify its input in any way.
315 for f in files.keys():
319 file_handle = open_file(f)
321 # Check for the hash entry, to not trigger a KeyError.
322 if not files[f].has_key(hash_key(hashname)):
323 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
327 # Actually check the hash for correctness.
328 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
329 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
# Files living in the pool are expected to be unopenable here; the error is
# deliberately swallowed (see the commented-out warn below).
331 except CantOpenError:
332 # TODO: This happens when the file is in the pool.
333 # warn("Cannot open file %s" % f)
340 ################################################################################
# Compare each file's on-disk size (via os.stat, lines missing from this
# excerpt) against the "size" recorded in the files dict; returns reject
# messages.
342 def check_size(where, files):
344 check_size checks the file sizes in the passed files dict against the
349 for f in files.keys():
354 # TODO: This happens when the file is in the pool.
358 actual_size = entry[stat.ST_SIZE]
359 size = int(files[f]["size"])
360 if size != actual_size:
361 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
362 % (f, actual_size, size, where))
365 ################################################################################
# Validate that the file list of a .dsc matches what its announced source
# Format allows (e.g. 1.0 native vs. 3.0 (quilt)).
367 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
369 Verify that the files listed in the Files field of the .dsc are
370 those expected given the announced Format.
372 @type dsc_filename: string
373 @param dsc_filename: path of .dsc file
376 @param dsc: the content of the .dsc parsed by C{parse_changes()}
378 @type dsc_files: dict
379 @param dsc_files: the file list returned by C{build_file_list()}
382 @return: all errors detected
386 # Parse the file if needed
388 dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);
390 if dsc_files is None:
391 dsc_files = build_file_list(dsc, is_a_dsc=1)
393 # Ensure .dsc lists proper set of source files according to the format
395 has = defaultdict(lambda: 0)
# NOTE(review): the first three patterns use unescaped dots, so e.g. "." in
# 'orig.tar.gz' matches any character. In practice re_issource constrains
# the input, but escaping them (as the later entries do) would be safer.
398 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
399 (r'diff.gz', ('debian_diff',)),
400 (r'tar.gz', ('native_tar_gz', 'native_tar')),
401 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
402 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
403 (r'tar\.(gz|bz2)', ('native_tar',)),
404 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
407 for f in dsc_files.keys():
408 m = re_issource.match(f)
410 rejmsg.append("%s: %s in Files field not recognised as source."
414 # Populate 'has' dictionary by resolving keys in lookup table
416 for regex, keys in ftype_lookup:
417 if re.match(regex, m.group(3)):
423 # File does not match anything in lookup table; reject
# NOTE(review): `reject` is not defined in this module — this line would
# raise NameError if ever reached; it presumably should be rejmsg.append().
425 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
427 # Check for multiple files
428 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
429 if has[file_type] > 1:
430 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
432 # Source format specific tests
434 format = get_format_from_string(dsc['format'])
436 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
439 except UnknownFormatError:
440 # Not an error here for now
445 ################################################################################
# Reject any "checksums-<name>" field whose <name> is not in known_hashes.
447 def check_hash_fields(what, manifest):
449 check_hash_fields ensures that there are no checksum fields in the
450 given dict that we do not know about.
454 hashes = map(lambda x: x[0], known_hashes)
455 for field in manifest:
456 if field.startswith("checksums-"):
457 hashname = field.split("-",1)[1]
458 if hashname not in hashes:
459 rejmsg.append("Unsupported checksum field for %s "\
460 "in %s" % (hashname, what))
463 ################################################################################
# If the .changes Format version is new enough to carry this hash, import the
# values from the changes; otherwise fall back to computing them (the
# fallback dispatch lines are missing from this excerpt).
465 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
466 if format >= version:
467 # The version should contain the specified hash.
470 # Import hashes from the changes
471 rejmsg = parse_checksums(".changes", files, changes, hashname)
475 # We need to calculate the hash because it can't possibly
478 return func(".changes", files, hashname, hashfunc)
480 # We could add the orig which might be in the pool to the files dict to
481 # access the checksums easily.
# Verify every Checksums-<hashname> entry present in the .dsc against the
# actual files; verification itself is delegated to check_hash().
483 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
485 ensure_dsc_hashes' task is to ensure that each and every *present* hash
486 in the dsc is correct, i.e. identical to the changes file and if necessary
487 the pool. The latter task is delegated to check_hash.
# Absent checksum fields are not an error — only *present* ones are checked.
491 if not dsc.has_key('Checksums-%s' % (hashname,)):
493 # Import hashes from the dsc
494 parse_checksums(".dsc", dsc_files, dsc, hashname)
496 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
499 ################################################################################
# Parse a "Checksums-<hashname>" field (lines of "checksum size filename")
# from `manifest` into the per-file dicts in `files`, returning reject
# messages for malformed lines, size mismatches, and files lacking an entry.
501 def parse_checksums(where, files, manifest, hashname):
503 field = 'checksums-%s' % hashname
504 if not field in manifest:
506 for line in manifest[field].split('\n'):
509 clist = line.strip().split(' ')
511 checksum, size, checkfile = clist
513 rejmsg.append("Cannot parse checksum line [%s]" % (line))
515 if not files.has_key(checkfile):
516 # TODO: check for the file's entry in the original files dict, not
517 # the one modified by (auto)byhand and other weird stuff
518 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
519 # (file, hashname, where))
# Note: both sides here are strings, so this relies on the manifest size
# being formatted identically to files[...]["size"].
521 if not files[checkfile]["size"] == size:
522 rejmsg.append("%s: size differs for files and checksums-%s entry "\
523 "in %s" % (checkfile, hashname, where))
525 files[checkfile][hash_key(hashname)] = checksum
526 for f in files.keys():
527 if not files[f].has_key(hash_key(hashname)):
# BUG(review): this message interpolates `checkfile` — the *last* file seen
# in the parsing loop above — instead of `f`, the file actually missing its
# checksum entry, so the reported filename is wrong.
528 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
532 ################################################################################
534 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Build a dict of file name -> {size, section, priority, component, <hash>}
# from a Files-style field of a parsed .changes/.dsc. .changes entries carry
# five tokens (md5 size section priority name); .dsc entries only three.
536 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
539 # Make sure we have a Files: field to parse...
540 if not changes.has_key(field):
541 raise NoFilesFieldError
543 # Validate .changes Format: field
545 validate_changes_format(parse_format(changes['format']), field)
547 includes_section = (not is_a_dsc) and field == "files"
549 # Parse each entry/line:
550 for i in changes[field].split('\n'):
554 section = priority = ""
557 (md5, size, section, priority, name) = s
559 (md5, size, name) = s
561 raise ParseChangesError, i
568 (section, component) = extract_component_from_section(section)
570 files[name] = dict(size=size, section=section,
571 priority=priority, component=component)
572 files[name][hashname] = md5
576 ################################################################################
578 # see http://bugs.debian.org/619131
# Parse the Package-Set field of a .dsc ("[type:]name section priority" per
# line) into a dict of package name -> metadata dict.
579 def build_package_set(dsc, session = None):
580 if not dsc.has_key("package-set"):
585 for line in dsc["package-set"].split("\n"):
589 (name, section, priority) = line.split()
590 (section, component) = extract_component_from_section(section)
# An optional "type:" prefix (e.g. "src:foo") overrides the package type.
593 if name.find(":") != -1:
594 (package_type, name) = name.split(":", 1)
595 if package_type == "src":
598 # Validate type if we have a session
599 if session and get_override_type(package_type, session) is None:
600 # Maybe just warn and ignore? exit(1) might be a bit hard...
# BUG(review): this module *is* daklib.utils — `utils.fubar` is a NameError
# here unless the module imports itself; it should just call fubar().
601 utils.fubar("invalid type (%s) in Package-Set." % (package_type))
# "dsc" entries win over existing entries; others only fill empty slots.
608 if package_type == "dsc":
611 if not packages.has_key(name) or packages[name]["type"] == "dsc":
612 packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
616 ################################################################################
# Send a mail via Dinstall::SendmailCommand, optionally filtering recipients
# through Dinstall::MailWhiteList. Accepts either a message string (dumped to
# a temp file) or an existing filename — never both.
618 def send_mail (message, filename=""):
619 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
621 # Check whether we're supposed to be sending mail
622 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
625 # If we've been passed a string dump it into a temporary file
627 (fd, filename) = tempfile.mkstemp()
628 os.write (fd, message)
631 if Cnf.has_key("Dinstall::MailWhiteList") and \
632 Cnf["Dinstall::MailWhiteList"] != "":
633 message_in = open_file(filename)
634 message_raw = modemail.message_from_file(message_in)
# Whitelist entries are either regexes (re_re_mark prefix) or literal
# addresses, compiled either way for uniform matching.
638 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
640 for line in whitelist_in:
641 if not re_whitespace_comment.match(line):
642 if re_re_mark.match(line):
643 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
645 whitelist.append(re.compile(re.escape(line.strip())))
# Filter each recipient header down to whitelisted addresses only.
650 fields = ["To", "Bcc", "Cc"]
653 value = message_raw.get(field, None)
656 for item in value.split(","):
657 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
663 if not mail_whitelisted:
664 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
668 # Doesn't have any mail in whitelist so remove the header
670 del message_raw[field]
672 message_raw.replace_header(field, ', '.join(match))
674 # Change message fields in order if we don't have a To header
675 if not message_raw.has_key("To"):
678 if message_raw.has_key(field):
679 message_raw[fields[-1]] = message_raw[field]
680 del message_raw[field]
683 # Clean up any temporary files
684 # and return, as we removed all recipients.
686 os.unlink (filename);
# NOTE(review): O_EXCL without O_CREAT has undefined/no effect when opening
# an existing file, and mode 0700 is ignored without O_CREAT — this open
# deserves a second look.
689 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
690 os.write (fd, message_raw.as_string(True));
# Invoke sendmail with the message on stdin via a shell redirect.
694 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
696 raise SendmailFailedError, output
698 # Clean up any temporary files
702 ################################################################################
# Return the pool sub-path for a source package: "lib" packages pool under
# their first four characters, everything else under the first character.
# (The component-separator handling lines are missing from this excerpt.)
704 def poolify (source, component):
707 if source[:3] == "lib":
708 return component + source[:4] + '/' + source + '/'
710 return component + source[:1] + '/' + source + '/'
712 ################################################################################
# Move `src` to `dest` (file or directory target), creating missing parent
# directories group-writably, refusing to overwrite unless `overwrite` is
# set. Implemented as copy2 + chmod (the unlink of `src` and the umask
# restore are on lines missing from this excerpt).
714 def move (src, dest, overwrite = 0, perms = 0664):
715 if os.path.exists(dest) and os.path.isdir(dest):
718 dest_dir = os.path.dirname(dest)
719 if not os.path.exists(dest_dir):
720 umask = os.umask(00000)
721 os.makedirs(dest_dir, 02775)
723 #print "Moving %s to %s..." % (src, dest)
724 if os.path.exists(dest) and os.path.isdir(dest):
725 dest += '/' + os.path.basename(src)
726 # Don't overwrite unless forced to
727 if os.path.exists(dest):
729 fubar("Can't move %s to %s - file already exists." % (src, dest))
731 if not os.access(dest, os.W_OK):
732 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
733 shutil.copy2(src, dest)
734 os.chmod(dest, perms)
# Copy `src` to `dest`; same shape as move() above but raises
# FileExistsError/CantOverwriteError instead of calling fubar(), and leaves
# `src` in place.
737 def copy (src, dest, overwrite = 0, perms = 0664):
738 if os.path.exists(dest) and os.path.isdir(dest):
741 dest_dir = os.path.dirname(dest)
742 if not os.path.exists(dest_dir):
743 umask = os.umask(00000)
744 os.makedirs(dest_dir, 02775)
746 #print "Copying %s to %s..." % (src, dest)
747 if os.path.exists(dest) and os.path.isdir(dest):
748 dest += '/' + os.path.basename(src)
749 # Don't overwrite unless forced to
750 if os.path.exists(dest):
752 raise FileExistsError
754 if not os.access(dest, os.W_OK):
755 raise CantOverwriteError
756 shutil.copy2(src, dest)
757 os.chmod(dest, perms)
759 ################################################################################
# NOTE(review): the enclosing def line is missing from this excerpt; the
# visible body looks up a per-host "Config::<fqdn>::DatabaseHostname" value
# from Cnf and returns it when set.
762 res = socket.getfqdn()
763 database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
764 if database_hostname:
765 return database_hostname
# Resolve the dak config file to use: $DAK_CONFIG wins, then (when allowed)
# a per-user config, then the per-host "Config::<fqdn>::DakConfig", then the
# compiled-in default.
769 def which_conf_file ():
770 if os.getenv('DAK_CONFIG'):
771 return os.getenv('DAK_CONFIG')
773 res = socket.getfqdn()
774 # In case we allow local config files per user, try if one exists
775 if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
776 homedir = os.getenv("HOME")
# BUG(review): os.path.join discards `homedir` because the second argument
# is absolute — confpath is always "/etc/dak.conf", never a per-user path.
777 confpath = os.path.join(homedir, "/etc/dak.conf")
778 if os.path.exists(confpath):
# BUG(review): this reads `default_config`, not the just-located `confpath`,
# so the local config found above is never actually loaded.
779 apt_pkg.ReadConfigFileISC(Cnf,default_config)
781 # We are still in here, so there is no local config file or we do
782 # not allow local files. Do the normal stuff.
783 if Cnf.get("Config::" + res + "::DakConfig"):
784 return Cnf["Config::" + res + "::DakConfig"]
786 return default_config
# Resolve the apt config file analogously to which_conf_file() above.
788 def which_apt_conf_file ():
789 res = socket.getfqdn()
790 # In case we allow local config files per user, try if one exists
791 if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
792 homedir = os.getenv("HOME")
# BUG(review): same two defects as which_conf_file — the absolute second arg
# makes os.path.join ignore homedir, and `default_config` is read instead of
# the located `confpath`.
793 confpath = os.path.join(homedir, "/etc/dak.conf")
794 if os.path.exists(confpath):
795 apt_pkg.ReadConfigFileISC(Cnf,default_config)
797 if Cnf.get("Config::" + res + "::AptConfig"):
798 return Cnf["Config::" + res + "::AptConfig"]
800 return default_apt_config
# Return the per-host forward-alias file path when it exists (the return
# lines are missing from this excerpt).
802 def which_alias_file():
803 hostname = socket.getfqdn()
804 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
805 if os.path.exists(aliasfn):
810 ################################################################################
# Load a template file and replace every key of subst_map with str(value).
# Plain string replacement — keys match literally, in dict iteration order.
812 def TemplateSubst(subst_map, filename):
813 """ Perform a substitution of template """
814 templatefile = open_file(filename)
815 template = templatefile.read()
816 for k, v in subst_map.iteritems():
817 template = template.replace(k, str(v))
821 ################################################################################
# Fatal error helper: print "E: ..." to stderr (the sys.exit(exit_code) line
# is missing from this excerpt). The trailing "W: ..." line belongs to the
# sibling warn() helper whose def line is also missing here.
823 def fubar(msg, exit_code=1):
824 sys.stderr.write("E: %s\n" % (msg))
828 sys.stderr.write("W: %s\n" % (msg))
830 ################################################################################
832 # Returns the user name with a laughable attempt at rfc822 conformancy
833 # (read: removing stray periods).
# NOTE(review): the def lines are missing from this excerpt; the first body
# returns the GECOS full name with periods stripped, the second returns the
# login name from the passwd entry.
835 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
838 return pwd.getpwuid(os.getuid())[0]
840 ################################################################################
850 return ("%d%s" % (c, t))
852 ################################################################################
# Normalise a parsed changes dict in place: replace the "architecture"
# string with a dict mapping each architecture name to 1 (set-like lookup).
854 def cc_fix_changes (changes):
855 o = changes.get("architecture", "")
857 del changes["architecture"]
858 changes["architecture"] = {}
860 changes["architecture"][j] = 1
# cmp()-style comparator over two .changes *filenames*: parses both, then
# orders by source name, version, presence of source, and finally filename.
# Several early-return lines are missing from this excerpt.
862 def changes_compare (a, b):
863 """ Sort by source name, source version, 'have source', and then by filename """
865 a_changes = parse_changes(a)
870 b_changes = parse_changes(b)
874 cc_fix_changes (a_changes)
875 cc_fix_changes (b_changes)
877 # Sort by source name
878 a_source = a_changes.get("source")
879 b_source = b_changes.get("source")
880 q = cmp (a_source, b_source)
884 # Sort by source version
885 a_version = a_changes.get("version", "0")
886 b_version = b_changes.get("version", "0")
887 q = apt_pkg.VersionCompare(a_version, b_version)
891 # Sort by 'have source'
892 a_has_source = a_changes["architecture"].get("source")
893 b_has_source = b_changes["architecture"].get("source")
894 if a_has_source and not b_has_source:
896 elif b_has_source and not a_has_source:
899 # Fall back to sort by filename
902 ################################################################################
# Find a non-existing filename by appending ".1", ".2", ... to `dest`,
# giving up with NoFreeFilenameError after `too_many` attempts. (The
# orig_dest/extra initialisation lines are missing from this excerpt.)
904 def find_next_free (dest, too_many=100):
907 while os.path.exists(dest) and extra < too_many:
908 dest = orig_dest + '.' + repr(extra)
910 if extra >= too_many:
911 raise NoFreeFilenameError
914 ################################################################################
# Join a sequence with `sep`, rendering None entries as empty strings
# (the resultlist initialisation line is missing from this excerpt).
916 def result_join (original, sep = '\t'):
918 for i in xrange(len(original)):
919 if original[i] == None:
920 resultlist.append("")
922 resultlist.append(original[i])
923 return sep.join(resultlist)
925 ################################################################################
# Prefix every line of `str` with `prefix`; blank lines are dropped unless
# include_blank_lines is set. Note the parameter shadows the builtin `str`.
927 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
929 for line in str.split('\n'):
931 if line or include_blank_lines:
932 out += "%s%s\n" % (prefix, line)
933 # Strip trailing new line
938 ################################################################################
# Map a .dak or .changes command-line argument to a readable .changes path,
# with error handling selected by require_changes (see docstring).
940 def validate_changes_file_arg(filename, require_changes=1):
942 'filename' is either a .changes or .dak file. If 'filename' is a
943 .dak file, it's changed to be the corresponding .changes file. The
944 function then checks if the .changes file a) exists and b) is
945 readable and returns the .changes filename if so. If there's a
946 problem, the next action depends on the option 'require_changes'
949 - If 'require_changes' == -1, errors are ignored and the .changes
950 filename is returned.
951 - If 'require_changes' == 0, a warning is given and 'None' is returned.
952 - If 'require_changes' == 1, a fatal error is raised.
957 orig_filename = filename
958 if filename.endswith(".dak"):
959 filename = filename[:-4]+".changes"
961 if not filename.endswith(".changes"):
962 error = "invalid file type; not a changes file"
964 if not os.access(filename,os.R_OK):
965 if os.path.exists(filename):
966 error = "permission denied"
968 error = "file not found"
971 if require_changes == 1:
972 fubar("%s: %s." % (orig_filename, error))
973 elif require_changes == 0:
974 warn("Skipping %s - %s" % (orig_filename, error))
976 else: # We only care about the .dak file
981 ################################################################################
984 return (arch != "source" and arch != "all")
986 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose.

    [] -> 'nothing', ['a'] -> 'a', ['a', 'b'] -> 'a and b',
    ['a', 'b', 'c'] -> 'a, b and c'.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
993 ################################################################################
998 (pkg, version, constraint) = atom
1000 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
1003 pp_deps.append(pp_dep)
1004 return " |".join(pp_deps)
1006 ################################################################################
1011 ################################################################################
# Translate -s/-c/-a command-line options into SQL WHERE-clause fragments
# (con_suites, con_architectures, con_components) plus a check_source flag.
1013 def parse_args(Options):
1014 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
1015 # XXX: This should go away and everything which calls it be converted
1016 # to use SQLA properly. For now, we'll just fix it not to use
1017 # the old Pg interface though
1018 session = DBConn().session()
1020 if Options["Suite"]:
1022 for suitename in split_args(Options["Suite"]):
1023 suite = get_suite(suitename, session=session)
# NOTE(review): the component branch below guards with `component is None`,
# but this branch dereferences `suite.suite_id` directly — if get_suite
# returns None for an unknown suite this raises AttributeError instead of
# warning; the inconsistency deserves a fix.
1024 if suite.suite_id is None:
1025 warn("suite '%s' not recognised." % (suite.suite_name))
1027 suite_ids_list.append(suite.suite_id)
1029 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
1031 fubar("No valid suite given.")
1036 if Options["Component"]:
1037 component_ids_list = []
1038 for componentname in split_args(Options["Component"]):
1039 component = get_component(componentname, session=session)
1040 if component is None:
1041 warn("component '%s' not recognised." % (componentname))
1043 component_ids_list.append(component.component_id)
1044 if component_ids_list:
1045 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1047 fubar("No valid component given.")
1051 # Process architecture
1052 con_architectures = ""
1054 if Options["Architecture"]:
1056 for archname in split_args(Options["Architecture"]):
# "source" is not a real architecture; it flips check_source instead.
1057 if archname == "source":
1060 arch = get_architecture(archname, session=session)
1062 warn("architecture '%s' not recognised." % (archname))
1064 arch_ids_list.append(arch.arch_id)
1066 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1068 if not check_source:
1069 fubar("No valid architecture given.")
1073 return (con_suites, con_architectures, con_components, check_source)
1075 ################################################################################
1077 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1078 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# NOTE(review): the def line is missing from this excerpt; the visible body
# walks to the innermost traceback frame, prints the standard traceback, then
# dumps every local variable of that frame for post-mortem debugging.
1081 tb = sys.exc_info()[2]
1088 frame = frame.f_back
1090 traceback.print_exc()
1092 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1093 frame.f_code.co_filename,
1095 for key, value in frame.f_locals.items():
1096 print "\t%20s = " % key,
# Repr of a local may itself raise; fall back to a placeholder.
1100 print "<unable to print>"
1102 ################################################################################
1104 def try_with_debug(function):
1112 ################################################################################
# cmp()-style comparator for architecture names; the remaining branches
# (which make 'source' sort before everything else) are missing from this
# excerpt.
1114 def arch_compare_sw (a, b):
1116 Function for use in sorting lists of architectures.
1118 Sorts normally except that 'source' dominates all others.
1121 if a == "source" and b == "source":
1130 ################################################################################
# Split an option value on commas if any are present, otherwise on
# whitespace (the actual split/return lines are missing from this excerpt);
# with dwim set, a trailing comma is treated as a fatal user error.
1132 def split_args (s, dwim=1):
1134 Split command line arguments which can be separated by either commas
1135 or whitespace. If dwim is set, it will complain about string ending
1136 in comma since this usually means someone did 'dak ls -a i386, m68k
1137 foo' or something and the inevitable confusion resulting from 'm68k'
1138 being treated as an argument is undesirable.
1141 if s.find(",") == -1:
1144 if s[-1:] == "," and dwim:
1145 fubar("split_args: found trailing comma, spurious space maybe?")
1148 ################################################################################
# Fork/exec `cmd` through /bin/sh while keeping `status_write` open in the
# child so gpgv's --status-fd output can be collected separately from
# stdout/stderr. Returns (output, status, exit_status). Many interior lines
# (fork, fd closing, the read loop bookkeeping) are missing from this
# excerpt.
1150 def gpgv_get_status_output(cmd, status_read, status_write):
1152 Our very own version of commands.getouputstatus(), hacked to support
1156 cmd = ['/bin/sh', '-c', cmd]
1157 p2cread, p2cwrite = os.pipe()
1158 c2pread, c2pwrite = os.pipe()
1159 errout, errin = os.pipe()
# Child: close every inherited fd except the status pipe before exec.
1169 for i in range(3, 256):
1170 if i != status_write:
1176 os.execvp(cmd[0], cmd)
# Parent: redirect the child's stdout/stderr ends for collection.
1182 os.dup2(c2pread, c2pwrite)
1183 os.dup2(errout, errin)
1185 output = status = ""
1187 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
# NOTE(review): 8196 looks like a typo for the conventional 8192; harmless,
# but worth normalising.
1190 r = os.read(fd, 8196)
1192 more_data.append(fd)
1193 if fd == c2pwrite or fd == errin:
1195 elif fd == status_read:
1198 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1200 pid, exit_status = os.waitpid(pid, 0)
1202 os.close(status_write)
1203 os.close(status_read)
1213 return output, status, exit_status
1215 ################################################################################
# Parse gpgv --status-fd output into a {keyword: args} dict plus an
# accumulated internal_error string for malformed/duplicate lines.
1217 def process_gpgv_output(status):
1218 # Process the status-fd output
1221 for line in status.split('\n'):
1225 split = line.split()
1227 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1229 (gnupg, keyword) = split[:2]
1230 if gnupg != "[GNUPG:]":
1231 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# These three tokens may legitimately appear more than once per run.
1234 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1235 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1238 keywords[keyword] = args
1240 return (keywords, internal_error)
1242 ################################################################################
# Fetch the public key that signed `filename` from a keyserver into
# `keyring`. Runs gpgv against an empty keyring to provoke NO_PUBKEY and
# learn the fingerprint, then gpg --recv-key. Returns an error string on
# failure (success-return lines are missing from this excerpt).
1244 def retrieve_key (filename, keyserver=None, keyring=None):
1246 Retrieve the key that signed 'filename' from 'keyserver' and
1247 add it to 'keyring'. Returns nothing on success, or an error message
1251 # Defaults for keyserver and keyring
1253 keyserver = Cnf["Dinstall::KeyServer"]
1255 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
# re_taint_free guards the shell command built below from injection.
1257 # Ensure the filename contains no shell meta-characters or other badness
1258 if not re_taint_free.match(filename):
1259 return "%s: tainted filename" % (filename)
1261 # Invoke gpgv on the file
1262 status_read, status_write = os.pipe()
1263 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1264 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1266 # Process the status-fd output
1267 (keywords, internal_error) = process_gpgv_output(status)
1269 return internal_error
1271 if not keywords.has_key("NO_PUBKEY"):
1272 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1274 fingerprint = keywords["NO_PUBKEY"][0]
1275 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1276 # it'll try to create a lockfile in /dev. A better solution might
1277 # be a tempfile or something.
1278 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1279 % (Cnf["Dinstall::SigningKeyring"])
1280 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1281 % (keyring, keyserver, fingerprint)
1282 (result, output) = commands.getstatusoutput(cmd)
1284 return "'%s' failed with exit code %s" % (cmd, result)
1288 ################################################################################
def gpg_keyring_args(keyrings=None):
    """
    Build the gpg/gpgv command-line fragment selecting the given keyrings.

    @type keyrings: list
    @param keyrings: keyring paths to use; when empty/None, the active
        Dinstall::GPGKeyring entries from the dak configuration are used

    @rtype: string
    @return: space-separated "--keyring PATH" arguments
    """
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join("--keyring %s" % keyring for keyring in keyrings)
1296 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.

    @rtype: tuple
    @return: (fingerprint, []) on success, (None, [error messages]) on
        failure
    """

    rejects = []

    # Ensure the filenames contain no shell meta-characters or other
    # badness, since they are interpolated into a shell command below.
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: list.append() takes exactly one argument; the old
        # two-argument calls (a leftover from the reject(msg, prefix)
        # interface) raised TypeError instead of reporting the error.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        # BUGFIX: guard against an empty argument list; 'key' used to be
        # left unbound, turning the reject into a NameError.
        key = args[0] if args else "UNKNOWN"
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = args[0] if args else "UNKNOWN"
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either an epoch timestamp or an ISO8601 string
            # (the latter contains a 'T')
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1430 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    cached = key_uid_email_cache.get(fingerprint)
    if cached is not None:
        return cached

    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        # Pull the address out of every uid line gpg printed
        for uid_line in output.split('\n'):
            match = re_gpg_uid.match(uid_line)
            if match:
                addresses.add(match.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1449 ################################################################################
1451 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' at 'max_length' columns.

    @type paragraph: string
    @param paragraph: text to wrap; runs of whitespace are collapsed

    @type max_length: int
    @param max_length: maximum line length; words longer than this are
        emitted unbroken on a line of their own

    @type prefix: string
    @param prefix: string prepended to every continuation line (not the
        first line)

    @rtype: string
    @return: the wrapped text, with no trailing newline or dangling
        prefix (the old implementation left a stray separator after
        oversized words)
    """
    words = paragraph.split()
    lines = []
    line = ""

    for word in words:
        if len(word) > max_length:
            # Oversized word: flush the current line, then emit the word
            # unbroken on its own line.
            if line:
                lines.append(line)
                line = ""
            lines.append(word)
        elif not line:
            line = word
        elif len(line) + 1 + len(word) > max_length:
            # Word doesn't fit on the current line: start a new one.
            lines.append(line)
            line = word
        else:
            line += ' ' + word

    if line:
        lines.append(line)

    # The prefix follows each newline, so only continuation lines carry it.
    return ('\n' + prefix).join(lines)
1482 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed 'src' path.
    """
    stripped_src = src.replace(root, '', 1)
    stripped_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop for each path component left in the destination's
    # directory gets us back up to the root.
    hops = '../' * len(stripped_dest_dir.split('/'))
    return hops + stripped_src
1495 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    @type directory: string
    @param directory: if non-null, the directory the file is pre-created in

    @type prefix: string
    @param prefix: the filename will be prefixed with it; default is "dak"

    @type suffix: string
    @param suffix: if non-null, the filename will end with it

    @rtype: tuple
    @return: a pair (fd, name)
    """
    (fd, name) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, name)
1509 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    @type parent: string
    @param parent: if non-null, the directory the new directory is
        pre-created in

    @type prefix: string
    @param prefix: the directory name will be prefixed with it; default
        is "dak"

    @type suffix: string
    @param suffix: if non-null, the directory name will end with it

    @rtype: string
    @return: a pathname to the new directory
    """
    new_dir = tempfile.mkdtemp(suffix, prefix, parent)
    return new_dir
1523 ################################################################################
def is_email_alias(email):
    """
    Check whether the user part of 'email' is listed in the alias file.

    The alias file is parsed once and cached in the module-level
    'alias_cache' set; subsequent calls are pure lookups.
    """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Each alias line is "name: target"; only the name matters here.
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                # BUGFIX: the file handle was previously leaked
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1537 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [entry for entry in os.listdir(from_dir) if entry.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1556 ################################################################################
# Module-level initialisation: build the shared apt_pkg configuration
# object that the rest of this module (and its importers) read dak
# settings from.
Cnf = apt_pkg.newConfiguration()
# Under the test suite (DAK_TEST set) the system config is not read.
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf, default_config)

# Layer a host-specific configuration file on top, if one is in use.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf, which_conf_file())
1567 ################################################################################
def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parsed a local copy, but let's document the source
    somewhere ;)

    returns a dict associating source package name with a list of open wnpp
    bug numbers (Yes, there might be more than one)
    """
    try:
        f = open(file)
        try:
            lines = f.readlines()
        finally:
            # BUGFIX: close the file handle instead of leaking it
            f.close()
    except IOError:
        # Single-argument print() is valid in both Python 2 and 3
        print("Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file)
        lines = []

    wnpp = {}

    # Each interesting line looks like "source: bug1|bug2|..."
    for line in lines:
        splited_line = line.split(": ", 1)
        if len(splited_line) > 1:
            wnpp[splited_line[0]] = splited_line[1].split("|")

    # Reduce each entry to the list of bug numbers it mentions
    for source in list(wnpp.keys()):
        bugs = []
        for wnpp_bug in wnpp[source]:
            # BUGFIX: raw-string regex, and guard against entries without
            # any digits (the old unconditional .group() crashed on them)
            match = re.search(r"\d+", wnpp_bug)
            if match:
                bugs.append(match.group())
        wnpp[source] = bugs

    return wnpp
1602 ################################################################################
1604 def get_packages_from_ftp(root, suite, component, architecture):
1606 Returns an object containing apt_pkg-parseable data collected by
1607 aggregating Packages.gz files gathered for each architecture.
1610 @param root: path to ftp archive root directory
1613 @param suite: suite to extract files from
1615 @type component: string
1616 @param component: component to extract files from
1618 @type architecture: string
1619 @param architecture: architecture to extract files from
1622 @return: apt_pkg class containing package data
1625 filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1626 (fd, temp_file) = temp_filename()
1627 (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
1629 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1630 filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1631 if os.path.exists(filename):
1632 (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
1634 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1635 packages = open_file(temp_file)
1636 Packages = apt_pkg.ParseTagFile(packages)
1637 os.unlink(temp_file)