2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_taint_free, re_gpg_uid, re_re_mark, \
49 re_whitespace_comment, re_issource
51 from srcformats import srcformats
52 from collections import defaultdict
54 ################################################################################
56 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
57 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
59 alias_cache = None #: Cache for email alias checks
60 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
62 # (hashname, function, earliest_changes_version)
63 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
64 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
67 def dak_getstatusoutput(cmd):
68 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
69 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
71 output = "".join(pipe.stdout.readlines())
78 commands.getstatusoutput = dak_getstatusoutput
80 ################################################################################
83 """ Escape html chars """
84 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
86 ################################################################################
88 def open_file(filename, mode='r'):
90 Open C{file}, return fileobject.
92 @type filename: string
93 @param filename: path/filename to open
96 @param mode: open mode
99 @return: open fileobject
101 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
105 f = open(filename, mode)
107 raise CantOpenError, filename
110 ################################################################################
112 def our_raw_input(prompt=""):
114 sys.stdout.write(prompt)
120 sys.stderr.write("\nUser interrupt (^D).\n")
123 ################################################################################
125 def extract_component_from_section(section):
128 if section.find('/') != -1:
129 component = section.split('/')[0]
131 # Expand default component
133 if Cnf.has_key("Component::%s" % section):
138 return (section, component)
140 ################################################################################
142 def parse_deb822(contents, signing_rules=0):
146 # Split the lines in the input, keeping the linebreaks.
147 lines = contents.splitlines(True)
150 raise ParseChangesError, "[Empty changes file]"
152 # Reindex by line number so we can easily verify the format of
158 indexed_lines[index] = line[:-1]
162 num_of_lines = len(indexed_lines.keys())
165 while index < num_of_lines:
167 line = indexed_lines[index]
169 if signing_rules == 1:
171 if index > num_of_lines:
172 raise InvalidDscError, index
173 line = indexed_lines[index]
174 if not line.startswith("-----BEGIN PGP SIGNATURE"):
175 raise InvalidDscError, index
180 if line.startswith("-----BEGIN PGP SIGNATURE"):
182 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
184 if signing_rules == 1:
185 while index < num_of_lines and line != "":
187 line = indexed_lines[index]
189 # If we're not inside the signed data, don't process anything
190 if signing_rules >= 0 and not inside_signature:
192 slf = re_single_line_field.match(line)
194 field = slf.groups()[0].lower()
195 changes[field] = slf.groups()[1]
199 changes[field] += '\n'
201 mlf = re_multi_line_field.match(line)
204 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
205 if first == 1 and changes[field] != "":
206 changes[field] += '\n'
208 changes[field] += mlf.groups()[0] + '\n'
212 if signing_rules == 1 and inside_signature:
213 raise InvalidDscError, index
215 changes["filecontents"] = "".join(lines)
217 if changes.has_key("source"):
218 # Strip the source version in brackets from the source field,
219 # put it in the "source-version" field instead.
220 srcver = re_srchasver.search(changes["source"])
222 changes["source"] = srcver.group(1)
223 changes["source-version"] = srcver.group(2)
226 raise ParseChangesError, error
230 ################################################################################
232 def parse_changes(filename, signing_rules=0):
234 Parses a changes file and returns a dictionary where each field is a
235 key. The mandatory first argument is the filename of the .changes
238 signing_rules is an optional argument:
240 - If signing_rules == -1, no signature is required.
241 - If signing_rules == 0 (the default), a signature is required.
242 - If signing_rules == 1, it turns on the same strict format checking
245 The rules for (signing_rules == 1)-mode are:
247 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
248 followed by any PGP header data and must end with a blank line.
250 - The data section must end with a blank line and must be followed by
251 "-----BEGIN PGP SIGNATURE-----".
254 changes_in = open_file(filename)
255 content = changes_in.read()
258 unicode(content, 'utf-8')
260 raise ChangesUnicodeError, "Changes file not proper utf-8"
261 return parse_deb822(content, signing_rules)
263 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used to store the given hash (e.g. "sha1" -> "sha1sum")."""
    return hashname + "sum"
268 ################################################################################
270 def create_hash(where, files, hashname, hashfunc):
272 create_hash extends the passed files dict with the given hash by
273 iterating over all files on disk and passing them to the hashing
278 for f in files.keys():
280 file_handle = open_file(f)
281 except CantOpenError:
282 rejmsg.append("Could not open file %s for checksumming" % (f))
285 files[f][hash_key(hashname)] = hashfunc(file_handle)
290 ################################################################################
292 def check_hash(where, files, hashname, hashfunc):
294 check_hash checks the given hash in the files dict against the actual
295 files on disk. The hash values need to be present consistently in
296 all file entries. It does not modify its input in any way.
300 for f in files.keys():
304 file_handle = open_file(f)
306 # Check for the hash entry, to not trigger a KeyError.
307 if not files[f].has_key(hash_key(hashname)):
308 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
312 # Actually check the hash for correctness.
313 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
314 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
316 except CantOpenError:
317 # TODO: This happens when the file is in the pool.
318 # warn("Cannot open file %s" % f)
325 ################################################################################
327 def check_size(where, files):
329 check_size checks the file sizes in the passed files dict against the
334 for f in files.keys():
339 # TODO: This happens when the file is in the pool.
343 actual_size = entry[stat.ST_SIZE]
344 size = int(files[f]["size"])
345 if size != actual_size:
346 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
347 % (f, actual_size, size, where))
350 ################################################################################
352 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
354 Verify that the files listed in the Files field of the .dsc are
355 those expected given the announced Format.
357 @type dsc_filename: string
358 @param dsc_filename: path of .dsc file
361 @param dsc: the content of the .dsc parsed by C{parse_changes()}
363 @type dsc_files: dict
364 @param dsc_files: the file list returned by C{build_file_list()}
367 @return: all errors detected
371 # Parse the file if needed
373 dsc = parse_changes(dsc_filename, signing_rules=1);
375 if dsc_files is None:
376 dsc_files = build_file_list(dsc, is_a_dsc=1)
378 # Ensure .dsc lists proper set of source files according to the format
380 has = defaultdict(lambda: 0)
383 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
384 (r'diff.gz', ('debian_diff',)),
385 (r'tar.gz', ('native_tar_gz', 'native_tar')),
386 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
387 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
388 (r'tar\.(gz|bz2)', ('native_tar',)),
389 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
392 for f in dsc_files.keys():
393 m = re_issource.match(f)
395 rejmsg.append("%s: %s in Files field not recognised as source."
399 # Populate 'has' dictionary by resolving keys in lookup table
401 for regex, keys in ftype_lookup:
402 if re.match(regex, m.group(3)):
408 # File does not match anything in lookup table; reject
410 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
412 # Check for multiple files
413 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
414 if has[file_type] > 1:
415 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
417 # Source format specific tests
418 for format in srcformats:
419 if format.re_format.match(dsc['format']):
421 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
427 ################################################################################
429 def check_hash_fields(what, manifest):
431 check_hash_fields ensures that there are no checksum fields in the
432 given dict that we do not know about.
436 hashes = map(lambda x: x[0], known_hashes)
437 for field in manifest:
438 if field.startswith("checksums-"):
439 hashname = field.split("-",1)[1]
440 if hashname not in hashes:
441 rejmsg.append("Unsupported checksum field for %s "\
442 "in %s" % (hashname, what))
445 ################################################################################
447 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
448 if format >= version:
449 # The version should contain the specified hash.
452 # Import hashes from the changes
453 rejmsg = parse_checksums(".changes", files, changes, hashname)
457 # We need to calculate the hash because it can't possibly
460 return func(".changes", files, hashname, hashfunc)
462 # We could add the orig which might be in the pool to the files dict to
463 # access the checksums easily.
465 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
467 ensure_dsc_hashes' task is to ensure that each and every *present* hash
468 in the dsc is correct, i.e. identical to the changes file and if necessary
469 the pool. The latter task is delegated to check_hash.
473 if not dsc.has_key('Checksums-%s' % (hashname,)):
475 # Import hashes from the dsc
476 parse_checksums(".dsc", dsc_files, dsc, hashname)
478 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
481 ################################################################################
483 def parse_checksums(where, files, manifest, hashname):
485 field = 'checksums-%s' % hashname
486 if not field in manifest:
488 for line in manifest[field].split('\n'):
491 clist = line.strip().split(' ')
493 checksum, size, checkfile = clist
495 rejmsg.append("Cannot parse checksum line [%s]" % (line))
497 if not files.has_key(checkfile):
498 # TODO: check for the file's entry in the original files dict, not
499 # the one modified by (auto)byhand and other weird stuff
500 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
501 # (file, hashname, where))
503 if not files[checkfile]["size"] == size:
504 rejmsg.append("%s: size differs for files and checksums-%s entry "\
505 "in %s" % (checkfile, hashname, where))
507 files[checkfile][hash_key(hashname)] = checksum
508 for f in files.keys():
509 if not files[f].has_key(hash_key(hashname)):
510 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
514 ################################################################################
516 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
518 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
521 # Make sure we have a Files: field to parse...
522 if not changes.has_key(field):
523 raise NoFilesFieldError
525 # Make sure we recognise the format of the Files: field
526 format = re_verwithext.search(changes.get("format", "0.0"))
528 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
530 format = format.groups()
531 if format[1] == None:
532 format = int(float(format[0])), 0, format[2]
534 format = int(format[0]), int(format[1]), format[2]
535 if format[2] == None:
539 # format = (0,0) are missing format headers of which we still
540 # have some in the archive.
541 if format != (1,0) and format != (0,0) and \
542 format != (3,0,"quilt") and format != (3,0,"native"):
543 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
545 if (format < (1,5) or format > (1,8)):
546 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
547 if field != "files" and format < (1,8):
548 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
550 includes_section = (not is_a_dsc) and field == "files"
552 # Parse each entry/line:
553 for i in changes[field].split('\n'):
557 section = priority = ""
560 (md5, size, section, priority, name) = s
562 (md5, size, name) = s
564 raise ParseChangesError, i
571 (section, component) = extract_component_from_section(section)
573 files[name] = Dict(size=size, section=section,
574 priority=priority, component=component)
575 files[name][hashname] = md5
579 ################################################################################
581 def send_mail (message, filename=""):
582 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
584 # If we've been passed a string dump it into a temporary file
586 (fd, filename) = tempfile.mkstemp()
587 os.write (fd, message)
590 if Cnf.has_key("Dinstall::MailWhiteList") and \
591 Cnf["Dinstall::MailWhiteList"] != "":
592 message_in = open_file(filename)
593 message_raw = modemail.message_from_file(message_in)
597 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
599 for line in whitelist_in:
600 if not re_whitespace_comment.match(line):
601 if re_re_mark.match(line):
602 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
604 whitelist.append(re.compile(re.escape(line.strip())))
609 fields = ["To", "Bcc", "Cc"]
612 value = message_raw.get(field, None)
615 for item in value.split(","):
616 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
622 if not mail_whitelisted:
623 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
627 # Doesn't have any mail in whitelist so remove the header
629 del message_raw[field]
631 message_raw.replace_header(field, string.join(match, ", "))
633 # Change message fields in order if we don't have a To header
634 if not message_raw.has_key("To"):
637 if message_raw.has_key(field):
638 message_raw[fields[-1]] = message_raw[field]
639 del message_raw[field]
642 # Clean up any temporary files
643 # and return, as we removed all recipients.
645 os.unlink (filename);
648 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
649 os.write (fd, message_raw.as_string(True));
653 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
655 raise SendmailFailedError, output
657 # Clean up any temporary files
661 ################################################################################
663 def poolify (source, component):
666 if source[:3] == "lib":
667 return component + source[:4] + '/' + source + '/'
669 return component + source[:1] + '/' + source + '/'
671 ################################################################################
673 def move (src, dest, overwrite = 0, perms = 0664):
674 if os.path.exists(dest) and os.path.isdir(dest):
677 dest_dir = os.path.dirname(dest)
678 if not os.path.exists(dest_dir):
679 umask = os.umask(00000)
680 os.makedirs(dest_dir, 02775)
682 #print "Moving %s to %s..." % (src, dest)
683 if os.path.exists(dest) and os.path.isdir(dest):
684 dest += '/' + os.path.basename(src)
685 # Don't overwrite unless forced to
686 if os.path.exists(dest):
688 fubar("Can't move %s to %s - file already exists." % (src, dest))
690 if not os.access(dest, os.W_OK):
691 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
692 shutil.copy2(src, dest)
693 os.chmod(dest, perms)
696 def copy (src, dest, overwrite = 0, perms = 0664):
697 if os.path.exists(dest) and os.path.isdir(dest):
700 dest_dir = os.path.dirname(dest)
701 if not os.path.exists(dest_dir):
702 umask = os.umask(00000)
703 os.makedirs(dest_dir, 02775)
705 #print "Copying %s to %s..." % (src, dest)
706 if os.path.exists(dest) and os.path.isdir(dest):
707 dest += '/' + os.path.basename(src)
708 # Don't overwrite unless forced to
709 if os.path.exists(dest):
711 raise FileExistsError
713 if not os.access(dest, os.W_OK):
714 raise CantOverwriteError
715 shutil.copy2(src, dest)
716 os.chmod(dest, perms)
718 ################################################################################
721 res = socket.gethostbyaddr(socket.gethostname())
722 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
723 if database_hostname:
724 return database_hostname
728 def which_conf_file ():
729 res = socket.gethostbyaddr(socket.gethostname())
730 # In case we allow local config files per user, try if one exists
731 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
732 homedir = os.getenv("HOME")
733 confpath = os.path.join(homedir, "/etc/dak.conf")
734 if os.path.exists(confpath):
735 apt_pkg.ReadConfigFileISC(Cnf,default_config)
737 # We are still in here, so there is no local config file or we do
738 # not allow local files. Do the normal stuff.
739 if Cnf.get("Config::" + res[0] + "::DakConfig"):
740 return Cnf["Config::" + res[0] + "::DakConfig"]
742 return default_config
744 def which_apt_conf_file ():
745 res = socket.gethostbyaddr(socket.gethostname())
746 # In case we allow local config files per user, try if one exists
747 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
748 homedir = os.getenv("HOME")
749 confpath = os.path.join(homedir, "/etc/dak.conf")
750 if os.path.exists(confpath):
751 apt_pkg.ReadConfigFileISC(Cnf,default_config)
753 if Cnf.get("Config::" + res[0] + "::AptConfig"):
754 return Cnf["Config::" + res[0] + "::AptConfig"]
756 return default_apt_config
758 def which_alias_file():
759 hostname = socket.gethostbyaddr(socket.gethostname())[0]
760 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
761 if os.path.exists(aliasfn):
766 ################################################################################
768 def TemplateSubst(map, filename):
769 """ Perform a substition of template """
770 templatefile = open_file(filename)
771 template = templatefile.read()
773 template = template.replace(x, str(map[x]))
777 ################################################################################
779 def fubar(msg, exit_code=1):
780 sys.stderr.write("E: %s\n" % (msg))
784 sys.stderr.write("W: %s\n" % (msg))
786 ################################################################################
788 # Returns the user name with a laughable attempt at rfc822 conformancy
789 # (read: removing stray periods).
791 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
794 return pwd.getpwuid(os.getuid())[0]
796 ################################################################################
806 return ("%d%s" % (c, t))
808 ################################################################################
810 def cc_fix_changes (changes):
811 o = changes.get("architecture", "")
813 del changes["architecture"]
814 changes["architecture"] = {}
816 changes["architecture"][j] = 1
818 def changes_compare (a, b):
819 """ Sort by source name, source version, 'have source', and then by filename """
821 a_changes = parse_changes(a)
826 b_changes = parse_changes(b)
830 cc_fix_changes (a_changes)
831 cc_fix_changes (b_changes)
833 # Sort by source name
834 a_source = a_changes.get("source")
835 b_source = b_changes.get("source")
836 q = cmp (a_source, b_source)
840 # Sort by source version
841 a_version = a_changes.get("version", "0")
842 b_version = b_changes.get("version", "0")
843 q = apt_pkg.VersionCompare(a_version, b_version)
847 # Sort by 'have source'
848 a_has_source = a_changes["architecture"].get("source")
849 b_has_source = b_changes["architecture"].get("source")
850 if a_has_source and not b_has_source:
852 elif b_has_source and not a_has_source:
855 # Fall back to sort by filename
858 ################################################################################
860 def find_next_free (dest, too_many=100):
863 while os.path.exists(dest) and extra < too_many:
864 dest = orig_dest + '.' + repr(extra)
866 if extra >= too_many:
867 raise NoFreeFilenameError
870 ################################################################################
872 def result_join (original, sep = '\t'):
874 for i in xrange(len(original)):
875 if original[i] == None:
876 resultlist.append("")
878 resultlist.append(original[i])
879 return sep.join(resultlist)
881 ################################################################################
883 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
885 for line in str.split('\n'):
887 if line or include_blank_lines:
888 out += "%s%s\n" % (prefix, line)
889 # Strip trailing new line
894 ################################################################################
896 def validate_changes_file_arg(filename, require_changes=1):
898 'filename' is either a .changes or .dak file. If 'filename' is a
899 .dak file, it's changed to be the corresponding .changes file. The
900 function then checks if the .changes file a) exists and b) is
901 readable and returns the .changes filename if so. If there's a
902 problem, the next action depends on the option 'require_changes'
905 - If 'require_changes' == -1, errors are ignored and the .changes
906 filename is returned.
907 - If 'require_changes' == 0, a warning is given and 'None' is returned.
908 - If 'require_changes' == 1, a fatal error is raised.
913 orig_filename = filename
914 if filename.endswith(".dak"):
915 filename = filename[:-4]+".changes"
917 if not filename.endswith(".changes"):
918 error = "invalid file type; not a changes file"
920 if not os.access(filename,os.R_OK):
921 if os.path.exists(filename):
922 error = "permission denied"
924 error = "file not found"
927 if require_changes == 1:
928 fubar("%s: %s." % (orig_filename, error))
929 elif require_changes == 0:
930 warn("Skipping %s - %s" % (orig_filename, error))
932 else: # We only care about the .dak file
937 ################################################################################
940 return (arch != "source" and arch != "all")
942 ################################################################################
def join_with_commas_and(list):
    """Join a list into an English phrase: "a, b and c".

    Returns "nothing" for an empty list and the sole element for a
    single-item list.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
949 ################################################################################
954 (pkg, version, constraint) = atom
956 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
959 pp_deps.append(pp_dep)
960 return " |".join(pp_deps)
962 ################################################################################
967 ################################################################################
969 def parse_args(Options):
970 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
971 # XXX: This should go away and everything which calls it be converted
972 # to use SQLA properly. For now, we'll just fix it not to use
973 # the old Pg interface though
974 session = DBConn().session()
978 for suitename in split_args(Options["Suite"]):
979 suite = get_suite(suitename, session=session)
980 if suite.suite_id is None:
981 warn("suite '%s' not recognised." % (suite.suite_name))
983 suite_ids_list.append(suite.suite_id)
985 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
987 fubar("No valid suite given.")
992 if Options["Component"]:
993 component_ids_list = []
994 for componentname in split_args(Options["Component"]):
995 component = get_component(componentname, session=session)
996 if component is None:
997 warn("component '%s' not recognised." % (componentname))
999 component_ids_list.append(component.component_id)
1000 if component_ids_list:
1001 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1003 fubar("No valid component given.")
1007 # Process architecture
1008 con_architectures = ""
1010 if Options["Architecture"]:
1012 for archname in split_args(Options["Architecture"]):
1013 if archname == "source":
1016 arch = get_architecture(archname, session=session)
1018 warn("architecture '%s' not recognised." % (archname))
1020 arch_ids_list.append(arch.arch_id)
1022 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1024 if not check_source:
1025 fubar("No valid architecture given.")
1029 return (con_suites, con_architectures, con_components, check_source)
1031 ################################################################################
1033 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1034 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1037 tb = sys.exc_info()[2]
1044 frame = frame.f_back
1046 traceback.print_exc()
1048 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1049 frame.f_code.co_filename,
1051 for key, value in frame.f_locals.items():
1052 print "\t%20s = " % key,
1056 print "<unable to print>"
1058 ################################################################################
1060 def try_with_debug(function):
1068 ################################################################################
1070 def arch_compare_sw (a, b):
1072 Function for use in sorting lists of architectures.
1074 Sorts normally except that 'source' dominates all others.
1077 if a == "source" and b == "source":
1086 ################################################################################
1088 def split_args (s, dwim=1):
1090 Split command line arguments which can be separated by either commas
1091 or whitespace. If dwim is set, it will complain about string ending
1092 in comma since this usually means someone did 'dak ls -a i386, m68k
1093 foo' or something and the inevitable confusion resulting from 'm68k'
1094 being treated as an argument is undesirable.
1097 if s.find(",") == -1:
1100 if s[-1:] == "," and dwim:
1101 fubar("split_args: found trailing comma, spurious space maybe?")
1104 ################################################################################
1106 def Dict(**dict): return dict
1108 ########################################
1110 def gpgv_get_status_output(cmd, status_read, status_write):
1112 Our very own version of commands.getouputstatus(), hacked to support
1116 cmd = ['/bin/sh', '-c', cmd]
1117 p2cread, p2cwrite = os.pipe()
1118 c2pread, c2pwrite = os.pipe()
1119 errout, errin = os.pipe()
1129 for i in range(3, 256):
1130 if i != status_write:
1136 os.execvp(cmd[0], cmd)
1142 os.dup2(c2pread, c2pwrite)
1143 os.dup2(errout, errin)
1145 output = status = ""
1147 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1150 r = os.read(fd, 8196)
1152 more_data.append(fd)
1153 if fd == c2pwrite or fd == errin:
1155 elif fd == status_read:
1158 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1160 pid, exit_status = os.waitpid(pid, 0)
1162 os.close(status_write)
1163 os.close(status_read)
1173 return output, status, exit_status
1175 ################################################################################
1177 def process_gpgv_output(status):
1178 # Process the status-fd output
1181 for line in status.split('\n'):
1185 split = line.split()
1187 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1189 (gnupg, keyword) = split[:2]
1190 if gnupg != "[GNUPG:]":
1191 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1194 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1195 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1198 keywords[keyword] = args
1200 return (keywords, internal_error)
1202 ################################################################################
1204 def retrieve_key (filename, keyserver=None, keyring=None):
1206 Retrieve the key that signed 'filename' from 'keyserver' and
1207 add it to 'keyring'. Returns nothing on success, or an error message
1211 # Defaults for keyserver and keyring
1213 keyserver = Cnf["Dinstall::KeyServer"]
1215 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1217 # Ensure the filename contains no shell meta-characters or other badness
1218 if not re_taint_free.match(filename):
1219 return "%s: tainted filename" % (filename)
1221 # Invoke gpgv on the file
1222 status_read, status_write = os.pipe()
1223 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1224 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1226 # Process the status-fd output
1227 (keywords, internal_error) = process_gpgv_output(status)
1229 return internal_error
1231 if not keywords.has_key("NO_PUBKEY"):
1232 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1234 fingerprint = keywords["NO_PUBKEY"][0]
1235 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1236 # it'll try to create a lockfile in /dev. A better solution might
1237 # be a tempfile or something.
1238 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1239 % (Cnf["Dinstall::SigningKeyring"])
1240 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1241 % (keyring, keyserver, fingerprint)
1242 (result, output) = commands.getstatusoutput(cmd)
1244 return "'%s' failed with exit code %s" % (cmd, result)
1248 ################################################################################
1250 def gpg_keyring_args(keyrings=None):
1252 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1254 return " ".join(["--keyring %s" % x for x in keyrings])
1256 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file.

    Returns a pair (fingerprint, rejects): the fingerprint of the signing
    key and an empty list when the signature is valid, or None and a
    non-empty list of reject messages when it is not.

    @type sig_filename: string
    @param sig_filename: the signed file (or the detached signature when
        'data_filename' is also given)

    @type data_filename: string
    @param data_filename: optional name of the file a detached signature
        applies to

    @type keyrings: list of strings
    @param keyrings: optional list of keyrings to use; defaults to the
        configured Dinstall::GPGKeyring

    @type autofetch: None, True or False
    @param autofetch: whether to try fetching an unknown signing key
        first; None means use the Dinstall::KeyAutoFetch config default
    """
    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    # Fall back to the archive's configured keyrings.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # Try to fetch the signing key; a non-empty error message is a reject.
    error_msg = retrieve_key(sig_filename)
    rejects.append(error_msg)
    return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    rejects.append("internal error while performing signature check on %s." % (sig_filename))
    # NOTE(review): list.append() takes exactly one argument -- the
    # two-argument calls below look like leftovers from an older
    # reject(msg, prefix) callback interface; confirm and fix upstream.
    rejects.append(internal_error, "")
    rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
    return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    # ERRSIG without NO_PUBKEY means gpgv could not verify for some other reason.
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        rejects.append("Signature made by expired key 0x%s" % (key))
    # Expired key is only fatal when gpgv did not also report a GOODSIG.
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # No 'T' in the value: presumably an epoch timestamp (older gpg),
        # rendered as a date; otherwise the raw value is used -- confirm.
        if timestamp.count("T") == 0:
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
        expiredate = "unknown (%s)" % (timestamp)
        expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    rejects.append("gpgv failed while checking %s." % (sig_filename))
    # Include gpgv's own output to make the reject mail debuggable.
    rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
    return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    rejects.append("internal error while checking signature on %s." % (sig_filename))
    # The first VALIDSIG argument is the fingerprint of the signing key.
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1390 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeated lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    # On a cache hit the cached set is reused.
    if addresses != None:
    # Cache miss: ask gpg to print the key; its uid lines carry the
    # email addresses.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    for l in output.split('\n'):
        # re_gpg_uid extracts the address from a matching uid line.
        m = re_gpg_uid.match(l)
        addresses.add(m.group(1))
    # Remember the result for subsequent lookups.
    key_uid_email_cache[fingerprint] = addresses
1409 ################################################################################
1411 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Word-wrap 'paragraph' at 'max_length' columns, prefixing
    continuation lines with 'prefix'."""
    # Work word by word, re-flowing the paragraph.
    words = paragraph.split()
    word_size = len(word)
    # A single word longer than the limit gets flushed onto its own line.
    if word_size > max_length:
        s += line + '\n' + prefix
        s += word + '\n' + prefix
    # +1 accounts for the separating space before the candidate word.
    new_length = len(line) + word_size + 1
    # Word would overflow the current line: emit the line and start anew.
    if new_length > max_length:
        s += line + '\n' + prefix
1442 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed 'src' path.
    """
    # Strip the archive root off both paths (first occurrence only).
    rel_target = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop for every directory component the link sits under.
    hops = '../' * len(link_dir.split('/'))
    return hops + rel_target
1455 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, the file is pre-created in that directory.
    If 'prefix' is non-null, the filename is prefixed with it (default "dak").
    If 'suffix' is non-null, the filename ends with it.

    Returns a pair (fd, name).
    """
    # tempfile.mkstemp takes (suffix, prefix, dir) -- note the order.
    (fd, name) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, name)
1469 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, the directory is pre-created inside it.
    If 'prefix' is non-null, the name is prefixed with it (default "dak").
    If 'suffix' is non-null, the name ends with it.

    Returns the pathname of the new directory.
    """
    # tempfile.mkdtemp takes (suffix, prefix, dir) -- note the order.
    path = tempfile.mkdtemp(suffix, prefix, parent)
    return path
1483 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Lazily populate the module-level cache from the alias file.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # Alias-file lines look like "alias: target"; only the alias
        # (local part) before the colon is cached.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part of the address.
    uid = email.split('@')[0]
    return uid in alias_cache
1497 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    # Much of the rest of p-u/p-a depends on being in the right place
    changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    # On failure to read the directory, bail out hard with a message.
    fubar("Failed to read list from directory %s (%s)" % (dir, e))
    return changes_files
1516 ################################################################################
# Module initialisation: load the dak configuration into the global 'Cnf'.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer a host-specific config file on top when one is in use.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1526 ###############################################################################