2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
30 from dak_exceptions import *
33 ################################################################################
# Fallback configuration paths, used when no per-host override is
# configured (see which_conf_file / which_apt_conf_file below).
35 default_config = "/etc/dak/dak.conf"
36 default_apt_config = "/etc/dak/apt.conf"
# Presumably a fingerprint -> uid/email lookup cache; it is not used in
# this extract — TODO confirm against the rest of the file.
39 key_uid_email_cache = {}
41 # (hashname, function, earliest_changes_version)
# Checksum algorithms we can verify.  The last tuple member is the first
# .changes Format version whose Checksums-<hash> field carries this hash.
42 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
43 ("sha256", apt_pkg.sha256sum, (1, 8))]
45 ################################################################################
# NOTE(review): fragment — the enclosing def (an HTML-escaping helper) is
# absent from this extract.  Substitutes each character matched by
# re_html_escaping with its replacement from the html_escaping map.
48 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
50 ################################################################################
# Open 'filename' with the given mode, raising CantOpenError on failure
# (presumably wrapping IOError — the try/except/return lines are missing
# from this extract, so confirm against the full source).
52 def open_file(filename, mode='r'):
54 f = open(filename, mode)
56 raise CantOpenError, filename
59 ################################################################################
# raw_input() wrapper: prints the prompt on stdout and reports a ^D /
# EOF interrupt on stderr.  NOTE(review): the read/except lines between
# these statements are missing from this extract.
61 def our_raw_input(prompt=""):
63 sys.stdout.write(prompt)
69 sys.stderr.write("\nUser interrupt (^D).\n")
72 ################################################################################
# Return (section, component) for a section string.  A "comp/section"
# style value yields the prefix as component; otherwise the component is
# presumably expanded via the Cnf "Component::<section>" map — several
# assignment lines are missing from this extract, verify in full source.
74 def extract_component_from_section(section):
77 if section.find('/') != -1:
78 component = section.split('/')[0]
80 # Expand default component
82 if Cnf.has_key("Component::%s" % section):
87 return (section, component)
89 ################################################################################
# Parse an RFC822/deb822-style blob (a .changes or .dsc) into a dict of
# lower-cased field name -> value.  signing_rules controls how inline
# PGP armour is handled (see parse_changes' docstring below).  Raises
# ParseChangesError / InvalidDscError on malformed input.
# NOTE(review): many original lines (index increments, state flags such
# as inside_signature/first, error assembly) are missing from this
# extract; do not assume the visible control flow is complete.
91 def parse_deb822(contents, signing_rules=0):
95 # Split the lines in the input, keeping the linebreaks.
96 lines = contents.splitlines(True)
99 raise ParseChangesError, "[Empty changes file]"
101 # Reindex by line number so we can easily verify the format of
107 indexed_lines[index] = line[:-1]
111 num_of_lines = len(indexed_lines.keys())
114 while index < num_of_lines:
116 line = indexed_lines[index]
118 if signing_rules == 1:
120 if index > num_of_lines:
121 raise InvalidDscError, index
122 line = indexed_lines[index]
# Strict mode requires the signature armour to follow immediately.
123 if not line.startswith("-----BEGIN PGP SIGNATURE"):
124 raise InvalidDscError, index
129 if line.startswith("-----BEGIN PGP SIGNATURE"):
131 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
133 if signing_rules == 1:
# Skip the PGP header block up to its terminating blank line.
134 while index < num_of_lines and line != "":
136 line = indexed_lines[index]
138 # If we're not inside the signed data, don't process anything
139 if signing_rules >= 0 and not inside_signature:
141 slf = re_single_line_field.match(line)
143 field = slf.groups()[0].lower()
144 changes[field] = slf.groups()[1]
148 changes[field] += '\n'
150 mlf = re_multi_line_field.match(line)
# A continuation line with no preceding single-line field is malformed.
153 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
154 if first == 1 and changes[field] != "":
155 changes[field] += '\n'
157 changes[field] += mlf.groups()[0] + '\n'
# Strict mode: reaching the end while still inside the signature is an error.
161 if signing_rules == 1 and inside_signature:
162 raise InvalidDscError, index
# Keep the raw text around for later re-parsing (see ensure_hashes).
164 changes["filecontents"] = "".join(lines)
166 if changes.has_key("source"):
167 # Strip the source version in brackets from the source field,
168 # put it in the "source-version" field instead.
169 srcver = re_srchasver.search(changes["source"])
171 changes["source"] = srcver.group(1)
172 changes["source-version"] = srcver.group(2)
175 raise ParseChangesError, error
179 ################################################################################
# Thin file-reading wrapper around parse_deb822 (above).
# NOTE(review): the docstring's closing quotes and the file close() call
# are among the lines missing from this extract.
181 def parse_changes(filename, signing_rules=0):
182 """Parses a changes file and returns a dictionary where each field is a
183 key. The mandatory first argument is the filename of the .changes
186 signing_rules is an optional argument:
188 o If signing_rules == -1, no signature is required.
189 o If signing_rules == 0 (the default), a signature is required.
190 o If signing_rules == 1, it turns on the same strict format checking
193 The rules for (signing_rules == 1)-mode are:
195 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
196 followed by any PGP header data and must end with a blank line.
198 o The data section must end with a blank line and must be followed by
199 "-----BEGIN PGP SIGNATURE-----".
202 changes_in = open_file(filename)
203 content = changes_in.read()
205 return parse_deb822(content, signing_rules)
207 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored,
    e.g. "sha1" -> "sha1sum"."""
    return hashname + 'sum'
212 ################################################################################
# Extend each entry of 'files' with its freshly computed <hashname>sum,
# collecting per-file open failures as rejection messages.
# NOTE(review): the rejmsg initialisation, try:, continue and return
# lines are missing from this extract.
214 def create_hash(where, files, hashname, hashfunc):
215 """create_hash extends the passed files dict with the given hash by
216 iterating over all files on disk and passing them to the hashing
220 for f in files.keys():
222 file_handle = open_file(f)
223 except CantOpenError:
224 rejmsg.append("Could not open file %s for checksumming" % (f))
# hashfunc (an apt_pkg digest helper) consumes the open file handle.
226 files[f][hash_key(hashname)] = hashfunc(file_handle)
231 ################################################################################
# Verify the stored <hashname>sum of every entry in 'files' against the
# file on disk; returns rejection messages (assembly lines missing here).
233 def check_hash(where, files, hashname, hashfunc):
234 """check_hash checks the given hash in the files dict against the actual
235 files on disk. The hash values need to be present consistently in
236 all file entries. It does not modify its input in any way."""
239 for f in files.keys():
243 file_handle = open_file(f)
245 # Check for the hash entry, to not trigger a KeyError.
246 if not files[f].has_key(hash_key(hashname)):
247 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
251 # Actually check the hash for correctness.
252 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
253 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
255 except CantOpenError:
256 # TODO: This happens when the file is in the pool.
257 # warn("Cannot open file %s" % f)
264 ################################################################################
# Compare each file's on-disk size (via os.stat, presumably — the stat
# call itself is among the missing lines) with the "size" recorded in
# the files dict; returns rejection messages.
266 def check_size(where, files):
267 """check_size checks the file sizes in the passed files dict against the
271 for f in files.keys():
276 # TODO: This happens when the file is in the pool.
280 actual_size = entry[stat.ST_SIZE]
# Stored sizes are strings (see build_file_list); normalise to int.
281 size = int(files[f]["size"])
282 if size != actual_size:
283 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
284 % (f, actual_size, size, where))
287 ################################################################################
# Reject any "checksums-*" field naming a hash that is not listed in the
# module-level known_hashes table.  (rejmsg init / return lines missing
# from this extract.)
289 def check_hash_fields(what, manifest):
290 """check_hash_fields ensures that there are no checksum fields in the
291 given dict that we do not know about."""
294 hashes = map(lambda x: x[0], known_hashes)
295 for field in manifest:
296 if field.startswith("checksums-"):
297 hashname = field.split("-",1)[1]
298 if hashname not in hashes:
299 rejmsg.append("Unsupported checksum field for %s "\
300 "in %s" % (hashname, what))
303 ################################################################################
# For one known hash: if the .changes Format version is new enough to
# carry a Checksums field, import it via parse_checksums; otherwise fall
# back to computing it ('func' is bound in lines missing from this
# extract — presumably create_hash; confirm in full source).
305 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
306 if format >= version:
307 # The version should contain the specified hash.
310 # Import hashes from the changes
311 rejmsg = parse_checksums(".changes", files, changes, hashname)
315 # We need to calculate the hash because it can't possibly
318 return func(".changes", files, hashname, hashfunc)
320 # We could add the orig which might be in the pool to the files dict to
321 # access the checksums easily.
# Verify any Checksums-<hash> field present in the .dsc against the
# files on disk; silently skips hashes the .dsc does not carry.
# (rejmsg init / return lines missing from this extract.)
323 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
324 """ensure_dsc_hashes' task is to ensure that each and every *present* hash
325 in the dsc is correct, i.e. identical to the changes file and if necessary
326 the pool. The latter task is delegated to check_hash."""
# Note the capitalised field name here vs. the lower-cased keys produced
# by parse_deb822 for .changes dicts.
329 if not dsc.has_key('Checksums-%s' % (hashname,)):
331 # Import hashes from the dsc
332 parse_checksums(".dsc", dsc_files, dsc, hashname)
334 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
337 ################################################################################
# Top-level driver: validate/compute every known hash for the .changes
# files and, for source uploads, the .dsc files.  Returns accumulated
# rejection messages (init/return lines missing from this extract).
339 def ensure_hashes(changes, dsc, files, dsc_files):
342 # Make sure we recognise the format of the Files: field in the .changes
343 format = changes.get("format", "0.0").split(".", 1)
345 format = int(format[0]), int(format[1])
# Fallback for a one-component version string such as "1".
347 format = int(float(format[0])), 0
349 # We need to deal with the original changes blob, as the fields we need
350 # might not be in the changes dict serialised into the .dak anymore.
351 orig_changes = parse_deb822(changes['filecontents'])
353 # Copy the checksums over to the current changes dict. This will keep
354 # the existing modifications to it intact.
355 for field in orig_changes:
356 if field.startswith('checksums-'):
357 changes[field] = orig_changes[field]
359 # Check for unsupported hashes
360 rejmsg.extend(check_hash_fields(".changes", changes))
361 rejmsg.extend(check_hash_fields(".dsc", dsc))
363 # We have to calculate the hash if we have an earlier changes version than
364 # the hash appears in rather than require it exist in the changes file
365 for hashname, hashfunc, version in known_hashes:
366 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
368 if "source" in changes["architecture"]:
369 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    """Import the <hashname> checksums from the Checksums-<hashname>
    field of 'manifest' into the 'files' dict, cross-checking sizes.

    'where' (e.g. ".changes" or ".dsc") only labels the messages.
    Returns a list of rejection message strings (empty on success).
    """
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        checksum, size, checkfile = line.strip().split(' ')
        if not checkfile in files:
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #               (file, hashname, where))
            continue
        # Sizes are kept as strings in the files dict, so compare as-is.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not hash_key(hashname) in files[f]:
            # Bug fix: report the file actually missing its checksum ('f'),
            # not whatever 'checkfile' was left over from the loop above.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
400 ################################################################################
402 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Parse the Files: (or Checksums-*) field of a parsed .changes/.dsc into
# a dict keyed by filename, each value carrying size/section/priority/
# component plus the per-line hash under <hashname>.
# NOTE(review): numerous lines (files init, the split of each entry into
# 's', several else branches, the return) are missing from this extract.
404 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
407 # Make sure we have a Files: field to parse...
408 if not changes.has_key(field):
409 raise NoFilesFieldError
411 # Make sure we recognise the format of the Files: field
412 format = re_verwithext.search(changes.get("format", "0.0"))
414 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
416 format = format.groups()
417 if format[1] == None:
418 format = int(float(format[0])), 0, format[2]
420 format = int(format[0]), int(format[1]), format[2]
421 if format[2] == None:
425 # format = (1,0) are the only formats we currently accept,
426 # format = (0,0) are missing format headers of which we still
427 # have some in the archive.
428 if format != (1,0) and format != (0,0):
429 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# Presumably the .dsc branch of the version check — the surrounding
# conditional structure is missing from this extract; verify.
431 if (format < (1,5) or format > (1,8)):
432 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# Checksums-* fields only appeared with Format 1.8.
433 if field != "files" and format < (1,8):
434 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
436 includes_section = (not is_a_dsc) and field == "files"
438 # Parse each entry/line:
439 for i in changes[field].split('\n'):
443 section = priority = ""
446 (md5, size, section, priority, name) = s
448 (md5, size, name) = s
450 raise ParseChangesError, i
457 (section, component) = extract_component_from_section(section)
459 files[name] = Dict(size=size, section=section,
460 priority=priority, component=component)
461 files[name][hashname] = md5
465 ################################################################################
# NOTE(review): the UTF-8 attempt (try/unicode(s,'utf-8')/except) is
# missing from this extract; only the latin-1 fallback path is visible.
467 def force_to_utf8(s):
468 """Forces a string to UTF-8. If the string isn't already UTF-8,
469 it's assumed to be ISO-8859-1."""
474 latin1_s = unicode(s,'iso8859-1')
475 return latin1_s.encode('utf-8')
# Probe the string's encoding by round-tripping through the ascii and
# utf-8 codecs (codecs.lookup(...)[1] is the encoder); build an RFC2047
# email.Header with the first charset that accepts it.  The try/except
# scaffolding and return lines are missing from this extract.
477 def rfc2047_encode(s):
478 """Encodes a (header) string per RFC2047 if necessary. If the
479 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
481 codecs.lookup('ascii')[1](s)
486 codecs.lookup('utf-8')[1](s)
487 h = email.Header.Header(s, 'utf-8', 998)
490 h = email.Header.Header(s, 'iso-8859-1', 998)
493 ################################################################################
495 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
496 # with it. I know - I'll fix the suckage and make things
# NOTE(review): this extract is missing several lines of the docstring
# and of the name/email extraction branches; the visible logic should
# not be assumed complete.
499 def fix_maintainer (maintainer):
500 """Parses a Maintainer or Changed-By field and returns:
501 (1) an RFC822 compatible version,
502 (2) an RFC2047 compatible version,
506 The name is forced to UTF-8 for both (1) and (3). If the name field
507 contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
508 switched to 'email (name)' format."""
509 maintainer = maintainer.strip()
# Empty input: all four result components are empty strings.
511 return ('', '', '', '')
# No angle brackets at all — name-less forms handled here (body missing).
513 if maintainer.find("<") == -1:
516 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
517 email = maintainer[1:-1]
520 m = re_parse_maintainer.match(maintainer)
522 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
526 # Get an RFC2047 compliant version of the name
527 rfc2047_name = rfc2047_encode(name)
529 # Force the name to be UTF-8
530 name = force_to_utf8(name)
# '.'/',' in the name would break "name <email>" form; flip the order.
532 if name.find(',') != -1 or name.find('.') != -1:
533 rfc822_maint = "%s (%s)" % (email, name)
534 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
536 rfc822_maint = "%s <%s>" % (name, email)
537 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
# buildd_* pseudo-addresses are exempt from the '@' sanity check.
539 if email.find("@") == -1 and email.find("buildd_") != 0:
540 raise ParseMaintError, "No @ found in email address part."
542 return (rfc822_maint, rfc2047_maint, name, email)
544 ################################################################################
546 # sendmail wrapper, takes _either_ a message string or a file as arguments
# Pipe 'message' (or the contents of 'filename') to the configured
# sendmail command; raises SendmailFailedError on a non-zero exit.
# NOTE(review): the branching on message vs. filename, os.close and the
# temp-file cleanup lines are missing from this extract.
547 def send_mail (message, filename=""):
548 # If we've been passed a string dump it into a temporary file
550 (fd, filename) = tempfile.mkstemp()
551 os.write (fd, message)
555 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
557 raise SendmailFailedError, output
559 # Clean up any temporary files
563 ################################################################################
# Return the pool sub-path for a source package: "lib" packages hash on
# their first four characters, everything else on the first one.
# (The component-suffix handling lines are missing from this extract.)
565 def poolify (source, component):
568 if source[:3] == "lib":
569 return component + source[:4] + '/' + source + '/'
571 return component + source[:1] + '/' + source + '/'
573 ################################################################################
# Move 'src' to 'dest' (file or directory target), creating the target
# directory 02775 if needed, refusing to clobber unless 'overwrite'.
# Implemented as copy2 + chmod; the final os.unlink(src) and the umask
# restore are among the lines missing from this extract.
575 def move (src, dest, overwrite = 0, perms = 0664):
576 if os.path.exists(dest) and os.path.isdir(dest):
579 dest_dir = os.path.dirname(dest)
580 if not os.path.exists(dest_dir):
# Temporarily clear the umask so makedirs gets exactly 02775.
581 umask = os.umask(00000)
582 os.makedirs(dest_dir, 02775)
584 #print "Moving %s to %s..." % (src, dest)
585 if os.path.exists(dest) and os.path.isdir(dest):
586 dest += '/' + os.path.basename(src)
587 # Don't overwrite unless forced to
588 if os.path.exists(dest):
590 fubar("Can't move %s to %s - file already exists." % (src, dest))
592 if not os.access(dest, os.W_OK):
593 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
594 shutil.copy2(src, dest)
595 os.chmod(dest, perms)
# Copy 'src' to 'dest'; same shape as move() above but raises
# FileExistsError / CantOverwriteError instead of calling fubar().
# (umask restore and some branch lines missing from this extract.)
598 def copy (src, dest, overwrite = 0, perms = 0664):
599 if os.path.exists(dest) and os.path.isdir(dest):
602 dest_dir = os.path.dirname(dest)
603 if not os.path.exists(dest_dir):
604 umask = os.umask(00000)
605 os.makedirs(dest_dir, 02775)
607 #print "Copying %s to %s..." % (src, dest)
608 if os.path.exists(dest) and os.path.isdir(dest):
# Directory target: append the source basename.
609 dest += '/' + os.path.basename(src)
610 # Don't overwrite unless forced to
611 if os.path.exists(dest):
613 raise FileExistsError
615 if not os.access(dest, os.W_OK):
616 raise CantOverwriteError
617 shutil.copy2(src, dest)
618 os.chmod(dest, perms)
620 ################################################################################
# NOTE(review): fragment — the enclosing def is missing from this
# extract.  Looks up a per-host "Config::<fqdn>::DatabaseHostname"
# override from Cnf (same pattern as which_conf_file below).
623 res = socket.gethostbyaddr(socket.gethostname())
624 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
625 if database_hostname:
626 return database_hostname
# Return the per-host dak config path from Cnf if one is configured for
# this machine's FQDN, otherwise the module default.
630 def which_conf_file ():
631 res = socket.gethostbyaddr(socket.gethostname())
632 if Cnf.get("Config::" + res[0] + "::DakConfig"):
633 return Cnf["Config::" + res[0] + "::DakConfig"]
635 return default_config
# Same per-host lookup as which_conf_file, for the apt configuration.
637 def which_apt_conf_file ():
638 res = socket.gethostbyaddr(socket.gethostname())
639 if Cnf.get("Config::" + res[0] + "::AptConfig"):
640 return Cnf["Config::" + res[0] + "::AptConfig"]
642 return default_apt_config
# Return this host's forward-alias file path if it exists.  The return
# statements (and the None fallback, presumably) are missing from this
# extract.
644 def which_alias_file():
645 hostname = socket.gethostbyaddr(socket.gethostname())[0]
646 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
647 if os.path.exists(aliasfn):
652 ################################################################################
654 # Escape characters which have meaning to SQL's regex comparison operator ('~')
655 # (woefully incomplete)
# NOTE(review): fragment — the enclosing def (SQL-regex escaping helper,
# per the comment above) is missing from this extract.  Doubled
# backslashes survive SQL string quoting.
658 s = s.replace('+', '\\\\+')
659 s = s.replace('.', '\\\\.')
662 ################################################################################
664 # Perform a substition of template
# Substitute every key of 'map' occurring in the template file with its
# value.  The loop header over map's keys, the close() and the return
# are missing from this extract.
665 def TemplateSubst(map, filename):
666 file = open_file(filename)
667 template = file.read()
669 template = template.replace(x,map[x])
673 ################################################################################
# fubar: print an error to stderr and (per the exit_code parameter —
# the sys.exit line is missing from this extract) terminate.
675 def fubar(msg, exit_code=1):
676 sys.stderr.write("E: %s\n" % (msg))
# NOTE(review): the line below is the body of a separate warn(msg)
# helper whose def line is missing from this extract.
680 sys.stderr.write("W: %s\n" % (msg))
682 ################################################################################
684 # Returns the user name with a laughable attempt at rfc822 conformancy
685 # (read: removing stray periods).
# NOTE(review): fragment — def line missing.  Returns the current
# user's GECOS full name with commas' tail and stray periods removed
# (see the comment above about rfc822 conformance).
687 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
689 ################################################################################
# NOTE(review): fragment — the enclosing def and the computation of
# 'c' (a count) and 't' (a unit suffix, presumably) are missing.
699 return ("%d%s" % (c, t))
701 ################################################################################
# Convert the space-separated "architecture" string of a parsed changes
# dict into a membership dict {arch: 1}.  The split of 'o' into 'j' is
# among the lines missing from this extract.
703 def cc_fix_changes (changes):
704 o = changes.get("architecture", "")
706 del changes["architecture"]
707 changes["architecture"] = {}
709 changes["architecture"][j] = 1
711 # Sort by source name, source version, 'have source', and then by filename
# cmp()-style comparator for .changes filenames: parses both files and
# orders by source name, then version, then "has source", falling back
# to the filename.  NOTE(review): the except handlers around each
# parse_changes call and several early returns are missing from this
# extract.
712 def changes_compare (a, b):
714 a_changes = parse_changes(a)
719 b_changes = parse_changes(b)
723 cc_fix_changes (a_changes)
724 cc_fix_changes (b_changes)
726 # Sort by source name
727 a_source = a_changes.get("source")
728 b_source = b_changes.get("source")
729 q = cmp (a_source, b_source)
733 # Sort by source version
734 a_version = a_changes.get("version", "0")
735 b_version = b_changes.get("version", "0")
736 q = apt_pkg.VersionCompare(a_version, b_version)
740 # Sort by 'have source'
741 a_has_source = a_changes["architecture"].get("source")
742 b_has_source = b_changes["architecture"].get("source")
743 if a_has_source and not b_has_source:
745 elif b_has_source and not a_has_source:
748 # Fall back to sort by filename
751 ################################################################################
# Find a non-existing variant of 'dest' by appending ".1", ".2", ...;
# raises NoFreeFilenameError after 'too_many' attempts.  The counter
# initialisation/increment and the return are missing from this extract.
753 def find_next_free (dest, too_many=100):
756 while os.path.exists(dest) and extra < too_many:
757 dest = orig_dest + '.' + repr(extra)
759 if extra >= too_many:
760 raise NoFreeFilenameError
763 ################################################################################
# Join a sequence with 'sep', substituting something (presumably "") for
# None entries — that branch's body and the list initialisation are
# missing from this extract.  Note 'list' shadows the builtin.
765 def result_join (original, sep = '\t'):
767 for i in xrange(len(original)):
768 if original[i] == None:
771 list.append(original[i])
772 return sep.join(list)
774 ################################################################################
# Prefix every (optionally also blank) line of 'str' with 'prefix'.
# The 'out' initialisation, the strip() of each line and the final
# return are missing from this extract.  Note 'str' shadows the builtin.
776 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
778 for line in str.split('\n'):
780 if line or include_blank_lines:
781 out += "%s%s\n" % (prefix, line)
782 # Strip trailing new line
787 ################################################################################
# NOTE(review): the docstring close, the final returns and some error
# plumbing are missing from this extract.
789 def validate_changes_file_arg(filename, require_changes=1):
790 """'filename' is either a .changes or .dak file. If 'filename' is a
791 .dak file, it's changed to be the corresponding .changes file. The
792 function then checks if the .changes file a) exists and b) is
793 readable and returns the .changes filename if so. If there's a
794 problem, the next action depends on the option 'require_changes'
797 o If 'require_changes' == -1, errors are ignored and the .changes
798 filename is returned.
799 o If 'require_changes' == 0, a warning is given and 'None' is returned.
800 o If 'require_changes' == 1, a fatal error is raised.
804 orig_filename = filename
805 if filename.endswith(".dak"):
# Map foo.dak -> foo.changes before validating.
806 filename = filename[:-4]+".changes"
808 if not filename.endswith(".changes"):
809 error = "invalid file type; not a changes file"
811 if not os.access(filename,os.R_OK):
812 if os.path.exists(filename):
813 error = "permission denied"
815 error = "file not found"
818 if require_changes == 1:
819 fubar("%s: %s." % (orig_filename, error))
820 elif require_changes == 0:
821 warn("Skipping %s - %s" % (orig_filename, error))
823 else: # We only care about the .dak file
828 ################################################################################
# NOTE(review): fragment — def line missing.  True for any concrete
# architecture, i.e. neither "source" nor "all".
831 return (arch != "source" and arch != "all")
833 ################################################################################
def join_with_commas_and(list):
    """Render *list* as an English enumeration, e.g. "a, b and c".

    An empty list yields "nothing"; a single element is returned as-is.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
840 ################################################################################
# NOTE(review): fragment — the enclosing def (a dependency pretty-
# printer) and the loop/initialisation lines are missing from this
# extract.  Renders "pkg (op version)" atoms joined with " |".
845 (pkg, version, constraint) = atom
847 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
850 pp_deps.append(pp_dep)
851 return " |".join(pp_deps)
853 ################################################################################
858 ################################################################################
860 # Handle -a, -c and -s arguments; returns them as SQL constraints
# Translate the -s/-c/-a command-line options into SQL "AND ... IN"
# constraint fragments, resolving names to database ids.  Returns
# (con_suites, con_architectures, con_components, check_source).
# NOTE(review): the suite-block preamble, the check_source flag handling
# and several else branches are missing from this extract.
861 def parse_args(Options):
865 for suite in split_args(Options["Suite"]):
866 suite_id = database.get_suite_id(suite)
868 warn("suite '%s' not recognised." % (suite))
870 suite_ids_list.append(suite_id)
872 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
874 fubar("No valid suite given.")
879 if Options["Component"]:
880 component_ids_list = []
881 for component in split_args(Options["Component"]):
882 component_id = database.get_component_id(component)
883 if component_id == -1:
884 warn("component '%s' not recognised." % (component))
886 component_ids_list.append(component_id)
887 if component_ids_list:
888 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
890 fubar("No valid component given.")
894 # Process architecture
895 con_architectures = ""
896 if Options["Architecture"]:
# "source" is special-cased (sets check_source, presumably) rather than
# resolved through the architecture table.
899 for architecture in split_args(Options["Architecture"]):
900 if architecture == "source":
903 architecture_id = database.get_architecture_id(architecture)
904 if architecture_id == -1:
905 warn("architecture '%s' not recognised." % (architecture))
907 arch_ids_list.append(architecture_id)
909 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
912 fubar("No valid architecture given.")
916 return (con_suites, con_architectures, con_components, check_source)
918 ################################################################################
920 # Inspired(tm) by Bryn Keller's print_exc_plus (See
921 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# NOTE(review): fragment — the enclosing def (a print_exc_plus-style
# traceback dumper, per the comment above) and its frame-walking loop
# are missing from this extract.
924 tb = sys.exc_info()[2]
933 traceback.print_exc()
935 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
936 frame.f_code.co_filename,
938 for key, value in frame.f_locals.items():
939 print "\t%20s = " % key,
# Repr of a local can itself raise; degrade gracefully.
943 print "<unable to print>"
945 ################################################################################
# Run 'function' and, on exception, presumably dump an extended
# traceback (body entirely missing from this extract — verify).
947 def try_with_debug(function):
955 ################################################################################
957 # Function for use in sorting lists of architectures.
958 # Sorts normally except that 'source' dominates all others.
# Architecture comparator where "source" sorts before everything else
# (see comment above).  All branches after the first are missing from
# this extract.
960 def arch_compare_sw (a, b):
961 if a == "source" and b == "source":
970 ################################################################################
972 # Split command line arguments which can be separated by either commas
973 # or whitespace. If dwim is set, it will complain about string ending
974 # in comma since this usually means someone did 'dak ls -a i386, m68k
975 # foo' or something and the inevitable confusion resulting from 'm68k'
976 # being treated as an argument is undesirable.
# Split an argument string on commas or whitespace (see comment above);
# with dwim set, a trailing comma is treated as a user error.  The two
# return statements are missing from this extract.
978 def split_args (s, dwim=1):
979 if s.find(",") == -1:
982 if s[-1:] == "," and dwim:
983 fubar("split_args: found trailing comma, spurious space maybe?")
986 ################################################################################
def Dict(**kwargs):
    """Tiny helper: build a dict straight from keyword arguments."""
    return kwargs
990 ########################################
992 # Our very own version of commands.getouputstatus(), hacked to support
# Fork/exec 'cmd' under /bin/sh while keeping the extra gpgv status fd
# open in the child, multiplexing stdout/stderr/status via select().
# Returns (output, status, exit_status).  NOTE(review): the fork, the
# fd dup2 setup, the read-loop bookkeeping and the final closes are
# largely missing from this extract — treat the visible flow as partial.
994 def gpgv_get_status_output(cmd, status_read, status_write):
995 cmd = ['/bin/sh', '-c', cmd]
996 p2cread, p2cwrite = os.pipe()
997 c2pread, c2pwrite = os.pipe()
998 errout, errin = os.pipe()
# Child: close every inherited fd except the status pipe.
1008 for i in range(3, 256):
1009 if i != status_write:
1015 os.execvp(cmd[0], cmd)
1021 os.dup2(c2pread, c2pwrite)
1022 os.dup2(errout, errin)
1024 output = status = ""
1026 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1029 r = os.read(fd, 8196)
1031 more_data.append(fd)
1032 if fd == c2pwrite or fd == errin:
1034 elif fd == status_read:
1037 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1039 pid, exit_status = os.waitpid(pid, 0)
1041 os.close(status_write)
1042 os.close(status_read)
1052 return output, status, exit_status
1054 ################################################################################
# Parse gpgv --status-fd output into {KEYWORD: args} plus a string of
# internal-error complaints for malformed/duplicate lines.  The keywords
# dict initialisation and several continue lines are missing from this
# extract.
1056 def process_gpgv_output(status):
1057 # Process the status-fd output
1060 for line in status.split('\n'):
1064 split = line.split()
1066 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1068 (gnupg, keyword) = split[:2]
1069 if gnupg != "[GNUPG:]":
1070 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# These keywords may legitimately repeat; all others are duplicates.
1073 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1074 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1077 keywords[keyword] = args
1079 return (keywords, internal_error)
1081 ################################################################################
# NOTE(review): the keyserver/keyring default guards, the internal-error
# check and the final success return are missing from this extract.
1083 def retrieve_key (filename, keyserver=None, keyring=None):
1084 """Retrieve the key that signed 'filename' from 'keyserver' and
1085 add it to 'keyring'. Returns nothing on success, or an error message
1088 # Defaults for keyserver and keyring
1090 keyserver = Cnf["Dinstall::KeyServer"]
1092 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1094 # Ensure the filename contains no shell meta-characters or other badness
1095 if not re_taint_free.match(filename):
1096 return "%s: tainted filename" % (filename)
1098 # Invoke gpgv on the file
1099 status_read, status_write = os.pipe();
# /dev/null keyring: we only want the NO_PUBKEY status, not verification.
1100 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1101 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1103 # Process the status-fd output
1104 (keywords, internal_error) = process_gpgv_output(status)
1106 return internal_error
1108 if not keywords.has_key("NO_PUBKEY"):
1109 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1111 fingerprint = keywords["NO_PUBKEY"][0]
1112 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1113 # it'll try to create a lockfile in /dev. A better solution might
1114 # be a tempfile or something.
1115 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1116 % (Cnf["Dinstall::SigningKeyring"])
1117 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1118 % (keyring, keyserver, fingerprint)
1119 (result, output) = commands.getstatusoutput(cmd)
1121 return "'%s' failed with exit code %s" % (cmd, result)
1125 ################################################################################
# Render a space-separated "--keyring X" option string for gpg/gpgv,
# defaulting to the configured Dinstall keyrings (the 'if not keyrings'
# guard line is missing from this extract).
1127 def gpg_keyring_args(keyrings=None):
1129 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1131 return " ".join(["--keyring %s" % x for x in keyrings])
1133 ################################################################################
# NOTE(review): this is the central signature checker; a substantial
# number of its lines (returns after each reject, the bad_signature
# accounting, args-length checks, autofetch plumbing) are missing from
# this extract — the visible code is NOT the complete control flow.
1135 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1136 """Check the signature of a file and return the fingerprint if the
1137 signature is valid or 'None' if it's not. The first argument is the
1138 filename whose signature should be checked. The second argument is a
1139 reject function and is called when an error is found. The reject()
1140 function must allow for two arguments: the first is the error message,
1141 the second is an optional prefix string. It's possible for reject()
1142 to be called more than once during an invocation of check_signature().
1143 The third argument is optional and is the name of the files the
1144 detached signature applies to. The fourth argument is optional and is
1145 a *list* of keyrings to use. 'autofetch' can either be None, True or
1146 False. If None, the default behaviour specified in the config will be
1149 # Ensure the filename contains no shell meta-characters or other badness
1150 if not re_taint_free.match(sig_filename):
1151 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1154 if data_filename and not re_taint_free.match(data_filename):
1155 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1159 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1161 # Autofetch the signing key if that's enabled
1162 if autofetch == None:
1163 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1165 error_msg = retrieve_key(sig_filename)
1170 # Build the command line
1171 status_read, status_write = os.pipe();
1172 cmd = "gpgv --status-fd %s %s %s %s" % (
1173 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1175 # Invoke gpgv on the file
1176 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1178 # Process the status-fd output
1179 (keywords, internal_error) = process_gpgv_output(status)
1181 # If we failed to parse the status-fd output, let's just whine and bail now
1183 reject("internal error while performing signature check on %s." % (sig_filename))
1184 reject(internal_error, "")
1185 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1189 # Now check for obviously bad things in the processed output
1190 if keywords.has_key("KEYREVOKED"):
1191 reject("The key used to sign %s has been revoked." % (sig_filename))
1193 if keywords.has_key("BADSIG"):
1194 reject("bad signature on %s." % (sig_filename))
1196 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1197 reject("failed to check signature on %s." % (sig_filename))
1199 if keywords.has_key("NO_PUBKEY"):
1200 args = keywords["NO_PUBKEY"]
1203 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1205 if keywords.has_key("BADARMOR"):
1206 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1208 if keywords.has_key("NODATA"):
1209 reject("no signature found in %s." % (sig_filename))
1211 if keywords.has_key("EXPKEYSIG"):
1212 args = keywords["EXPKEYSIG"]
1215 reject("Signature made by expired key 0x%s" % (key))
1217 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1218 args = keywords["KEYEXPIRED"]
# KEYEXPIRED carries either an epoch timestamp or an ISO date ("T" form).
1222 if timestamp.count("T") == 0:
1223 expiredate = time.strftime("%Y-%m-%d", time.gmtime(timestamp))
1225 expiredate = timestamp
1226 reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1232 # Next check gpgv exited with a zero return code
1234 reject("gpgv failed while checking %s." % (sig_filename))
1236 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1238 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1241 # Sanity check the good stuff we expect
1242 if not keywords.has_key("VALIDSIG"):
1243 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1246 args = keywords["VALIDSIG"]
1248 reject("internal error while checking signature on %s." % (sig_filename))
1251 fingerprint = args[0]
1252 if not keywords.has_key("GOODSIG"):
1253 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1255 if not keywords.has_key("SIG_ID"):
1256 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1259 # Finally ensure there's not something we don't recognise
1260 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1261 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1262 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1264 for keyword in keywords.keys():
1265 if not known_keywords.has_key(keyword):
1266 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1274 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint.

    Invokes gpg once per fingerprint and memoises the resulting set of
    addresses in the module-level key_uid_email_cache, so repeated lookups
    for the same key are free.  Returns a set of address strings (possibly
    empty if gpg failed or no uid matched).
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only trust the output if gpg exited cleanly; otherwise cache the
    # empty set so we don't re-run a failing command for this fingerprint.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1293 ################################################################################
1295 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Greedily word-wrap 'paragraph' so no line exceeds 'max_length' chars.

    'prefix' is prepended to every continuation line (but not the first).
    Words longer than 'max_length' are emitted on a line of their own
    rather than being broken.  Returns the wrapped text as a single string
    (no trailing newline unless the last word was over-long).
    """
    s = ""
    line = ""
    for word in paragraph.split():
        word_size = len(word)
        if word_size > max_length:
            # Over-long word: flush any pending line, then put the word
            # on its own line; it is never split.
            if line:
                s += line + '\n' + prefix
                line = ""
            s += word + '\n' + prefix
        elif line:
            # +1 accounts for the separating space.
            if len(line) + word_size + 1 > max_length:
                s += line + '\n' + prefix
                line = word
            else:
                line += ' ' + word
        else:
            line = word
    if line:
        s += line
    return s
1326 ################################################################################
1328 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1329 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink src -> dest with respect to root.

    Both paths have 'root' stripped from their front; the returned target
    climbs out of dest's directory (one '../' per path component) and then
    descends into the stripped src.
    """
    stripped_src = src.replace(root, '', 1)
    dest_dir = os.path.dirname(dest.replace(root, '', 1))
    depth = len(dest_dir.split('/'))
    return ('../' * depth) + stripped_src
1337 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """Securely pre-create a unique temporary file and return (fd, name).

    'directory' (if not None) is where the file is created; 'prefix' and
    'suffix' shape the generated filename ('prefix' defaults to "dak").
    The caller owns the open file descriptor and must close/unlink it.
    """
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1350 ################################################################################
1352 # checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Return whether the local part of 'email' appears in the alias file.

    The alias file (located via which_alias_file()) is parsed once and the
    alias names are memoised in the module-level alias_cache set; colon
    separates the alias name from its expansion, as in /etc/aliases.
    """
    global alias_cache
    if alias_cache is None:
        alias_cache = set()
        aliasfn = which_alias_file()
        if aliasfn:
            f = open(aliasfn)
            try:
                for line in f:
                    alias_cache.add(line.split(':')[0])
            finally:
                # Explicitly close to avoid leaking the file handle.
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1365 ################################################################################
# Module initialisation: build the global apt configuration object and load
# the default dak configuration from disk at import time.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If a host/instance-specific config file applies, layer it on top of the
# defaults.  NOTE(review): which_conf_file() is called twice here — assumed
# cheap and side-effect free; confirm before caching the result.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1375 ################################################################################