2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 from dak_exceptions import *
42 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
43 re_multi_line_field, re_srchasver, re_verwithext, \
44 re_parse_maintainer, re_taint_free, re_gpg_uid
46 ################################################################################
# Module-level configuration defaults and caches.  Host-specific config
# overrides are resolved at runtime (see which_conf_file / which_apt_conf_file).
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# Table of supported checksum algorithms as
# (hashname, function, earliest_changes_version)
# tuples; the version is the first .changes Format that carries the field.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
58 ################################################################################
61 """ Escape html chars """
62 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
64 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as
    CantOpenError so callers can catch a dak-specific exception type.
    """
    try:
        f = open(filename, mode)
    except IOError:
        # Call-form raise works on both Python 2 and 3; the old
        # "raise CantOpenError, filename" comma form is py2-only syntax.
        raise CantOpenError(filename)
    return f
88 ################################################################################
def our_raw_input(prompt=""):
    """Prompt on stdout and read a line interactively; on EOF (^D) report
    a user interrupt on stderr.

    NOTE(review): several source lines are missing from this view; only
    the visible statements are annotated."""
    sys.stdout.write(prompt)
    # Reached when the user closes stdin (^D): tell them before bailing out.
    sys.stderr.write("\nUser interrupt (^D).\n")
101 ################################################################################
def extract_component_from_section(section):
    """Split a Section field value into (section, component).

    A "component/section" value yields the leading component; otherwise
    a default component is looked up in the configuration.
    NOTE(review): lines are missing from this view of the function."""
    if section.find('/') != -1:
        # "component/section" form: component is the part before the slash.
        component = section.split('/')[0]
    # Expand default component
    if Cnf.has_key("Component::%s" % section):
        return (section, component)
118 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse a deb822-style blob (.changes/.dsc content) into a dict of
    lowercased field name -> value.  signing_rules controls handling of
    PGP signature markers (see parse_changes for the exact semantics).

    NOTE(review): this chunk is missing several source lines; comments
    below annotate only the visible statements."""
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
            indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict mode: the signed data must be terminated by a PGP
            # SIGNATURE marker, otherwise the .dsc is malformed.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header block up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # Single-line field: "Field: value"
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            changes[field] += '\n'
        # Continuation line of a multi-line field (leading whitespace).
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        # Strict mode but the closing signature marker was never seen.
        raise InvalidDscError, index

    # Keep the raw text around for later re-parsing (see ensure_hashes).
    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError, error
208 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

    - If signing_rules == -1, no signature is required.
    - If signing_rules == 0 (the default), a signature is required.
    - If signing_rules == 1, it turns on the same strict format checking
      as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

    - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    - The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".
    """
    # Slurp the whole file and hand the text to the deb822 parser.
    changes_in = open_file(filename)
    content = changes_in.read()
    return parse_deb822(content, signing_rules)
237 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which a given hash is stored,
    e.g. "md5" -> "md5sum"."""
    return hashname + 'sum'
242 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated.
    """
    for f in files.keys():
        file_handle = open_file(f)
        except CantOpenError:
        # Unreadable file: reject rather than crash.
        rejmsg.append("Could not open file %s for checksumming" % (f))
        # Store the computed digest under e.g. files[f]["sha1sum"].
        files[f][hash_key(hashname)] = hashfunc(file_handle)
263 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated.
    """
    for f in files.keys():
        file_handle = open_file(f)
        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
        # TODO: This happens when the file is in the pool.
        # warn("Cannot open file %s" % f)
298 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        # Compare the on-disk size (from stat) against the manifest size.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
323 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.

    @param what: name of the manifest being checked (used in messages)
    @param manifest: deb822-style dict whose field names are inspected
    @return: list of reject messages (empty when every hash is known)
    """
    rejmsg = []
    # Build a plain list of supported hash names.  A list comprehension is
    # used instead of map(): it reads better and stays a real (re-usable)
    # sequence for the membership tests below even under Python 3, where
    # map() returns a one-shot iterator.
    hashes = [x[0] for x in known_hashes]
    for field in manifest:
        if field.startswith("checksums-"):
            # Field is e.g. "checksums-sha256"; the hash name follows the dash.
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                              "in %s" % (hashname, what))
    return rejmsg
341 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    """Ensure the given hash is available for every file in the .changes:
    parsed from the manifest when the Format is new enough to carry it,
    computed from disk otherwise.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated."""
    if format >= version:
        # The version should contain the specified hash.
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        # We need to calculate the hash because it can't possibly
        return func(".changes", files, hashname, hashfunc)
358 # We could add the orig which might be in the pool to the files dict to
359 # access the checksums easily.
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated.
    """
    # Nothing to verify if the .dsc carries no field for this hash.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    # Verify the imported hashes against the files on disk / in the pool.
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
377 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    """Validate/complete all known checksums for a .changes and its .dsc.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated."""
    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
        # "major.minor" form.
        format = int(format[0]), int(format[1])
        # No minor part present; treat as .0.
        format = int(float(format[0])), 0

    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])

    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]

    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
        # Source uploads also need their .dsc checksums verified.
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    """
    Parse the 'checksums-<hashname>' field of *manifest* and store each
    listed checksum into the matching entry of *files* (modified in place).

    @param where: label of the manifest (".changes"/".dsc") for messages
    @param files: dict of filename -> per-file entry dicts
    @param manifest: deb822 dict that may carry 'checksums-<hashname>'
    @param hashname: hash being imported ("sha1", "sha256", ...)
    @return: list of reject messages (empty on success)
    """
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        checksum, size, checkfile = line.strip().split(' ')
        if checkfile not in files:
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #               (file, hashname, where))
            continue
        # Sizes are compared as strings, exactly as they appear in the field.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                          "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if hash_key(hashname) not in files[f]:
            # BUG FIX: report the file that is actually missing its entry
            # (f), not whatever filename the parsing loop above happened to
            # leave behind in 'checkfile'.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
440 ################################################################################
442 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse the given Files:/checksums-* field of *changes* into a dict
    of filename -> {size, section, priority, component, <hashname>}.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated."""
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    format = format.groups()
    if format[1] == None:
        # No minor version given; default it to 0.
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

    # format = (1,0) are the only formats we currently accept,
    # format = (0,0) are missing format headers of which we still
    # have some in the archive.
    if format != (1,0) and format != (0,0):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if (format < (1,5) or format > (1,8)):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    # checksums-* fields only exist from Format 1.8 onwards.
    if field != "files" and format < (1,8):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # .dsc Files: lines have no section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        # 5-column form (binary .changes): hash size section priority name.
        (md5, size, section, priority, name) = s
        # 3-column form (.dsc): hash size name.
        (md5, size, name) = s
        raise ParseChangesError, i
        (section, component) = extract_component_from_section(section)
        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
505 ################################################################################
def force_to_utf8(s):
    """
    Forces a string to UTF-8. If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1.

    NOTE(review): the UTF-8 fast path is missing from this view; only the
    latin-1 fallback is visible.
    """
    # Fallback: decode as latin-1 and re-encode as UTF-8 (Python 2
    # byte-string handling; `unicode` does not exist on Python 3).
    latin1_s = unicode(s,'iso8859-1')
    return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """
    Encodes a (header) string per RFC2047 if necessary. If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.

    NOTE(review): the try/except structure is missing from this view;
    only the visible statements are annotated.
    """
    # Pure ASCII needs no RFC2047 encoding at all.
    codecs.lookup('ascii')[1](s)
    # Valid UTF-8: encode the header as utf-8 (998 = max header line length).
    codecs.lookup('utf-8')[1](s)
    h = email.Header.Header(s, 'utf-8', 998)
    # Otherwise assume latin-1.
    h = email.Header.Header(s, 'iso-8859-1', 998)
537 ################################################################################
539 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
540 # with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """
    Parses a Maintainer or Changed-By field and returns:
      1. an RFC822 compatible version,
      2. an RFC2047 compatible version,
      3. the name, and
      4. the email address.

    The name is forced to UTF-8 for both 1. and 3.. If the name field
    contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
    switched to 'email (name)' format.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated.
    """
    maintainer = maintainer.strip()
        # Empty input: return four empty strings.
        return ('', '', '', '')

    if maintainer.find("<") == -1:
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        # Bare "<address>" form: no name at all.
        email = maintainer[1:-1]
        m = re_parse_maintainer.match(maintainer)
            raise ParseMaintError, "Doesn't parse as a valid Maintainer field."

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    # Names with ',' or '.' would break "name <email>" parsing, so use the
    # "email (name)" comment form instead (allowed by Debian policy).
    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # buildd_* pseudo-addresses are the one sanctioned @-less exception.
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."

    return (rfc822_maint, rfc2047_maint, name, email)
591 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    # Feed the (possibly temporary) file to the configured sendmail command.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
611 ################################################################################
def poolify(source, component):
    """Return the pool sub-directory path for *source* inside *component*.

    Library packages ("libfoo") pool under a four-character prefix
    ("libf/"); every other source pools under its first letter.
    """
    prefix = source[:4] if source[:3] == "lib" else source[:1]
    return component + prefix + '/' + source + '/'
621 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """Move *src* to *dest* (copy + chmod; removal of src not visible in
    this view), creating the destination directory if needed.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing parent dirs group-writable/setgid (02775),
        # temporarily clearing the umask so the mode sticks.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    # Moving into a directory: keep the original basename.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    # copy2 preserves mtime/permissions; then force the requested perms.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """Copy *src* to *dest*, creating the destination directory if needed;
    refuses to overwrite unless *overwrite* is set.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing parent dirs group-writable/setgid (02775),
        # temporarily clearing the umask so the mode sticks.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    # Copying into a directory: keep the original basename.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    # copy2 preserves mtime/permissions; then force the requested perms.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
668 ################################################################################
671 res = socket.gethostbyaddr(socket.gethostname())
672 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
673 if database_hostname:
674 return database_hostname
def which_conf_file ():
    """Return the host-specific dak config path if one is configured for
    this host (Config::<fqdn>::DakConfig), else the global default.

    @return: path to the dak configuration file to use
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # Fetch the value once instead of testing with .get() and then
    # indexing the config a second time for the same key.
    dakconfig = Cnf.get("Config::" + res[0] + "::DakConfig")
    if dakconfig:
        return dakconfig
    return default_config
def which_apt_conf_file ():
    """Return the host-specific apt config path if one is configured for
    this host (Config::<fqdn>::AptConfig), else the global default.

    @return: path to the apt configuration file to use
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # Fetch the value once instead of testing with .get() and then
    # indexing the config a second time for the same key.
    aptconfig = Cnf.get("Config::" + res[0] + "::AptConfig")
    if aptconfig:
        return aptconfig
    return default_apt_config
def which_alias_file():
    """Return the path of this host's forward-alias file if it exists.

    NOTE(review): the return statements are missing from this view."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    # Alias files live under a per-host directory in /var/lib/misc.
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
700 ################################################################################
702 # Escape characters which have meaning to SQL's regex comparison operator ('~')
703 # (woefully incomplete)
706 s = s.replace('+', '\\\\+')
707 s = s.replace('.', '\\\\.')
710 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substitution of template: replace every key of *map*
    occurring in the template file with its value and return the result. """
    templatefile = open_file(filename)
    template = templatefile.read()
        # Straight textual replacement of each substitution key.
        template = template.replace(x,map[x])
721 ################################################################################
def fubar(msg, exit_code=1):
    """Print a fatal error to stderr (and, per the name, exit — the exit
    call is missing from this view)."""
    sys.stderr.write("E: %s\n" % (msg))

    # NOTE(review): this line belongs to warn(); its def line is missing
    # from this view.  Prints a non-fatal warning to stderr.
    sys.stderr.write("W: %s\n" % (msg))
730 ################################################################################
732 # Returns the user name with a laughable attempt at rfc822 conformancy
733 # (read: removing stray periods).
735 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
737 ################################################################################
747 return ("%d%s" % (c, t))
749 ################################################################################
def cc_fix_changes (changes):
    """Convert the architecture field of *changes* from a whitespace-
    separated string into a dict keyed by architecture name.

    NOTE(review): lines are missing from this view."""
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        # Each architecture token becomes a key mapping to 1.
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # Parse both .changes files (error handling not visible in this view).
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    # Normalise the architecture fields into dicts for the checks below.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
799 ################################################################################
def find_next_free (dest, too_many=100):
    """Find a non-existing variant of *dest* by appending '.<n>' suffixes,
    giving up (NoFreeFilenameError) after *too_many* attempts.

    NOTE(review): the counter initialisation and return are missing from
    this view."""
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
811 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the elements of *original* with *sep*, rendering None elements
    as the empty string.

    @param original: sequence of strings (or None) to join
    @param sep: separator string (defaults to a TAB)
    @return: the joined string
    """
    # Iterate the sequence directly instead of the old index loop: xrange
    # is Python-2-only, and "is None" is the correct identity test
    # (instead of "== None").
    return sep.join("" if i is None else i for i in original)
822 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prepend *prefix* to every line of *str*; blank lines are dropped
    unless *include_blank_lines* is set.

    NOTE(review): the accumulator initialisation and return are missing
    from this view."""
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
835 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

    - If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    - If 'require_changes' == 0, a warning is given and 'None' is returned.
    - If 'require_changes' == 1, a fatal error is raised.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated.
    """
    orig_filename = filename
    # Map foo.dak -> foo.changes before validating.
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        # Distinguish unreadable-but-present from absent.
        if os.path.exists(filename):
            error = "permission denied"
            error = "file not found"
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
878 ################################################################################
881 return (arch != "source" and arch != "all")
883 ################################################################################
def join_with_commas_and(list):
    """Render a list for human reading: "a, b and c", a single element
    as-is, and an empty list as "nothing"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
890 ################################################################################
895 (pkg, version, constraint) = atom
897 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
900 pp_deps.append(pp_dep)
901 return " |".join(pp_deps)
903 ################################################################################
908 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # Process suite: map names to database ids, warn on unknown ones.
        for suite in split_args(Options["Suite"]):
            suite_id = database.get_suite_id(suite)
                warn("suite '%s' not recognised." % (suite))
                suite_ids_list.append(suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            # Every requested suite was unknown: bail out.
            fubar("No valid suite given.")

    # Process component: same id-mapping dance as suites.
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
                component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for architecture in split_args(Options["Architecture"]):
            # "source" is not a real architecture; flag it separately.
            if architecture == "source":
                architecture_id = database.get_architecture_id(architecture)
                if architecture_id == -1:
                    warn("architecture '%s' not recognised." % (architecture))
                    arch_ids_list.append(architecture_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
968 ################################################################################
970 # Inspired(tm) by Bryn Keller's print_exc_plus (See
971 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
974 tb = sys.exc_info()[2]
983 traceback.print_exc()
985 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
986 frame.f_code.co_filename,
988 for key, value in frame.f_locals.items():
989 print "\t%20s = " % key,
993 print "<unable to print>"
995 ################################################################################
997 def try_with_debug(function):
1005 ################################################################################
1007 def arch_compare_sw (a, b):
1009 Function for use in sorting lists of architectures.
1011 Sorts normally except that 'source' dominates all others.
1014 if a == "source" and b == "source":
1023 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.

    NOTE(review): the actual split/return statements are missing from
    this view.
    """
    # No comma at all: whitespace-separated form.
    if s.find(",") == -1:
    # Trailing comma usually means a stray space after it on the CLI.
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
1041 ################################################################################
def Dict(**kwargs):
    """Return the given keyword arguments as a plain dictionary.

    Tiny helper so call sites can write Dict(size=x, section=y) instead of
    a literal.  The catch-all parameter is named kwargs rather than 'dict'
    so it no longer shadows the builtin type (the rename is invisible to
    callers of a ** parameter).
    """
    return kwargs
1045 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getstatusoutput(), hacked to support
    gpgv's status fd.

    NOTE(review): the fork/pipe setup is heavily truncated in this view;
    only the visible statements are annotated.
    """
    cmd = ['/bin/sh', '-c', cmd]
    # One pipe pair each for the child's stdin, stdout and stderr.
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    # Child: close every fd except the status fd before exec'ing gpgv.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)
    # Parent: keep the read ends of the child's output pipes.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    # Multiplex over stdout/stderr and the gpgv status fd.
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    # stdout/stderr data accumulates into 'output', status-fd into 'status'.
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1112 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv status-fd output into a dict of keyword -> args, plus a
    string of accumulated internal-error messages (empty when the output
    was well-formed).

    NOTE(review): lines are missing from this view."""
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
            # Every status line must carry at least the prefix and a keyword.
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # A few keywords may legitimately repeat; any other duplicate is an error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args
    return (keywords, internal_error)
1139 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.

    NOTE(review): lines are missing from this view; only the visible
    statements are annotated.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file with an empty keyring so it reports the
    # (unknown) signing key via NO_PUBKEY on the status fd.
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1185 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Build the "--keyring X --keyring Y ..." argument string for gpg,
    defaulting to the configured Dinstall keyrings.

    NOTE(review): the guard before the default assignment is missing
    from this view."""
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1193 ################################################################################
1195 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1197 Check the signature of a file and return the fingerprint if the
1198 signature is valid or 'None' if it's not. The first argument is the
1199 filename whose signature should be checked. The second argument is a
1200 reject function and is called when an error is found. The reject()
1201 function must allow for two arguments: the first is the error message,
1202 the second is an optional prefix string. It's possible for reject()
1203 to be called more than once during an invocation of check_signature().
1204 The third argument is optional and is the name of the files the
1205 detached signature applies to. The fourth argument is optional and is
1206 a *list* of keyrings to use. 'autofetch' can either be None, True or
1207 False. If None, the default behaviour specified in the config will be
1211 # Ensure the filename contains no shell meta-characters or other badness
1212 if not re_taint_free.match(sig_filename):
1213 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1216 if data_filename and not re_taint_free.match(data_filename):
1217 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1221 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1223 # Autofetch the signing key if that's enabled
1224 if autofetch == None:
1225 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1227 error_msg = retrieve_key(sig_filename)
1232 # Build the command line
1233 status_read, status_write = os.pipe()
1234 cmd = "gpgv --status-fd %s %s %s %s" % (
1235 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1237 # Invoke gpgv on the file
1238 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1240 # Process the status-fd output
1241 (keywords, internal_error) = process_gpgv_output(status)
1243 # If we failed to parse the status-fd output, let's just whine and bail now
1245 reject("internal error while performing signature check on %s." % (sig_filename))
1246 reject(internal_error, "")
1247 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1251 # Now check for obviously bad things in the processed output
1252 if keywords.has_key("KEYREVOKED"):
1253 reject("The key used to sign %s has been revoked." % (sig_filename))
1255 if keywords.has_key("BADSIG"):
1256 reject("bad signature on %s." % (sig_filename))
1258 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1259 reject("failed to check signature on %s." % (sig_filename))
1261 if keywords.has_key("NO_PUBKEY"):
1262 args = keywords["NO_PUBKEY"]
1265 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1267 if keywords.has_key("BADARMOR"):
1268 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1270 if keywords.has_key("NODATA"):
1271 reject("no signature found in %s." % (sig_filename))
1273 if keywords.has_key("EXPKEYSIG"):
1274 args = keywords["EXPKEYSIG"]
1277 reject("Signature made by expired key 0x%s" % (key))
1279 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1280 args = keywords["KEYEXPIRED"]
1284 if timestamp.count("T") == 0:
1285 expiredate = time.strftime("%Y-%m-%d", time.gmtime(timestamp))
1287 expiredate = timestamp
1288 reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1294 # Next check gpgv exited with a zero return code
1296 reject("gpgv failed while checking %s." % (sig_filename))
1298 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1300 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1303 # Sanity check the good stuff we expect
1304 if not keywords.has_key("VALIDSIG"):
1305 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1308 args = keywords["VALIDSIG"]
1310 reject("internal error while checking signature on %s." % (sig_filename))
1313 fingerprint = args[0]
1314 if not keywords.has_key("GOODSIG"):
1315 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1317 if not keywords.has_key("SIG_ID"):
1318 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1321 # Finally ensure there's not something we don't recognise
1322 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1323 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1324 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1326 for keyword in keywords.keys():
1327 if not known_keywords.has_key(keyword):
1328 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1336 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoised in the module-level key_uid_email_cache, so gpg
    is invoked at most once per fingerprint.

    @type fingerprint: string
    @param fingerprint: key fingerprint to look up

    @rtype: set
    @return: set of email addresses found in the key's uids (may be empty)
    """
    # Fast path: serve a previously computed answer from the cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for line in output.split('\n'):
            m = re_gpg_uid.match(line)
            if m:
                addresses.add(m.group(1))
    # Cache even an empty result so a failing lookup is not retried
    # on every call.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1355 ################################################################################
1357 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Wrap 'paragraph' so that no output line exceeds 'max_length'
    columns, prefixing continuation lines with 'prefix'.

    NOTE(review): several source lines of this function (accumulator
    initialisation, the per-word loop header, the else branches and the
    final return) are not visible in this view of the file; only the
    visible lines are documented below.
    """
    # Split on any whitespace into individual words.
    words = paragraph.split()
        # (inside the per-word loop, whose header is elided in this view)
        word_size = len(word)
        if word_size > max_length:
            # Word is longer than a whole line: flush the pending line,
            # then emit the oversized word on a line of its own.
            s += line + '\n' + prefix
            s += word + '\n' + prefix
            # +1 accounts for the space that would join the word to the
            # current line.
            new_length = len(line) + word_size + 1
            if new_length > max_length:
                # Appending the word would overflow: flush the pending
                # line first.
                s += line + '\n' + prefix
1388 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the relativized source path.
    """
    # Drop the leading 'root' prefix from both paths (first occurrence
    # only, so a repeated substring deeper in the path is untouched).
    rel_src = src.replace(root, '', 1)
    rel_dest = dest.replace(root, '', 1)
    # Climb one level ('../') for every component of the destination's
    # directory, then descend to the relativized source.
    link_dir = os.path.dirname(rel_dest)
    ups = '../' * len(link_dir.split('/'))
    return ups + rel_src
1401 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # mkstemp both reserves the name and opens it, avoiding the
    # create/open race of mktemp-style helpers.
    (fd, path) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, path)
1415 ################################################################################
def is_email_alias(email):
    """Check whether the user part of 'email' is listed in the alias file.

    The alias file is parsed once and memoised in the module-level
    alias_cache; a missing alias file yields an empty cache, so every
    subsequent lookup returns False.

    @type email: string
    @param email: address whose local part should be checked

    @rtype: boolean
    @return: True if the local part appears in the alias file
    """
    global alias_cache
    if alias_cache is None:
        alias_cache = set()
        aliasfn = which_alias_file()
        if aliasfn:
            # Alias entries are colon-separated; the first field is the
            # aliased name.  Close the file explicitly rather than
            # leaking the descriptor.
            f = open(aliasfn)
            try:
                for line in f:
                    alias_cache.add(line.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1429 ################################################################################
# Module-level configuration: build the shared apt_pkg configuration
# object and load the default dak config file into it.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If a non-default config file is in effect, overlay it on top of the
# defaults already loaded above.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1439 ################################################################################
def generate_contents_information(filename):
    """
    Generate a list of files contained in a .deb

    @type filename: string
    @param filename: the path to a .deb

    @rtype: list
    @return: a list of files in the data.tar.* portion of the .deb

    NOTE(review): several source lines (the error guards after each 'ar'
    invocation, the 'contents' initialisation and any try/finally around
    the tar walk) are not visible in this view of the file; only the
    visible lines are documented below.
    """
    # List the ar archive members of the .deb to locate the data tarball.
    cmd = "ar t %s" % (filename)
    (result, output) = commands.getstatusoutput(cmd)
        # presumably guarded by 'if result != 0:' -- elided in this view
        reject("%s: 'ar t' invocation failed." % (filename))
        reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")

    # Ugh ... this is ugly ... Code ripped from process_unchecked.py
    chunks = output.split('\n')

    # Extract the third ar member (the data tarball) into the cwd.
    cmd = "ar x %s %s" % (filename, chunks[2])
    (result, output) = commands.getstatusoutput(cmd)
        # presumably guarded by 'if result != 0:' -- elided in this view
        reject("%s: '%s' invocation failed." % (filename, cmd))
        reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")

    # Got deb tarballs, now lets go through and determine what bits
    # and pieces the deb had ...
    if chunks[2] == "data.tar.gz":
        data = tarfile.open("data.tar.gz", "r:gz")
    elif chunks[2] == "data.tar.bz2":
        data = tarfile.open("data.tar.bz2", "r:bz2")
        # (else branch, elided here: unknown tarball name -- remove the
        # extracted member and reject)
        os.remove(chunks[2])
        reject("couldn't find data.tar.*")

    # Collect every non-directory member, stripping the leading './'.
    for tarinfo in data:
        if not tarinfo.isdir():
            contents.append(tarinfo.name[2:])

    # Clean up the tarball extracted into the cwd above.
    if os.path.exists( chunks[2] ):
        os.remove( chunks[2] )
1488 ###############################################################################