3 # Utility functions for katie
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.56 2005-11-25 06:59:45 ajt Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
29 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
re_isanum = re.compile (r"^\d+$");            # a string consisting only of digits
re_default_answer = re.compile(r"\[(.*)\]");  # the "[X]" default answer embedded in a prompt
re_fdnic = re.compile(r"\n\n");               # double newline (paragraph separator)
re_bin_only_nmu = re.compile(r"\+b\d+$");     # "+bN" version suffix of a binary-only NMU
36 ###############################################################################
38 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    """Store every keyword argument as an attribute of this instance."""
    for key, value in kwds.items():
        setattr(self, key, value);
def update(self, **kwds):
    """Add or overwrite instance attributes from the keyword arguments."""
    for key, value in kwds.items():
        setattr(self, key, value);
47 ###############################################################################
# Read in the group maintainer override file
def __init__ (self, Cnf):
    """Load the group-maintained address list from the file named by
    Dinstall::GroupOverrideFilename (if configured) into self.group_maint."""
    self.group_maint = {};
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
        file = utils.open_file(filename);
        for line in file.readlines():
            # Strip comments and whitespace; matching is case-insensitive.
            line = utils.re_comments.sub('', line).lower().strip();
            # NOTE(review): this copy appears to be missing a line here
            # (presumably a guard skipping empty lines) — confirm upstream.
            self.group_maint[line] = 1;
def is_an_nmu (self, pkg):
    """Decide whether the upload in *pkg* is a non-maintainer upload.

    NOTE(review): several lines of this method are missing from this copy
    (e.g. the binding of `dsc` and `uploadernames`, and the return
    statements); comments below describe only the visible code.
    """
    changes = pkg.changes;
    # Canonicalise the .dsc Maintainer field for comparison.
    i = utils.fix_maintainer (dsc.get("maintainer",
                                      Cnf["Dinstall::MyEmailAddress"]).lower());
    (dsc_rfc822, dsc_rfc2047, dsc_name, dsc_email) = i;
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == changes["maintainername"].lower() and \
       (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
    # Changed-By matching someone in Uploaders is not an NMU either.
    if dsc.has_key("uploaders"):
        uploaders = dsc["uploaders"].lower().split(",");
        (rfc822, rfc2047, name, email) = utils.fix_maintainer (i.strip());
        uploadernames[name] = "";
    if uploadernames.has_key(changes["changedbyname"].lower()):
    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(changes["maintaineremail"].lower()):
91 ###############################################################################
def __init__(self, Cnf):
    """Set up shared state: the NMU checker, accept statistics, the empty
    per-package holder, the template substitution map and the DB connection.

    NOTE(review): this copy appears to be missing the line that stores Cnf
    on self (later methods read self.Cnf) — confirm upstream.
    """
    # Read in the group-maint override file
    self.nmu = nmu_p(Cnf);
    # Running totals for accepted uploads.
    self.accept_count = 0;
    self.accept_bytes = 0L;
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {});

    # Initialize the substitution template mapping global
    Subst = self.Subst = {};
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
    Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];

    # Database connection used by all the queries in this class.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
    db_access.init(Cnf, self.projectB);
114 ###########################################################################
def init_vars (self):
    """Reset all per-package state so a new upload can be processed.

    Clears the shared dictionaries hanging off self.pkg and resets the
    .orig.tar.gz bookkeeping attributes to their idle values.
    """
    # Use getattr() instead of exec'ing a built-up string: identical
    # effect, but no string evaluation and it works on Python 3 too.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
    self.pkg.orig_tar_gz = None;
123 ###########################################################################
def update_vars (self):
    """Reload per-package state previously saved by dump_vars().

    Reads the pickled dictionaries/values from <changes-basename>.katie
    (in the same order dump_vars() wrote them) back into self.pkg.
    """
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    p = cPickle.Unpickler(dump_file);
    # Use getattr/setattr instead of exec'ing built-up strings: identical
    # effect, but no string evaluation and friendlier to tooling.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).update(p.load());
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        setattr(self.pkg, i, p.load());
    # Release the file handle; the unpickler is done with it.
    dump_file.close();
135 ###########################################################################
# This could just dump the dictionaries as is, but I'd like to avoid
# this so there's some idea of what katie & lisa use from jennifer
def dump_vars(self, dest_dir):
    """Serialise the per-upload state to <changes-basename>.katie in
    *dest_dir* so later tools (katie/lisa) can re-load it via update_vars().

    NOTE(review): this copy is missing several lines (the try/except around
    chmod, the d_* dictionary initialisations, the dsc copying loop body and
    the final p.dump()/close calls); comments describe the visible code only.
    """
    # Bind local names (changes, dsc, files, ...) from self.pkg.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i);
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
    dump_file = utils.open_file(dump_filename, 'w');
    # Group read/write so sibling archive tools can update/remove the dump.
    os.chmod(dump_filename, 0660);
    if errno.errorcode[e.errno] == 'EPERM':
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
        # A world-readable dump that we cannot chmod is a hard failure.
        if perms & stat.S_IROTH:
            utils.fubar("%s is world readable and chmod failed." % (dump_filename));

    # Binary pickle protocol 1.
    p = cPickle.Pickler(dump_file, 1);
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    # Copy only a known subset of the per-file metadata.
    for file in files.keys():
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "othercomponents",
                   "pool name", "original component" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i];

    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version",
               "maintainer", "urgency", "fingerprint", "changedby822",
               "changedby2047", "changedbyname", "maintainer822",
               "maintainer2047", "maintainername", "maintaineremail",
               "closes", "changes" ]:
        d_changes[i] = changes[i];
    # Optional changes fields
    for i in [ "changed-by", "filecontents", "format", "lisa note", "distribution-version" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i];
    # dsc fields copied into d_dsc (loop body elided in this copy).
    for i in [ "source", "version", "maintainer", "fingerprint",
               "uploaders", "bts changelog" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {};
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i];
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i];

    # Pickle each trimmed structure in a fixed order (mirrored by update_vars).
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
202 ###########################################################################
# Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    """Refresh the Subst template map from the current package's changes
    data (architecture, maintainer addresses, source, version, ...).

    NOTE(review): this copy appears to be missing the local bindings of
    Subst/Cnf and at least one else: line; comments describe the visible
    code only.
    """
    changes = self.pkg.changes;
    # If jennifer crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" };
    # and maintainer2047 may not exist.
    if not changes.has_key("maintainer2047"):
        changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"];

    Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");

    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby2047"];
        Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
                                                 changes["maintainer2047"]);
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
        # NOTE(review): an else: appears to be missing before the next
        # three lines in this copy — confirm upstream.
        Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"];
        Subst["__MAINTAINER_TO__"] = changes["maintainer2047"];
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        # Bcc the package tracking system for sourceful uploads.
        Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

    # Apply any global override of the Maintainer field
    if self.Cnf.get("Dinstall::OverrideMaintainer"):
        Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
        Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];

    Subst["__REJECT_MESSAGE__"] = reject_message;
    Subst["__SOURCE__"] = changes.get("source", "Unknown");
    Subst["__VERSION__"] = changes.get("version", "Unknown");
242 ###########################################################################
def build_summaries(self):
    """Build the long and short textual summaries of the upload used in
    the accept/announce mails.

    Returns a (summary, short_summary) tuple.

    NOTE(review): a few lines are elided in this copy (the byhand/new
    local assignments and else: branches); comments describe the visible
    code only.
    """
    changes = self.pkg.changes;
    files = self.pkg.files;

    byhand = summary = new = "";

    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {};

    file_keys = files.keys();

    for file in file_keys:
        if files[file].has_key("byhand"):
            summary += file + " byhand\n"
        elif files[file].has_key("new"):
            summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            # For new .debs, include the package description in the summary.
            if files[file]["type"] == "deb":
                deb_fh = utils.open_file(file)
                summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n';
            # NOTE(review): an else: branch appears to be missing before the
            # next three lines (the pool destination is for ordinary files).
            files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
            summary += file + "\n to " + destination + "\n"

    short_summary = summary;

    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""));

    summary += "Changes: " + f;

    summary += self.announce(short_summary, 0)

    return (summary, short_summary);
287 ###########################################################################
def close_bugs (self, summary, action):
    """Close or tag the bugs listed in the changes file's Closes field,
    appending what was done to *summary*.  Mail is only sent when
    *action* is true.

    NOTE(review): this copy is heavily elided (local Subst/Cnf bindings,
    the 'for bug in bugs:' loop headers, else: branches, returns — and
    the closing quotes of the stable-warning string are missing, so the
    text below it is swallowed by the literal); comments describe the
    visible code only.
    """
    changes = self.pkg.changes;
    bugs = changes["closes"].keys();

    # Maintainer uploads close bugs; NMUs only tag them fixed.
    if not self.nmu.is_an_nmu(self.pkg):
        if changes["distribution"].has_key("experimental"):
            # tag bugs as fixed-in-experimental for uploads to experimental
            summary += "Setting bugs to severity fixed: ";
            control_message = "";
            summary += "%s " % (bug);
            control_message += "tag %s + fixed-in-experimental\n" % (bug);
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message;
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-experimental-fixed");
                utils.send_mail (mail_message);
                self.Logger.log(["setting bugs to fixed"]+bugs);
        summary += "Closing bugs: ";
        summary += "%s " % (bug);
        Subst["__BUG_NUMBER__"] = bug;
        if changes["distribution"].has_key("stable"):
            Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
        Subst["__STABLE_WARNING__"] = "";
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
        utils.send_mail (mail_message);
        self.Logger.log(["closing bugs"]+bugs);
    summary += "Setting bugs to severity fixed: ";
    control_message = "";
    summary += "%s " % (bug);
    control_message += "tag %s + fixed\n" % (bug);
    if action and control_message != "":
        Subst["__CONTROL_MESSAGE__"] = control_message;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
        utils.send_mail (mail_message);
        self.Logger.log(["setting bugs to fixed"]+bugs);
351 ###########################################################################
def announce (self, short_summary, action):
    """Send announcement mail for sourceful uploads to each target
    suite's configured Announce list, then hand off to close_bugs()
    when Dinstall::CloseBugs is set.

    NOTE(review): several lines are elided in this copy (local
    Subst/Cnf bindings, the early return, lists_done/summary
    initialisation, an 'if action:' guard and the final return);
    comments describe the visible code only.
    """
    changes = self.pkg.changes;

    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

    Subst["__SHORT_SUMMARY__"] = short_summary;

    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist));
        # Announce to each distinct list address at most once.
        if list == "" or lists_done.has_key(list):
        lists_done[list] = 1;
        summary += "Announcing to %s\n" % (list);

        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
        # Bcc the package tracking system as well, when configured.
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
        utils.send_mail (mail_message);

    if Cnf.FindB("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action);
385 ###########################################################################
def accept (self, summary, short_summary):
    """Accept an upload: move its files into the accepted queue, update
    the running statistics, send the accepted mail and announcement,
    write the BTS version-tracking files and feed the auto-build queues.

    NOTE(review): many lines are elided in this copy (local Cnf/Subst/dsc
    bindings, try/except blocks, continue statements, else: branches and
    the ql result handling); comments describe the visible code only.
    """
    files = self.pkg.files;
    changes = self.pkg.changes;
    changes_file = self.pkg.changes_file;

    self.Logger.log(["Accepting changes",changes_file]);

    # Save our state alongside the files so katie/lisa can re-load it.
    self.dump_vars(Cnf["Dir::Queue::Accepted"]);

    # Move all the files into the accepted directory
    utils.move(changes_file, Cnf["Dir::Queue::Accepted"]);
    file_keys = files.keys();
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"]);
        self.accept_bytes += float(files[file]["size"])
    self.accept_count += 1;

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = "";
        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

    ## Helper stuff for DebBugs Version Tracking
    if Cnf.Find("Dir::Queue::BTSVersionTrack"):
        # ??? once queue/* is cleared on *.d.o and/or reprocessed
        # the conditionalization on dsc["bts changelog"] should be
        # Write out the version history from the changelog
        if changes["architecture"].has_key("source") and \
           dsc.has_key("bts changelog"):
            # Write atomically: dot-prefixed temp file, then rename.
            temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                                dotprefix=1, perms=0644);
            version_history = utils.open_file(temp_filename, 'w');
            version_history.write(dsc["bts changelog"]);
            version_history.close();
            filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                                  changes_file[:-8]+".versions");
            os.rename(temp_filename, filename);

        # Write out the binary -> source mapping.
        temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
                                            dotprefix=1, perms=0644);
        debinfo = utils.open_file(temp_filename, 'w');
        for file in file_keys:
            if f["type"] == "deb":
                line = " ".join([f["package"], f["version"],
                                 f["architecture"], f["source package"],
                                 f["source version"]]);
                debinfo.write(line+"\n");
        filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
                              changes_file[:-8]+".debinfo");
        os.rename(temp_filename, filename);

    ## Special support to enable clean auto-building of accepted packages
    self.projectB.query("BEGIN WORK");
    for suite in changes["distribution"].keys():
        # Only suites listed in Dinstall::QueueBuildSuites are fed.
        if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
        suite_id = db_access.get_suite_id(suite);
        dest_dir = Cnf["Dir::QueueBuild"];
        if Cnf.FindB("Dinstall::SecurityQueueBuild"):
            dest_dir = os.path.join(dest_dir, suite);
        for file in file_keys:
            src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
            dest = os.path.join(dest_dir, file);
            if Cnf.FindB("Dinstall::SecurityQueueBuild"):
                # Copy it since the original won't be readable by www-data
                utils.copy(src, dest);
                # NOTE(review): an else: appears to be missing here.
                # Create a symlink to it
                os.symlink(src, dest);
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, 0, '%s', 't')" % (suite_id, dest));
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
            dest = os.path.join(dest_dir, filename);
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                src = os.path.join(ql[0][0], ql[0][1]);
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, 0, '%s', 't')" % (suite_id, dest));
            # if it does, update things to ensure it's not removed prematurely
            self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));
    self.projectB.query("COMMIT WORK");
498 ###########################################################################
def check_override (self):
    """Mail the maintainer when a .deb's section/priority disagrees with
    the override database.

    NOTE(review): several lines are elided in this copy (local Subst/Cnf
    bindings, the early return, summary initialisation, continue lines
    and the empty-summary guard); comments describe the visible code only.
    """
    changes = self.pkg.changes;
    files = self.pkg.files;

    # Abandon the check if:
    #  a) it's a non-sourceful upload
    #  b) override disparity checks have been disabled
    #  c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:

    file_keys = files.keys();

    for file in file_keys:
        # Only non-NEW .debs have a recorded override to compare against.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"];
            override_section = files[file]["override section"];
            if section.lower() != override_section.lower() and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section);
            priority = files[file]["priority"];
            override_priority = files[file]["override priority"];
            if priority != override_priority and priority != "-":
                summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority);

    Subst["__SUMMARY__"] = summary;
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
    utils.send_mail(mail_message);
539 ###########################################################################
def force_reject (self, files):
    """Forcefully move files from the current directory to the
    reject directory.  If any file already exists in the reject
    directory it will be moved to the morgue to make way for
    the new file.

    NOTE(review): this copy is missing the 'for file in files:' loop
    header, the try:/except lines around both os.open calls and several
    continue statements; comments describe the visible code only.
    """
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
    dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
    # O_EXCL so an existing reject file is noticed rather than clobbered.
    dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # File exists?  Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
        # Find a free name in the morgue for the displaced file.
        morgue_file = utils.find_next_free(morgue_file);
        except utils.tried_too_hard_exc:
            # Something's either gone badly Pete Tong, or
            # someone is trying to exploit us.
            utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
        utils.move(dest_file, morgue_file, perms=0660);
        # Retry claiming the destination now the old file is out of the way.
        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));

    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660);
581 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    """Reject the current upload: optionally let the operator edit the
    rejection message in $EDITOR, move the files to the reject
    directory, write the <foo>.reason file and send the rejection mail.

    NOTE(review): a number of lines are elided in this copy (the edit
    loop initialisation, the prompt-answer branches, try/except lines,
    the else: separating automatic from manual rejection, file-descriptor
    close and the final return); comments describe the visible code only.
    """
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        temp_filename = utils.temp_filename();
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename);
        reject_message = "".join(temp_fh.readlines());
        print "Reject message:";
        print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1);
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt);
            m = re_default_answer.search(prompt);
            answer = answer[:1].upper();
        os.unlink(temp_filename);

    reason_filename = pkg.changes_file[:-8] + ".reason";
    reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;

    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file];
    self.force_reject(reject_files);

    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reason_filename):
        os.unlink(reason_filename);
    reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);

    # Automatic rejection: canned rejector address and X-Katie header.
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    Subst["__MANUAL_REJECT_MESSAGE__"] = "";
    Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
    os.write(reason_fd, reject_message);
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");

    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);

    Subst["__REJECTOR_ADDRESS__"] = user_email_address;
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
    # Write the rejection email out as the <foo>.reason file
    os.write(reason_fd, reject_mail_message);

    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail(reject_mail_message);

    self.Logger.log(["rejected", pkg.changes_file]);
656 ################################################################################
# Ensure that source exists somewhere in the archive for the binary
# upload being processed.
# (1) exact match                      => 1.0-3
# (2) Bin-only NMU of an MU            => 1.0-3.0.1
# (3) Bin-only NMU of a sourceful-NMU  => 1.0-3.1.1
def source_exists (self, package, source_version, suites = ["any"]):
    """Check the archive contains source for *package* at
    *source_version*, or at the version a binary-only NMU (+bN suffix)
    was built from.

    NOTE(review): many lines are elided in this copy (the "any"-suite
    branch, the suite-expansion loop and the return statements).  Also
    note the mutable default argument `suites = ["any"]` — harmless if
    the list is never mutated, but confirm upstream.
    """
    que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
    # source must exist in suite X, or in some other suite that's
    # mapped to X, recursively... silent-maps are counted too,
    # unreleased-maps aren't.
    maps = self.Cnf.ValueList("SuiteMappings")[:]
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps
             if x[0] == "map" or x[0] == "silent-map" ]
    # Grow the suite set with anything mapped onto a suite already in it.
    if x[1] in s and x[0] not in s:

    que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "));
    q = self.projectB.query(que)

    # Reduce the query results to a list of version numbers
    ql = map(lambda x: x[0], q.getresult());

    # Try (1): exact version match.
    if source_version in ql:

    # Try (2)/(3): strip any "+bN" suffix and look for the base version.
    orig_source_version = re_bin_only_nmu.sub('', source_version)
    if orig_source_version in ql:
704 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    """Look up *package* in the override database for *suite*, record
    the override section/priority on files[file] when found.

    NOTE(review): several lines are elided in this copy (the assignment
    of `type`, the suite_id sanity check and the return statements);
    comments describe the visible code only.
    """
    files = self.pkg.files;

    if binary_type == "": # must be source
    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];
    # Avoid <undef> on unknown distributions
    suite_id = db_access.get_suite_id(suite);
    component_id = db_access.get_component_id(component);
    type_id = db_access.get_override_type_id(type);

    # FIXME: nasty non-US speficic hack
    if component.lower().startswith("non-us/"):
        component = component[7:];

    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id));
    result = q.getresult();
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        deb_type_id = db_access.get_override_type_id("deb");
        udeb_type_id = db_access.get_override_type_id("udeb");
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, deb_type_id, udeb_type_id));
        result = q.getresult();

    # Remember the section and priority so we can check them later if appropriate
    files[file]["override section"] = result[0][0];
    files[file]["override priority"] = result[0][1];
747 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append *prefix* + *str* as a new line of self.reject_message.

    The newline separator goes before each new entry (never after), so
    the accumulated message never carries a trailing newline when it is
    handed back up to a caller.
    """
    if self.reject_message != "":
        self.reject_message += "\n";
    self.reject_message = self.reject_message + prefix + str;
757 ################################################################################
def get_anyversion(self, query_result, suite):
    """Return the highest version among *query_result* rows whose suite
    is *suite* or any suite listed in its VersionChecks::Enhances.

    NOTE(review): the initialisation of `anyversion`, its update and the
    final return are elided in this copy.
    """
    anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
    for (v, s) in query_result:
        if s in [ string.lower(x) for x in anysuite ]:
            # Keep the largest version seen so far.
            if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
768 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version):
    """Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied.

    NOTE(review): some lines are elided in this copy (e.g. the binding
    of `suite` from each query row and several else:/return lines);
    comments describe the visible code only.
    """
    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
        must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite);
        for entry in query_result:
            existent_version = entry[0];
            if suite in must_be_newer_than and \
               apt_pkg.VersionCompare(new_version, existent_version) < 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) > -1:
                ch = self.pkg.changes
                # If the conflicting suite maps to another via
                # distribution-version, consider propagating instead.
                if ch.get('distribution-version', {}).has_key(suite):
                    # we really use the other suite, ignoring the conflicting one ...
                    addsuite = ch["distribution-version"][suite]
                    add_version = self.get_anyversion(query_result, addsuite)
                    target_version = self.get_anyversion(query_result, target_suite)
                    # not add_version can only happen if we map to a suite
                    # that doesn't enhance the suite we're propup'ing from.
                    # so "propup-ver x a b c; map a d" is a problem only if
                    # d doesn't enhance a.
                    #
                    # i think we could always propagate in this case, rather
                    # than complaining. either way, this isn't a REJECT issue
                    #
                    # And - we really should complain to the dorks who configured dak
                    self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
                    self.pkg.changes.setdefault("propdistribution", {})
                    self.pkg.changes["propdistribution"][addsuite] = 1
                elif not target_version:
                    # not targets_version is true when the package is NEW
                    # we could just stick with the "...old version..." REJECT
                    self.reject("Won't propogate NEW packages.")
                elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                    # propogation would be redundant. no need to reject though.
                    self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
                elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                     apt_pkg.VersionCompare(add_version, target_version) >= 0:
                    # propagate!
                    self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
                    self.pkg.changes.setdefault("propdistribution", {})
                    self.pkg.changes["propdistribution"][addsuite] = 1
                # NOTE(review): an else: appears to be missing before the
                # final hard reject below.
                self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
833 ################################################################################
def check_binary_against_db(self, file):
    """Run cross-suite version checks plus an archive-overwrite check
    for one binary in files; return the accumulated reject message
    (empty string means OK).

    NOTE(review): part of the first SQL FROM clause (the architecture
    table alias 'a') and the 'if q.getresult():' guard before the
    overwrite reject appear to be elided in this copy.
    """
    self.reject_message = "";
    files = self.pkg.files;

    # Ensure version is sane
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]));
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);

    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    self.reject("%s: can not overwrite existing copy already in the archive." % (file));

    return self.reject_message;
862 ################################################################################
def check_source_against_db(self, file):
    """Run the cross-suite version checks for the source package in the
    current .dsc; return the accumulated reject message (empty string
    means OK).
    """
    self.reject_message = "";
    # Fix: `dsc` was referenced below without ever being bound.
    dsc = self.pkg.dsc;

    # Ensure version is sane
    # FIXME(review): SQL is built by string interpolation from upload
    # data — consider parameterised queries.
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));

    return self.reject_message;
876 ################################################################################
879 # NB: this function can remove entries from the 'files' index [if
880 # the .orig.tar.gz is a duplicate of the one in the archive]; if
881 # you're iterating over 'files' and call this function as part of
882 # the loop, be sure to add a check to the top of the loop to
# ensure you haven't just tried to dereference the deleted entry.
def check_dsc_against_db(self, file):
# Verify every file listed in the .dsc ('file' is the .dsc's own name in
# self.pkg.files): compare md5sum/size against the values the .dsc
# declares, and locate any .orig.tar.gz either in this upload, in the
# archive pool, or in one of the queue directories.
#
# Returns a tuple (reject_message, orig_tar_gz_path_in_unchecked_or_None).
#
# NOTE(review): this chunk is missing interleaved lines from the original
# file (several for-loop and if/else header lines around the statements
# below are not visible here), so the code is left exactly as found and
# only comments are added.
self.reject_message = "";
files = self.pkg.files;
dsc_files = self.pkg.dsc_files;
legacy_source_untouchable = self.pkg.legacy_source_untouchable;
# Reset; filled in below if/when the .orig.tar.gz is located
# (set to -1 on "not found anywhere" — see near the end).
self.pkg.orig_tar_gz = None;
# Try and find all files mentioned in the .dsc. This has
# to work harder to cope with the multiple possible
# locations of an .orig.tar.gz.
for dsc_file in dsc_files.keys():
if files.has_key(dsc_file):
# Case 1: the file is part of this very upload — remember its
# real checksum/size for the final comparison at the bottom.
actual_md5 = files[dsc_file]["md5sum"];
actual_size = int(files[dsc_file]["size"]);
found = "%s in incoming" % (dsc_file)
# Check the file does not already exist in the archive
q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
# Strip out anything that isn't '%s' or '/%s$'
# (the LIKE above can match substrings of longer filenames;
# presumably this runs inside a loop over q.getresult() whose
# header line is missing from this chunk — confirm upstream.)
if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
# "[katie] has not broken them. [katie] has fixed a
# brokenness. Your crappy hack exploited a bug in
# "(Come on! I thought it was always obvious that
# one just doesn't release different files with
# the same name and version.)"
# -- ajk@ on d-devel@l.d.o
# Ignore exact matches for .orig.tar.gz
# (an .orig.tar.gz identical to the archived one is harmless:
# warn, drop it from the upload, and use the archive copy).
if dsc_file.endswith(".orig.tar.gz"):
if files.has_key(dsc_file) and \
int(files[dsc_file]["size"]) == int(i[0]) and \
files[dsc_file]["md5sum"] == i[1]:
self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
self.pkg.orig_tar_gz = i[2] + i[3];
# Any other name/version collision with the archive is fatal.
self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
elif dsc_file.endswith(".orig.tar.gz"):
# Case 2: the .orig.tar.gz is not in this upload — look for it
# in the archive pool.
q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
# Strip out anything that isn't '%s' or '/%s$'
if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
# Unfortunately, we may get more than one match here if,
# for example, the package was in potato but had an -sa
# upload in woody. So we need to choose the right one.
x = ql[0]; # default to something sane in case we don't match any or have only one
# Scan the candidates; the one whose md5sum/size matches the
# .dsc wins. NOTE(review): old_file_fh is never closed here.
old_file = i[0] + i[1];
old_file_fh = utils.open_file(old_file)
actual_md5 = apt_pkg.md5sum(old_file_fh);
actual_size = os.stat(old_file)[stat.ST_SIZE];
if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
# Non-matching candidates must not be touched by later
# pool-shuffling code — mark them untouchable by file id.
legacy_source_untouchable[i[3]] = "";
# Recompute checksum/size for the chosen candidate 'x'.
# NOTE(review): this handle is not closed either.
old_file = x[0] + x[1];
old_file_fh = utils.open_file(old_file)
actual_md5 = apt_pkg.md5sum(old_file_fh);
actual_size = os.stat(old_file)[stat.ST_SIZE];
dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
# See install() in katie...
self.pkg.orig_tar_id = x[3];
self.pkg.orig_tar_gz = old_file;
# 'suite_type' (location type, from a missing line — presumably
# x[2]) decides whether the tarball lives in a legacy location.
if suite_type == "legacy" or suite_type == "legacy-mixed":
self.pkg.orig_tar_location = "legacy";
self.pkg.orig_tar_location = x[4];
# Not there? Check the queue directories...
in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
# See process_it() in jennifer for explanation of this
if os.path.exists(in_unchecked):
# Found in unchecked: return its path so the caller can act on it.
return (self.reject_message, in_unchecked);
for dir in [ "Accepted", "New", "Byhand" ]:
in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
if os.path.exists(in_otherdir):
in_otherdir_fh = utils.open_file(in_otherdir)
actual_md5 = apt_pkg.md5sum(in_otherdir_fh);
in_otherdir_fh.close()
actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
self.pkg.orig_tar_gz = in_otherdir;
# Not found anywhere for an .orig.tar.gz: reject and flag with -1.
self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
self.pkg.orig_tar_gz = -1;
# (Presumably the non-.orig.tar.gz branch: such files must be in
# the upload itself — confirm against the missing else header.)
self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));
# Final check: whatever copy we settled on must match the checksum
# and size declared in the .dsc.
if actual_md5 != dsc_files[dsc_file]["md5sum"]:
self.reject("md5sum for %s doesn't match %s." % (found, file));
if actual_size != int(dsc_files[dsc_file]["size"]):
self.reject("size for %s doesn't match %s." % (found, file));
return (self.reject_message, None);
1009 def do_query(self, q):
1010 sys.stderr.write("query: \"%s\" ... " % (q));
1011 before = time.time();
1012 r = self.projectB.query(q);
1013 time_diff = time.time()-before;
1014 sys.stderr.write("took %.3f seconds.\n" % (time_diff));