3 # Utility functions for katie
4 # Copyright (C) 2001, 2002, 2003, 2004 James Troup <james@nocrew.org>
5 # $Id: katie.py,v 1.44 2004-02-27 20:07:40 troup Exp $
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
21 ###############################################################################
23 import cPickle, errno, os, pg, re, stat, string, sys, tempfile, time;
24 import utils, db_access;
25 import apt_inst, apt_pkg;
29 ###############################################################################
# Pre-compiled regular expressions used throughout this module.
# All patterns use raw string literals so regex escapes such as \d and \.
# are not subject to Python string-escape interpretation (behavior is
# unchanged; non-raw "\d"/"\." happened to pass through unmodified).
# Matches a string consisting entirely of digits (e.g. a bug number).
re_isanum = re.compile (r"^\d+$");
# Captures the default answer shown in square brackets in a prompt.
re_default_answer = re.compile(r"\[(.*)\]");
# Paragraph separator: two consecutive newlines ("first dot newline in changes").
re_fdnic = re.compile(r"\n\n");
# Trailing ".X.Y" of a binary-only NMU of a maintainer upload (e.g. 1.0-3.0.1).
re_bin_only_nmu_of_mu = re.compile(r"\.\d+\.\d+$");
# Trailing ".X" of a binary-only NMU of a sourceful NMU (e.g. 1.0-3.1.1).
re_bin_only_nmu_of_nmu = re.compile(r"\.\d+$");
37 ###############################################################################
# Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    """Seed the instance namespace directly from the keyword arguments."""
    vars(self).update(kwds);
def update(self, **kwds):
    """Merge the given keyword arguments into the instance namespace,
    overwriting any attributes that already exist."""
    vars(self).update(kwds);
48 ###############################################################################
# Read in the group maintainer override file
def __init__ (self, Cnf):
    # Maps lower-cased maintainer addresses of group-maintained packages
    # to 1; consulted by is_an_nmu() to skip NMU detection for them.
    self.group_maint = {};
    # NOTE(review): one line (original line 54) is not visible in this view.
    if Cnf.get("Dinstall::GroupOverrideFilename"):
        filename = Cnf["Dir::Override"] + Cnf["Dinstall::GroupOverrideFilename"];
        file = utils.open_file(filename);
        for line in file.readlines():
            # Strip comments and normalise the address before keying on it.
            line = utils.re_comments.sub('', line).lower().strip();
            # NOTE(review): a guard line (original line 60, presumably
            # skipping empty lines) is not visible in this view.
            self.group_maint[line] = 1;
def is_an_nmu (self, pkg):
    """Decide whether the upload described by 'pkg' is an NMU
    (non-maintainer upload).  NOTE(review): the return statements, the
    local 'Cnf'/'dsc' bindings and the uploaders loop header are not
    visible in this view; gaps are flagged inline."""
    changes = pkg.changes;
    # ('dsc' and 'Cnf' are bound in lines not shown in this view)
    (dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"]).lower());
    # changes["changedbyname"] == dsc_name is probably never true, but better safe than sorry
    if dsc_name == changes["maintainername"].lower() and \
       (changes["changedby822"] == "" or changes["changedbyname"].lower() == dsc_name):
        # Maintainer uploaded their own package -- not an NMU.
        # (the return itself is not visible in this view)

    # Uploaders listed in the .dsc also count as non-NMU uploaders.
    if dsc.has_key("uploaders"):
        uploaders = dsc["uploaders"].lower().split(",");
        # (the 'uploadernames' initialisation and the loop header over
        # 'uploaders' are not visible in this view; 'i' is the loop var)
        (rfc822, name, email) = utils.fix_maintainer (i.strip());
        uploadernames[name] = "";
        if uploadernames.has_key(changes["changedbyname"].lower()):
            # Changed-By is a listed uploader -- not an NMU.
            # (the return itself is not visible in this view)

    # Some group maintained packages (e.g. Debian QA) are never NMU's
    if self.group_maint.has_key(changes["maintaineremail"].lower()):
90 ###############################################################################
def __init__(self, Cnf):
    """Set up per-run state: the NMU detector, accept statistics, the
    shared Pkg container, the template substitution map and the
    projectb database connection."""
    # NOTE(review): a line not visible in this view (original line 95)
    # precedes this -- presumably 'self.Cnf = Cnf', which later methods
    # (e.g. update_subst) rely on; confirm against the full source.
    # Read in the group-maint override file
    self.nmu = nmu_p(Cnf);
    # Running totals for accepted uploads (used for end-of-run stats).
    self.accept_count = 0;
    self.accept_bytes = 0L;
    # Shared per-upload state container, reset by init_vars().
    self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
                   legacy_source_untouchable = {});

    # Initialize the substitution template mapping global
    Subst = self.Subst = {};
    Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"];
    Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"];
    Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"];
    Subst["__KATIE_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];

    # Open the projectb database connection and initialise db_access's
    # cached state with it.
    self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]));
    db_access.init(Cnf, self.projectB);
113 ###########################################################################
def init_vars (self):
    """Reset the shared per-upload package state ready for the next upload.

    The dictionaries are emptied in place (clear()) rather than rebound
    because other code holds references to them."""
    # getattr() replaces the original string-building exec, which was
    # slower and needlessly dynamic; behavior is identical.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear();
    self.pkg.orig_tar_id = None;
    self.pkg.orig_tar_location = "";
121 ###########################################################################
def update_vars (self):
    """Reload the shared package state from the upload's ".katie" dump
    file, previously written by dump_vars().

    The load order must match the dump order in dump_vars()."""
    # The dump lives next to the .changes file: strip the 8-character
    # ".changes" suffix and append ".katie".
    dump_filename = self.pkg.changes_file[:-8]+".katie";
    dump_file = utils.open_file(dump_filename);
    p = cPickle.Unpickler(dump_file);
    # Update the shared dictionaries in place (they are held by
    # reference elsewhere).  getattr()/setattr() replace the original
    # string-building exec calls; behavior is identical.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).update(p.load());
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        setattr(self.pkg, i, p.load());
    # Close the dump file rather than leaking the descriptor.
    dump_file.close();
133 ###########################################################################
# This could just dump the dictionaries as is, but I'd like to avoid
# this so there's some idea of what katie & lisa use from jennifer
def dump_vars(self, dest_dir):
    """Pickle a filtered copy of the package state into
    <dest_dir>/<changes-basename>.katie for later reload by update_vars().
    NOTE(review): several lines of this method (the try/except around the
    chmod, the 'd_*' dictionary initialisations, the dsc copy and the
    final p.dump() loop body / file close) are not visible in this view;
    gaps are flagged inline."""
    # Bind each piece of the shared package state to a local name.
    for i in [ "changes", "dsc", "files", "dsc_files",
               "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
        exec "%s = self.pkg.%s;" % (i,i);
    dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
    dump_file = utils.open_file(dump_filename, 'w');
    # The dump must not be world-readable.
    # (the 'try:' guarding this chmod is not visible in this view)
    os.chmod(dump_filename, 0660);
    # (the 'except OSError, e:' line is not visible in this view)
    if errno.errorcode[e.errno] == 'EPERM':
        perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
        if perms & stat.S_IROTH:
            utils.fubar("%s is world readable and chmod failed." % (dump_filename));
    p = cPickle.Pickler(dump_file, 1);
    # Filtered copies of the dictionaries; their initialisation
    # statements are not visible in this view.
    for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
    # Copy only the whitelisted keys of each file entry.
    for file in files.keys():
        # (the 'd_files[file] = {}' initialisation is not visible here)
        for i in [ "package", "version", "architecture", "type", "size",
                   "md5sum", "component", "location id", "source package",
                   "source version", "maintainer", "dbtype", "files id",
                   "new", "section", "priority", "othercomponents",
                   "pool name", "original component" ]:
            if files[file].has_key(i):
                d_files[file][i] = files[file][i];
    # Mandatory changes fields
    for i in [ "distribution", "source", "architecture", "version", "maintainer",
               "urgency", "fingerprint", "changedby822", "changedbyname",
               "maintainername", "maintaineremail", "closes" ]:
        d_changes[i] = changes[i];
    # Optional changes fields
    # FIXME: changes should be mandatory
    for i in [ "changed-by", "maintainer822", "filecontents", "format",
               "changes", "lisa note" ]:
        if changes.has_key(i):
            d_changes[i] = changes[i];
    # Mandatory dsc fields (the copy into d_dsc is not visible here)
    for i in [ "source", "version", "maintainer", "fingerprint", "uploaders" ]:
    for file in dsc_files.keys():
        d_dsc_files[file] = {};
        # Mandatory dsc_files fields
        for i in [ "size", "md5sum" ]:
            d_dsc_files[file][i] = dsc_files[file][i];
        # Optional dsc_files fields
        for i in [ "files id" ]:
            if dsc_files[file].has_key(i):
                d_dsc_files[file][i] = dsc_files[file][i];
    # Pickle each structure in a fixed order; update_vars() relies on
    # this order.  (the p.dump(i) call and the dump_file.close() are
    # not visible in this view)
    for i in [ d_changes, d_dsc, d_files, d_dsc_files,
               legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
199 ###########################################################################
# Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
    """Refresh the per-upload entries of the Subst template map from the
    current .changes data.  NOTE(review): the local 'Subst' binding
    (original line 204, presumably 'Subst = self.Subst') and the 'else:'
    of the maintainer branch are not visible in this view."""
    changes = self.pkg.changes;
    # If jennifer crashed out in the right place, architecture may still be a string.
    if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
        changes["architecture"] = { "Unknown" : "" };
    # and maintainer822 may not exist.
    if not changes.has_key("maintainer822"):
        changes["maintainer822"] = self.Cnf["Dinstall::MyEmailAddress"];

    Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys());
    Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file);
    Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "");

    # For source uploads the Changed-By field wins; otherwise Maintainer wins.
    if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
        Subst["__MAINTAINER_FROM__"] = changes["changedby822"];
        Subst["__MAINTAINER_TO__"] = changes["changedby822"] + ", " + changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown");
    # (the 'else:' introducing this branch is not visible in this view)
        Subst["__MAINTAINER_FROM__"] = changes["maintainer822"];
        Subst["__MAINTAINER_TO__"] = changes["maintainer822"];
        Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown");
    if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
        # Bcc the package-tracking service for sourceful uploads.
        Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])

    # Apply any global override of the Maintainer field
    if self.Cnf.get("Dinstall::OverrideMaintainer"):
        Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"];
        Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"];

    Subst["__REJECT_MESSAGE__"] = reject_message;
    Subst["__SOURCE__"] = changes.get("source", "Unknown");
    Subst["__VERSION__"] = changes.get("version", "Unknown");
238 ###########################################################################
def build_summaries(self):
    """Build and return (summary, short_summary) describing this upload:
    per-file disposition lines, plus (in the long form) the Changes text
    and announcement info.  NOTE(review): a few lines (flag assignments,
    an 'else:' and a surrounding condition) are not visible in this view;
    gaps are flagged inline."""
    changes = self.pkg.changes;
    files = self.pkg.files;

    byhand = summary = new = "";

    # changes["distribution"] may not exist in corner cases
    # (e.g. unreadable changes files)
    if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
        changes["distribution"] = {};

    file_keys = files.keys();
    for file in file_keys:
        if files[file].has_key("byhand"):
            # (a line setting the 'byhand' flag is not visible here)
            summary += file + " byhand\n"
        elif files[file].has_key("new"):
            # (a line setting the 'new' flag is not visible here)
            summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
            if files[file].has_key("othercomponents"):
                summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
            if files[file]["type"] == "deb":
                # Include the package Description from the .deb control data.
                summary += apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file)))["Description"] + '\n';
        # (the 'else:' introducing the normal-file branch is not visible here)
            files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
            destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
            summary += file + "\n  to " + destination + "\n"

    short_summary = summary;

    # This is for direport's benefit...
    f = re_fdnic.sub("\n .\n", changes.get("changes",""));

    # (the condition guarding this, original lines 273-274, is not
    # visible in this view)
    summary += "Changes: " + f;

    summary += self.announce(short_summary, 0)

    return (summary, short_summary);
281 ###########################################################################
def close_bugs (self, summary, action):
    """Close or tag the bugs listed in the .changes Closes field,
    sending the appropriate mails when 'action' is set, and return the
    updated summary.  NOTE(review): several control-flow lines (early
    exits, 'for bug in bugs:' headers, 'else:' branches and the final
    return) are not visible in this view; gaps are flagged inline."""
    changes = self.pkg.changes;
    bugs = changes["closes"].keys();

    if not self.nmu.is_an_nmu(self.pkg):
        if changes["distribution"].has_key("experimental"):
            # tag bugs as fixed-in-experimental for uploads to experimental
            summary += "Setting bugs to severity fixed: ";
            control_message = "";
            # (the loop header over 'bugs' is not visible here)
            summary += "%s " % (bug);
            control_message += "tag %s + fixed-in-experimental\n" % (bug);
            if action and control_message != "":
                Subst["__CONTROL_MESSAGE__"] = control_message;
                mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-experimental-fixed");
                utils.send_mail (mail_message);
            self.Logger.log(["setting bugs to fixed"]+bugs);
        # (else: normal maintainer upload -- actually close the bugs)
            summary += "Closing bugs: ";
            # (the loop header over 'bugs' is not visible here)
            summary += "%s " % (bug);
            Subst["__BUG_NUMBER__"] = bug;
            if changes["distribution"].has_key("stable"):
                Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
            Subst["__STABLE_WARNING__"] = "";
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-close");
            utils.send_mail (mail_message);
        self.Logger.log(["closing bugs"]+bugs);
    # (else: NMU -- tag the bugs fixed instead of closing them)
        summary += "Setting bugs to severity fixed: ";
        control_message = "";
        # (the loop header over 'bugs' is not visible here)
        summary += "%s " % (bug);
        control_message += "tag %s + fixed\n" % (bug);
        if action and control_message != "":
            Subst["__CONTROL_MESSAGE__"] = control_message;
            mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.bug-nmu-fixed");
            utils.send_mail (mail_message);
        self.Logger.log(["setting bugs to fixed"]+bugs);
345 ###########################################################################
def announce (self, short_summary, action):
    """Announce sourceful uploads to the per-suite announce lists (when
    'action' is set) and hand off to close_bugs() when configured.
    NOTE(review): local bindings (Cnf, Subst, summary, lists_done),
    early returns and the 'if action:' guard are not visible in this
    view; gaps are flagged inline."""
    changes = self.pkg.changes;

    # Only do announcements for source uploads with a recent dpkg-dev installed
    if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):

    Subst["__SHORT_SUMMARY__"] = short_summary;

    for dist in changes["distribution"].keys():
        list = Cnf.Find("Suite::%s::Announce" % (dist));
        # Announce to each list address at most once.
        if list == "" or lists_done.has_key(list):
        lists_done[list] = 1;
        summary += "Announcing to %s\n" % (list);

        # (an 'if action:' guard is not visible in this view)
        Subst["__ANNOUNCE_LIST_ADDRESS__"] = list;
        if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
            # Bcc the package-tracking service as well.
            Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"]);
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.announce");
        utils.send_mail (mail_message);

    if Cnf.FindB("Dinstall::CloseBugs"):
        summary = self.close_bugs(summary, action);
379 ###########################################################################
def accept (self, summary, short_summary):
    """Accept an upload: dump state, move the files into the accepted
    queue, mail/announce, and maintain the accepted_autobuild support
    tables/symlinks.  NOTE(review): local bindings (Cnf, Subst), some
    guards, 'else:'/'continue' lines and assignments are not visible in
    this view; gaps are flagged inline."""
    files = self.pkg.files;

    self.Logger.log(["Accepting changes",self.pkg.changes_file]);

    self.dump_vars(Cnf["Dir::Queue::Accepted"]);

    # Move all the files into the accepted directory
    utils.move(self.pkg.changes_file, Cnf["Dir::Queue::Accepted"]);
    file_keys = files.keys();
    for file in file_keys:
        utils.move(file, Cnf["Dir::Queue::Accepted"]);
        self.accept_bytes += float(files[file]["size"])
    self.accept_count += 1;

    # Send accept mail, announce to lists, close bugs and check for
    # override disparities
    if not Cnf["Dinstall::Options::No-Mail"]:
        Subst["__SUITE__"] = "";
        Subst["__SUMMARY__"] = summary;
        mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/jennifer.accepted");
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

    # Special support to enable clean auto-building of accepted packages
    self.projectB.query("BEGIN WORK");
    for suite in self.pkg.changes["distribution"].keys():
        if suite not in Cnf.ValueList("Dinstall::AcceptedAutoBuildSuites"):
            # (the 'continue' skipping non-autobuild suites is not
            # visible in this view)
        suite_id = db_access.get_suite_id(suite);
        dest_dir = Cnf["Dir::AcceptedAutoBuild"];
        if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
            dest_dir = os.path.join(dest_dir, suite);
        for file in file_keys:
            src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
            dest = os.path.join(dest_dir, file);
            if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
                # Copy it since the original won't be readable by www-data
                utils.copy(src, dest);
            # (the 'else:' introducing this branch is not visible here)
                # Create a symlink to it
                os.symlink(src, dest);
            # Add it to the list of packages for later processing by apt-ftparchive
            self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
        # If the .orig.tar.gz is in the pool, create a symlink to
        # it (if one doesn't already exist)
        if self.pkg.orig_tar_id:
            # Determine the .orig.tar.gz file name
            for dsc_file in self.pkg.dsc_files.keys():
                if dsc_file.endswith(".orig.tar.gz"):
                    # (the 'filename = ...' assignment is not visible here)
            dest = os.path.join(dest_dir, filename);
            # If it doesn't exist, create a symlink
            if not os.path.exists(dest):
                # Find the .orig.tar.gz in the pool
                q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id));
                # (the 'ql = q.getresult()' binding and empty-result
                # guard are not visible in this view)
                utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id));
                src = os.path.join(ql[0][0], ql[0][1]);
                os.symlink(src, dest);
                # Add it to the list of packages for later processing by apt-ftparchive
                self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
            # if it does, update things to ensure it's not removed prematurely
            # (the 'else:' introducing this branch is not visible here)
                self.projectB.query("UPDATE accepted_autobuild SET in_accepted = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));

    self.projectB.query("COMMIT WORK");
453 ###########################################################################
def check_override (self):
    """Compare each binary's section/priority against the override data
    recorded by in_override_p() and mail the maintainer about any
    disparities.  NOTE(review): local bindings, the early 'return', a
    'continue' and the empty-summary guard are not visible in this
    view; gaps are flagged inline."""
    changes = self.pkg.changes;
    files = self.pkg.files;

    # Abandon the check if:
    #  a) it's a non-sourceful upload
    #  b) override disparity checks have been disabled
    #  c) we're not sending mail
    if not changes["architecture"].has_key("source") or \
       not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
       Cnf["Dinstall::Options::No-Mail"]:

    file_keys = files.keys();
    for file in file_keys:
        # Only known (non-NEW) binary packages have override entries to compare.
        if not files[file].has_key("new") and files[file]["type"] == "deb":
            section = files[file]["section"];
            override_section = files[file]["override section"];
            if section.lower() != override_section.lower() and section != "-":
                # Ignore this; it's a common mistake and not worth whining about
                if section.lower() == "non-us/main" and override_section.lower() == "non-us":
                summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section);
            priority = files[file]["priority"];
            override_priority = files[file]["override priority"];
            if priority != override_priority and priority != "-":
                summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority);

    # (the 'if summary == "": return' guard and Subst/Cnf bindings,
    # original lines 486-489, are not visible in this view)
    Subst["__SUMMARY__"] = summary;
    mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/jennifer.override-disparity");
    utils.send_mail(mail_message);
494 ###########################################################################
def force_reject (self, files):
    """Forcefully move files from the current directory to the
    reject directory.  If any file already exists in the reject
    directory it will be moved to the morgue to make way for
    the incoming file.  (NOTE(review): the tail of the original
    docstring and the 'for file in files:' loop header are not visible
    in this view.)"""
    # (the loop over 'files' begins in lines not shown here)
    # Skip any files which don't exist or which we don't have permission to copy.
    if os.access(file,os.R_OK) == 0:
        # (the 'continue' is not visible in this view)
    dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file);
    # Claim the destination exclusively (O_EXCL) so we never clobber a
    # file we don't own.  (the 'try:' is not visible in this view)
    os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
    # (the 'except OSError, e:' line is not visible in this view)
    # File exists? Let's try and move it to the morgue
    if errno.errorcode[e.errno] == 'EEXIST':
        morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file);
        # (a 'try:' is not visible in this view)
        morgue_file = utils.find_next_free(morgue_file);
        except utils.tried_too_hard_exc:
            # Something's either gone badly Pete Tong, or
            # someone is trying to exploit us.
            utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file));
        utils.move(dest_file, morgue_file, perms=0660);
        # Retry the exclusive create now the old file is out of the way.
        # (the surrounding try/except is not fully visible in this view)
        os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);
        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file));

    # If we got here, we own the destination file, so we can
    # safely overwrite it.
    utils.move(file, dest_file, 1, perms=0660);
535 ###########################################################################
def do_reject (self, manual = 0, reject_message = ""):
    """Reject an upload: optionally let the operator edit/confirm the
    rejection message, move the files to the reject queue, write a
    <foo>.reason file and (unless mail is disabled) send the rejection
    mail.  NOTE(review): many lines are not visible in this view (the
    edit/confirm loop plumbing, handling of the Edit/Abandon/Quit
    answers, local Cnf/Subst/pkg bindings and branch keywords); gaps
    are flagged inline."""
    # If we weren't given a manual rejection message, spawn an
    # editor so the user can add one in...
    if manual and not reject_message:
        # NOTE: mktemp() is race-prone; the O_EXCL open below mitigates it.
        temp_filename = tempfile.mktemp();
        fd = os.open(temp_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700);
        # (edit-loop initialisation lines are not visible in this view)
        editor = os.environ.get("EDITOR","vi")
        os.system("%s %s" % (editor, temp_filename))
        file = utils.open_file(temp_filename);
        reject_message = "".join(file.readlines());
        print "Reject message:";
        print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1);
        prompt = "[R]eject, Edit, Abandon, Quit ?"
        # ('answer' initialisation is not visible in this view)
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt);
            m = re_default_answer.search(prompt);
            # (default-answer handling is not visible in this view)
            answer = answer[:1].upper();
        os.unlink(temp_filename);
        # (handling of the Edit/Abandon/Quit answers, original lines
        # 562-572, is not visible in this view)

    # The .reason file shares the .changes basename ('.changes' is 8 chars).
    reason_filename = pkg.changes_file[:-8] + ".reason";
    reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename;

    # Move all the files into the reject directory
    reject_files = pkg.files.keys() + [pkg.changes_file];
    self.force_reject(reject_files);

    # If we fail here someone is probably trying to exploit the race
    # so let's just raise an exception ...
    if os.path.exists(reason_filename):
        os.unlink(reason_filename);
    reason_file = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644);

    # (a branch selecting automatic vs manual rejection is not fully
    # visible in this view)
    Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"];
    Subst["__MANUAL_REJECT_MESSAGE__"] = "";
    Subst["__CC__"] = "X-Katie-Rejection: automatic (moo)";
    os.write(reason_file, reject_message);
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");

    # Build up the rejection email
    user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"]);

    Subst["__REJECTOR_ADDRESS__"] = user_email_address;
    Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message;
    Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"];
    reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/katie.rejected");
    # Write the rejection email out as the <foo>.reason file
    os.write(reason_file, reject_mail_message);

    os.close(reason_file);

    # Send the rejection mail if appropriate
    if not Cnf["Dinstall::Options::No-Mail"]:
        utils.send_mail(reject_mail_message);

    self.Logger.log(["rejected", pkg.changes_file]);
612 ################################################################################
# Ensure that source exists somewhere in the archive for the binary
# upload being processed.
#  (1) exact match                  => 1.0-3
#  (2) Bin-only NMU of an MU        => 1.0-3.0.1
#  (3) Bin-only NMU of a sourceful-NMU => 1.0-3.1.1
def source_exists (self, package, source_version, suites = ["any"]):
    """Check whether 'package' at (a version compatible with)
    'source_version' exists as source in any of 'suites'.
    NOTE(review): the "any"-suite branch, the suite-mapping fixed-point
    loop and all return statements are not visible in this view; gaps
    are flagged inline.  (NB: the mutable default 'suites' list appears
    to be read-only here -- confirm against the full source.)"""
    que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
    # source must exist in suite X, or in some other suite that's
    # mapped to X, recursively... silent-maps are counted too,
    # unreleased-maps aren't.
    maps = self.Cnf.ValueList("SuiteMappings")[:]
    maps = [ m.split() for m in maps ]
    maps = [ (x[1], x[2]) for x in maps
             if x[0] == "map" or x[0] == "silent-map" ]
    # (the loop expanding the suite set 's' via 'maps' is only
    # partially visible in this view)
    if x[1] in s and x[0] not in s:
    que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, string.join(["su.suite_name = '%s'" % a for a in s], " OR "));
    q = self.projectB.query(que)

    # Reduce the query results to a list of version numbers
    ql = map(lambda x: x[0], q.getresult());

    # (1) exact match
    if source_version in ql:
    # (2) bin-only NMU of a maintainer upload: strip the ".X.Y" suffix
    orig_source_version = re_bin_only_nmu_of_mu.sub('', source_version)
    if orig_source_version in ql:
    # (3) bin-only NMU of a sourceful NMU: strip the ".X" suffix
    orig_source_version = re_bin_only_nmu_of_nmu.sub('', source_version)
    if orig_source_version in ql:
665 ################################################################################
def in_override_p (self, package, component, suite, binary_type, file):
    """Look up 'package' in the override table for 'suite'/'component'
    and record the override section/priority on files[file].
    NOTE(review): the branch computing 'type' from binary_type, the
    unknown-suite guard and the return statements are not visible in
    this view; gaps are flagged inline."""
    files = self.pkg.files;

    if binary_type == "": # must be source
    # (the 'else:' assigning type = binary_type is not visible here)

    # Override suite name; used for example with proposed-updates
    if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
        suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)];

    # Avoid <undef> on unknown distributions
    suite_id = db_access.get_suite_id(suite);
    # (the unknown-suite check on suite_id is not visible in this view)
    component_id = db_access.get_component_id(component);
    type_id = db_access.get_override_type_id(type);

    # FIXME: nasty non-US specific hack
    if component[:7].lower() == "non-us/":
        component = component[7:];

    q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
                            % (package, suite_id, component_id, type_id));
    result = q.getresult();
    # If checking for a source package fall back on the binary override type
    if type == "dsc" and not result:
        deb_type_id = db_access.get_override_type_id("deb");
        udeb_type_id = db_access.get_override_type_id("udeb");
        q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
                                % (package, suite_id, component_id, deb_type_id, udeb_type_id));
        result = q.getresult();

    # Remember the section and priority so we can check them later if appropriate
    # (the guard on a non-empty 'result' is not visible in this view)
    files[file]["override section"] = result[0][0];
    files[file]["override priority"] = result[0][1];
708 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Append one rejection line (prefix + str) to self.reject_message,
    newline-separating entries while leaving no trailing newline."""
    # Unlike other rejects we add new lines first to avoid trailing
    # new lines when this message is passed back up to a caller.
    pieces = [];
    if self.reject_message:
        pieces.append(self.reject_message);
    pieces.append(prefix + str);
    self.reject_message = "\n".join(pieces);
718 ################################################################################
def cross_suite_version_check(self, query_result, file, new_version):
    """Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied.

    query_result rows carry (existent_version, suite_name); failures
    are accumulated via self.reject().  NOTE(review): the line binding
    'suite' from each row (original line 734) is not visible in this
    view."""
    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        must_be_newer_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
        must_be_older_than = map(string.lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite);
        for entry in query_result:
            existent_version = entry[0];
            # ('suite' is bound from the entry in a line not shown here)
            if suite in must_be_newer_than and \
               apt_pkg.VersionCompare(new_version, existent_version) != 1:
                self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
            if suite in must_be_older_than and \
               apt_pkg.VersionCompare(new_version, existent_version) != -1:
                self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
742 ################################################################################
def check_binary_against_db(self, file):
    """Version-check a binary upload against the archive and refuse to
    overwrite an existing copy of the same package/version/arch.
    Returns the (possibly empty) accumulated rejection message.
    NOTE(review): one line of the first SQL statement (original line
    751, presumably the architecture table in the FROM clause) and the
    guard before the final reject are not visible in this view."""
    self.reject_message = "";
    files = self.pkg.files;

    # Ensure version is sane
    q = self.projectB.query("""
SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
   AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
                            % (files[file]["package"],
                               files[file]["architecture"]));
    self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);

    # Check for any existing copies of the file
    q = self.projectB.query("""
SELECT b.id FROM binaries b, architecture a
 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
   AND a.id = b.architecture"""
                            % (files[file]["package"],
                               files[file]["version"],
                               files[file]["architecture"]))
    # (the 'if q.getresult():' guard is not visible in this view)
    self.reject("%s: can not overwrite existing copy already in the archive." % (file));

    return self.reject_message;
771 ################################################################################
def check_source_against_db(self, file):
    """Cross-suite version-check a source upload against the archive.
    Returns the (possibly empty) accumulated rejection message.
    NOTE(review): the local binding of 'dsc' (presumably
    'dsc = self.pkg.dsc', original lines 775-776) is not visible in
    this view."""
    self.reject_message = "";

    # Ensure version is sane
    q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
    self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));

    return self.reject_message;
785 ################################################################################
# NB: this function can remove entries from the 'files' index [if
# the .orig.tar.gz is a duplicate of the one in the archive]; if
# you're iterating over 'files' and call this function as part of
# the loop, be sure to add a check to the top of the loop to
# ensure you haven't just tried to dereference the deleted entry.
def check_dsc_against_db(self, file):
    """Check the files listed in the .dsc against the archive and the
    queue directories, locating the .orig.tar.gz when it is not part of
    this upload.  Returns (reject_message, orig_tar_gz).
    NOTE(review): many control-flow lines of this method (try/except,
    'continue'/'break'/'else:' lines, loop headers and several
    assignments) are not visible in this view; gaps are flagged
    inline."""
    self.reject_message = "";
    files = self.pkg.files;
    dsc_files = self.pkg.dsc_files;
    legacy_source_untouchable = self.pkg.legacy_source_untouchable;

    # Try and find all files mentioned in the .dsc. This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    for dsc_file in dsc_files.keys():
        if files.has_key(dsc_file):
            # The file is part of this upload.
            actual_md5 = files[dsc_file]["md5sum"];
            actual_size = int(files[dsc_file]["size"]);
            found = "%s in incoming" % (dsc_file)
            # Check the file does not already exist in the archive
            q = self.projectB.query("SELECT size, md5sum, filename FROM files WHERE filename LIKE '%%%s%%'" % (dsc_file));
            # (the result binding, e.g. 'ql = q.getresult()', is not
            # visible in this view)
            # Strip out anything that isn't '%s' or '/%s$'
            # (the loop header over the result rows is not visible here;
            # 'i' is the row variable)
            if i[2] != dsc_file and i[2][-(len(dsc_file)+1):] != '/'+dsc_file:
                self.Logger.log(["check_dsc_against_db",i[2],dsc_file]);
                # (removal of the non-matching row is not visible here)

            # "[katie] has not broken them. [katie] has fixed a
            # brokenness. Your crappy hack exploited a bug in
            #
            # "(Come on! I thought it was always obvious that
            # one just doesn't release different files with
            # the same name and version.)"
            # -- ajk@ on d-devel@l.d.o

            # Ignore exact matches for .orig.tar.gz
            # (a guard on the remaining matches is not visible here)
            if dsc_file.endswith(".orig.tar.gz"):
                # (the loop over the matching rows is not visible here)
                if files.has_key(dsc_file) and \
                   int(files[dsc_file]["size"]) == int(i[0]) and \
                   files[dsc_file]["md5sum"] == i[1]:
                    self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
                    # (removal of the duplicate from 'files' and the
                    # match bookkeeping are not visible here)

            # (the guard leading to this reject is not visible here)
            self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file));
        elif dsc_file.endswith(".orig.tar.gz"):
            # Not in the upload: look for the .orig.tar.gz in the pool.
            q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file));
            # (the result binding is not visible in this view)
            # Strip out anything that isn't '%s' or '/%s$'
            # (the loop header over the result rows is not visible here)
            if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
                self.Logger.log(["check_dsc_against_db",i[1],dsc_file]);
                # (removal of the non-matching row is not visible here)

            # (the guard on a non-empty result is not visible here)
            # Unfortunately, we may get more than one
            # match here if, for example, the package was
            # in potato but had a -sa upload in woody. So
            # we need to choose the right one.

            x = ql[0]; # default to something sane in case we don't match any or have only one

            # (the multi-match branch and its loop header are not
            # visible in this view; 'i' is the row variable)
            old_file = i[0] + i[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
                # (selection of this row as 'x' is not visible here)
            # (the branch recording non-matching legacy rows is only
            # partially visible here)
                legacy_source_untouchable[i[3]] = "";

            old_file = x[0] + x[1];
            actual_md5 = apt_pkg.md5sum(utils.open_file(old_file));
            actual_size = os.stat(old_file)[stat.ST_SIZE];
            # ('found'/'suite_type' assignments are not visible here)
            dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
            # See install() in katie...
            self.pkg.orig_tar_id = x[3];
            if suite_type == "legacy" or suite_type == "legacy-mixed":
                self.pkg.orig_tar_location = "legacy";
            # (the 'else:' introducing this branch is not visible here)
                self.pkg.orig_tar_location = x[4];

            # Not there? Check the queue directories...
            # (the guard introducing this fallback is not visible here)
            in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file);
            # See process_it() in jennifer for explanation of this
            if os.path.exists(in_unchecked):
                return (self.reject_message, in_unchecked);
            # (the 'else:' and subsequent plumbing are not visible here)
            for dir in [ "Accepted", "New", "Byhand" ]:
                in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file);
                if os.path.exists(in_otherdir):
                    actual_md5 = apt_pkg.md5sum(utils.open_file(in_otherdir));
                    actual_size = os.stat(in_otherdir)[stat.ST_SIZE];
                    # ('found'/'orig_tar_gz' assignments are not visible here)

            # (the not-found guard is not visible in this view)
            self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file));
        # (else: a non-.orig.tar.gz file missing from the upload)
            self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file));

        # (a guard skipping the checksum comparison when the file was
        # never found is not visible in this view)
        if actual_md5 != dsc_files[dsc_file]["md5sum"]:
            self.reject("md5sum for %s doesn't match %s." % (found, file));
        if actual_size != int(dsc_files[dsc_file]["size"]):
            self.reject("size for %s doesn't match %s." % (found, file));

    # (the binding of 'orig_tar_gz' is not visible in this view)
    return (self.reject_message, orig_tar_gz);
def do_query(self, q):
    """Debug wrapper around projectB.query(): logs the query text and
    the elapsed wall-clock time to stderr.  NOTE(review): this view
    ends here; the original presumably goes on to return 'r' -- confirm
    against the full source."""
    sys.stderr.write("query: \"%s\" ... " % (q));
    before = time.time();
    r = self.projectB.query(q);
    time_diff = time.time()-before;
    sys.stderr.write("took %.3f seconds.\n" % (time_diff));