3 # Queue utility functions for dak
4 # Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 ###############################################################################
22 import cPickle, errno, os, pg, re, stat, sys, time
23 import apt_inst, apt_pkg
24 import utils, database
28 ###############################################################################
# Precompiled regexps used throughout this module.
# Matches a string consisting only of digits (e.g. a bug number).
30 re_isanum = re.compile (r"^\d+$")
# Extracts the bracketed default answer from a prompt like "[R]eject, Skip ?".
31 re_default_answer = re.compile(r"\[(.*)\]")
# Paragraph break (blank line) inside a changes "Changes:" field; used when
# reformatting the changelog text in build_summaries.
32 re_fdnic = re.compile(r"\n\n")
# Binary-only NMU version suffix, e.g. the "+b1" in "1.0-3+b1".
33 re_bin_only_nmu = re.compile(r"\+b\d+$")
35 ################################################################################
37 # Determine what parts in a .changes are NEW
# Decide which entries of a .changes upload are NEW, i.e. have no override
# record in the archive database for any target suite.
# NOTE(review): several original lines are elided in this view -- notably the
# initialisation of 'new', the bindings of 'f'/'pkg'/'type', and the final
# return -- so comments below describe only what the visible code shows.
39 def determine_new(changes, files, projectB, warn=1):
42 # Build up a list of potentially new things
43 for file in files.keys():
# 'f' is presumably files[file]; the binding line is elided -- confirm.
45 # Skip byhand elements
46 if f["type"] == "byhand":
49 priority = f["priority"]
50 section = f["section"]
52 component = f["component"]
# First file seen for this package: record its metadata wholesale.
56 if not new.has_key(pkg):
58 new[pkg]["priority"] = priority
59 new[pkg]["section"] = section
60 new[pkg]["type"] = type
61 new[pkg]["component"] = component
62 new[pkg]["files"] = []
64 old_type = new[pkg]["type"]
66 # source gets trumped by deb or udeb
68 new[pkg]["priority"] = priority
69 new[pkg]["section"] = section
70 new[pkg]["type"] = type
71 new[pkg]["component"] = component
72 new[pkg]["files"].append(file)
73 if f.has_key("othercomponents"):
74 new[pkg]["othercomponents"] = f["othercomponents"]
# Drop anything that already has an override entry in a target suite.
76 for suite in changes["suite"].keys():
77 suite_id = database.get_suite_id(suite)
78 for pkg in new.keys():
79 component_id = database.get_component_id(new[pkg]["component"])
80 type_id = database.get_override_type_id(new[pkg]["type"])
# NOTE(review): values are interpolated into SQL unescaped; they have
# normally been validated upstream, but parameterisation would be safer.
81 q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
# An existing override means the files are no longer NEW.
84 for file in new[pkg]["files"]:
85 if files[file].has_key("new"):
86 del files[file]["new"]
# Operator warnings for suspicious upload targets.
90 if changes["suite"].has_key("stable"):
91 print "WARNING: overrides will be added for stable!"
92 if changes["suite"].has_key("oldstable"):
93 print "WARNING: overrides will be added for OLDstable!"
94 for pkg in new.keys():
95 if new[pkg].has_key("othercomponents"):
96 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
100 ################################################################################
104 if f.has_key("dbtype"):
106 elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
109 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (type))
111 # Validate the override type
112 type_id = database.get_override_type_id(type)
114 utils.fubar("invalid type (%s) for new. Say wha?" % (type))
118 ################################################################################
120 # check if section/priority values are valid
def check_valid(new):
    """Resolve section/priority database ids for each pending NEW entry.

    Stores the ids under "section id" / "priority id" and forces an id
    of -1 (invalid) for nonsensical combinations: debian-installer
    sections are only valid for udebs, and the "source" priority is
    only valid for .dsc entries (and vice versa in both cases).
    """
    for pkg in new.keys():
        entry = new[pkg]
        sect = entry["section"]
        prio = entry["priority"]
        kind = entry["type"]
        entry["section id"] = database.get_section_id(sect)
        entry["priority id"] = database.get_priority_id(entry["priority"])
        # debian-installer sections and the udeb type imply each other;
        # any mismatch invalidates the section.
        is_di = sect.find("debian-installer") != -1
        if (is_di and kind != "udeb") or (not is_di and kind == "udeb"):
            entry["section id"] = -1
        # The "source" priority and the .dsc type also imply each other.
        if (prio == "source" and kind != "dsc") or \
           (prio != "source" and kind == "dsc"):
            entry["priority id"] = -1
138 ###############################################################################
140 # Convenience wrapper to carry around all the package information in
def __init__(self, **kwds):
    # Seed the instance namespace directly from the keyword arguments,
    # one attribute per keyword.
    for key in kwds.keys():
        setattr(self, key, kwds[key])
def update(self, **kwds):
    # Merge the keyword arguments into the instance namespace,
    # overwriting any attributes that already exist.
    for key, value in kwds.items():
        setattr(self, key, value)
149 ###############################################################################
# Set up per-run upload state: accept counters, the Pkg state holder,
# the global template-substitution map and the DB connection.
# NOTE(review): a few original lines are elided here (presumably
# including "self.Cnf = Cnf", which update_subst later reads) -- confirm.
153 def __init__(self, Cnf):
155 self.accept_count = 0
156 self.accept_bytes = 0L
# Container for all per-package state carried between processing steps.
157 self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
158 legacy_source_untouchable = {})
160 # Initialize the substitution template mapping global
161 Subst = self.Subst = {}
162 Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
163 Subst["__BUG_SERVER__"] = Cnf["Dinstall::BugServer"]
164 Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
165 Subst["__DAK_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
# Open the projectB PostgreSQL connection and prime the id caches.
167 self.projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
168 database.init(Cnf, self.projectB)
170 ###########################################################################
def init_vars (self):
    """Reset all per-upload state held on self.pkg.

    The per-package dictionaries are cleared in place (callers may hold
    references to them) and the .orig.tar.gz bookkeeping attributes are
    reset to their defaults.
    """
    # Use getattr instead of building an exec string: same in-place
    # clear, but no dynamic code execution.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).clear()
    self.pkg.orig_tar_id = None
    self.pkg.orig_tar_location = ""
    self.pkg.orig_tar_gz = None
179 ###########################################################################
def update_vars (self):
    """Restore pickled per-upload state from the .dak dump file.

    Reads <changes_file minus ".changes">.dak and loads, in the order
    written by dump_vars: the five state dictionaries, then orig_tar_id
    and orig_tar_location.
    """
    dump_filename = self.pkg.changes_file[:-8]+".dak"
    dump_file = utils.open_file(dump_filename)
    p = cPickle.Unpickler(dump_file)
    # getattr/setattr replace the previous exec-on-a-string approach:
    # same attribute access, no dynamic code execution.
    for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
        getattr(self.pkg, i).update(p.load())
    for i in [ "orig_tar_id", "orig_tar_location" ]:
        setattr(self.pkg, i, p.load())
    # Close the dump file instead of leaking the handle.
    dump_file.close()
191 ###########################################################################
193 # This could just dump the dictionaries as is, but I'd like to
194 # avoid this so there's some idea of what process-accepted &
195 # process-new use from process-unchecked
# Serialise a curated subset of self.pkg state to <changes>.dak in
# dest_dir, for later consumption by process-accepted / process-new.
# NOTE(review): this view elides several lines -- the try/except around
# chmod, the d_* dict initialisations, and the final pickle loop body --
# comments below only cover what is visible.
197 def dump_vars(self, dest_dir):
# Bind each pkg attribute to a same-named local via exec (dynamic, but
# byte-identical here; see update_vars for the getattr equivalent).
198 for i in [ "changes", "dsc", "files", "dsc_files",
199 "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
200 exec "%s = self.pkg.%s;" % (i,i)
201 dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
202 dump_file = utils.open_file(dump_filename, 'w')
# Keep the dump group-readable but not world-readable.
204 os.chmod(dump_filename, 0660)
206 if errno.errorcode[e.errno] == 'EPERM':
207 perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
# Only fatal if the pre-existing file leaks to other users.
208 if perms & stat.S_IROTH:
209 utils.fubar("%s is world readable and chmod failed." % (dump_filename))
213 p = cPickle.Pickler(dump_file, 1)
214 for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
# Copy only the whitelisted keys of each file entry.
217 for file in files.keys():
219 for i in [ "package", "version", "architecture", "type", "size",
220 "md5sum", "component", "location id", "source package",
221 "source version", "maintainer", "dbtype", "files id",
222 "new", "section", "priority", "othercomponents",
223 "pool name", "original component" ]:
224 if files[file].has_key(i):
225 d_files[file][i] = files[file][i]
227 # Mandatory changes fields
228 for i in [ "distribution", "source", "architecture", "version",
229 "maintainer", "urgency", "fingerprint", "changedby822",
230 "changedby2047", "changedbyname", "maintainer822",
231 "maintainer2047", "maintainername", "maintaineremail",
232 "closes", "changes" ]:
233 d_changes[i] = changes[i]
234 # Optional changes fields
235 for i in [ "changed-by", "filecontents", "format", "process-new note", "adv id", "distribution-version" ]:
236 if changes.has_key(i):
237 d_changes[i] = changes[i]
239 for i in [ "source", "version", "maintainer", "fingerprint",
240 "uploaders", "bts changelog", "dm-upload-allowed" ]:
244 for file in dsc_files.keys():
245 d_dsc_files[file] = {}
246 # Mandatory dsc_files fields
247 for i in [ "size", "md5sum" ]:
248 d_dsc_files[file][i] = dsc_files[file][i]
249 # Optional dsc_files fields
250 for i in [ "files id" ]:
251 if dsc_files[file].has_key(i):
252 d_dsc_files[file][i] = dsc_files[file][i]
# Dump order here must match the load order in update_vars.
254 for i in [ d_changes, d_dsc, d_files, d_dsc_files,
255 legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
259 ###########################################################################
261 # Set up the per-package template substitution mappings
# Populate self.Subst with per-upload values (maintainer addressing,
# filenames, reject message, source/version) used by the mail templates.
# NOTE(review): the line binding 'Subst' (presumably "Subst = self.Subst")
# and the 'else:' before line 286 are elided in this view.
263 def update_subst (self, reject_message = ""):
265 changes = self.pkg.changes
266 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
267 if not changes.has_key("architecture") or not isinstance(changes["architecture"], DictType):
268 changes["architecture"] = { "Unknown" : "" }
269 # and maintainer2047 may not exist.
270 if not changes.has_key("maintainer2047"):
271 changes["maintainer2047"] = self.Cnf["Dinstall::MyEmailAddress"]
273 Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
274 Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
275 Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
277 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
278 if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
279 Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
# Mail both the sponsoree (Changed-By) and the maintainer.
280 Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
281 changes["maintainer2047"])
282 if "sponsoremail" in changes:
283 Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
284 Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
# (else branch: binary-only or self-uploaded -- address the maintainer.)
286 Subst["__MAINTAINER_FROM__"] = changes["maintainer2047"]
287 Subst["__MAINTAINER_TO__"] = changes["maintainer2047"]
288 if "sponsoremail" in changes:
289 Subst["__MAINTAINER_TO__"] += ", %s"%changes["sponsoremail"]
290 Subst["__MAINTAINER__"] = changes.get("maintainer", "Unknown")
# Bcc the package-tracking system when configured.
291 if self.Cnf.has_key("Dinstall::TrackingServer") and changes.has_key("source"):
292 Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (changes["source"], self.Cnf["Dinstall::TrackingServer"])
294 # Apply any global override of the Maintainer field
295 if self.Cnf.get("Dinstall::OverrideMaintainer"):
296 Subst["__MAINTAINER_TO__"] = self.Cnf["Dinstall::OverrideMaintainer"]
297 Subst["__MAINTAINER_FROM__"] = self.Cnf["Dinstall::OverrideMaintainer"]
299 Subst["__REJECT_MESSAGE__"] = reject_message
300 Subst["__SOURCE__"] = changes.get("source", "Unknown")
301 Subst["__VERSION__"] = changes.get("version", "Unknown")
303 ###########################################################################
# Build the human-readable summary of the upload (byhand/new/normal
# files, destinations, changelog text and override entries); returns
# (summary, short_summary) where short_summary omits the changelog.
# NOTE(review): several lines are elided in this view (e.g. the
# assignments setting 'byhand'/'new' flags and an 'else:' before 333).
305 def build_summaries(self):
306 changes = self.pkg.changes
307 files = self.pkg.files
309 byhand = summary = new = ""
311 # changes["distribution"] may not exist in corner cases
312 # (e.g. unreadable changes files)
313 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
314 changes["distribution"] = {}
316 override_summary ="";
317 file_keys = files.keys()
319 for file in file_keys:
320 if files[file].has_key("byhand"):
322 summary += file + " byhand\n"
323 elif files[file].has_key("new"):
325 summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
326 if files[file].has_key("othercomponents"):
327 summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
# For NEW debs, include the package Description from its control file.
328 if files[file]["type"] == "deb":
329 deb_fh = utils.open_file(file)
330 summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
# Ordinary files: record their eventual pool destination.
333 files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
334 destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
335 summary += file + "\n to " + destination + "\n"
336 if not files[file].has_key("type"):
337 files[file]["type"] = "unknown"
338 if files[file]["type"] in ["deb", "udeb", "dsc"]:
339 # (queue/unchecked), there we have override entries already, use them
340 # (process-new), there we dont have override entries, use the newly generated ones.
341 override_prio = files[file].get("override priority", files[file]["priority"])
342 override_sect = files[file].get("override section", files[file]["section"])
343 override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
345 short_summary = summary
347 # This is for direport's benefit...
348 f = re_fdnic.sub("\n .\n", changes.get("changes",""))
351 summary += "Changes: " + f
353 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
# announce() with action=0 only reports what would be announced.
355 summary += self.announce(short_summary, 0)
357 return (summary, short_summary)
359 ###########################################################################
# Append a "Closing bugs:" section to the summary and, when action is
# set, send a bug-close mail per bug listed in the changes file.
# NOTE(review): the per-bug loop header, guards and the closing quote of
# the template string (original lines 363-375, 383-384 etc.) are elided
# in this view.
361 def close_bugs (self, summary, action):
362 changes = self.pkg.changes
366 bugs = changes["closes"].keys()
372 summary += "Closing bugs: "
374 summary += "%s " % (bug)
376 Subst["__BUG_NUMBER__"] = bug
377 if changes["distribution"].has_key("stable"):
378 Subst["__STABLE_WARNING__"] = """
379 Note that this package is not part of the released stable Debian
380 distribution.  It may have dependencies on other unreleased software,
381 or other instabilities.  Please take care if you wish to install it.
382 The update will eventually make its way into the next released Debian
385 Subst["__STABLE_WARNING__"] = ""
386 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
387 utils.send_mail (mail_message)
389 self.Logger.log(["closing bugs"]+bugs)
394 ###########################################################################
# Announce a source upload to the per-suite announce lists and hand the
# summary on to close_bugs when bug-closing is enabled; returns the
# accumulated summary text (return line elided in this view).
# NOTE(review): the 'Cnf'/'Subst' bindings, 'lists_done'/'summary'
# initialisation and several guards are elided here.
396 def announce (self, short_summary, action):
399 changes = self.pkg.changes
401 # Only do announcements for source uploads with a recent dpkg-dev installed
402 if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
407 Subst["__SHORT_SUMMARY__"] = short_summary
409 for dist in changes["distribution"].keys():
410 list = Cnf.Find("Suite::%s::Announce" % (dist))
# Skip suites with no announce list or ones already mailed this run.
411 if list == "" or lists_done.has_key(list):
414 summary += "Announcing to %s\n" % (list)
417 Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
418 if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
419 Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
420 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
421 utils.send_mail (mail_message)
423 if Cnf.FindB("Dinstall::CloseBugs"):
424 summary = self.close_bugs(summary, action)
428 ###########################################################################
# Accept an upload: move its files into the accepted queue, send the
# accepted mail / announcements, emit DebBugs version-tracking files and
# queue the package for auto-building.
# NOTE(review): the 'Cnf'/'Subst'/'dsc' bindings and a few other lines
# are elided in this view.
430 def accept (self, summary, short_summary):
433 files = self.pkg.files
434 changes = self.pkg.changes
435 changes_file = self.pkg.changes_file
439 self.Logger.log(["Accepting changes",changes_file])
# Persist state first so process-accepted can pick it up.
441 self.dump_vars(Cnf["Dir::Queue::Accepted"])
443 # Move all the files into the accepted directory
444 utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
445 file_keys = files.keys()
446 for file in file_keys:
447 utils.move(file, Cnf["Dir::Queue::Accepted"])
448 self.accept_bytes += float(files[file]["size"])
449 self.accept_count += 1
451 # Send accept mail, announce to lists, close bugs and check for
452 # override disparities
453 if not Cnf["Dinstall::Options::No-Mail"]:
454 Subst["__SUITE__"] = ""
455 Subst["__SUMMARY__"] = summary
456 mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
457 utils.send_mail(mail_message)
458 self.announce(short_summary, 1)
461 ## Helper stuff for DebBugs Version Tracking
462 if Cnf.Find("Dir::Queue::BTSVersionTrack"):
463 # ??? once queue/* is cleared on *.d.o and/or reprocessed
464 # the conditionalization on dsc["bts changelog"] should be
467 # Write out the version history from the changelog
468 if changes["architecture"].has_key("source") and \
469 dsc.has_key("bts changelog"):
# Write to a dot-prefixed temp file, then rename into place atomically.
471 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
472 dotprefix=1, perms=0644)
473 version_history = utils.open_file(temp_filename, 'w')
474 version_history.write(dsc["bts changelog"])
475 version_history.close()
476 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
477 changes_file[:-8]+".versions")
478 os.rename(temp_filename, filename)
480 # Write out the binary -> source mapping.
481 temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
482 dotprefix=1, perms=0644)
483 debinfo = utils.open_file(temp_filename, 'w')
484 for file in file_keys:
# 'f' is presumably files[file]; the binding line is elided -- confirm.
486 if f["type"] == "deb":
487 line = " ".join([f["package"], f["version"],
488 f["architecture"], f["source package"],
489 f["source version"]])
490 debinfo.write(line+"\n")
492 filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
493 changes_file[:-8]+".debinfo")
494 os.rename(temp_filename, filename)
496 self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
498 ###########################################################################
# Register the upload's files with the queue_build table and make them
# visible to the auto-builders (copy for security queues, symlink
# otherwise), including the pool .orig.tar.gz when needed.
# NOTE(review): the 'Cnf' binding, several 'else:'/'continue' lines and
# the 'filename' assignment before line 538 are elided in this view.
500 def queue_build (self, queue, path):
503 files = self.pkg.files
504 changes = self.pkg.changes
505 changes_file = self.pkg.changes_file
507 file_keys = files.keys()
509 ## Special support to enable clean auto-building of queued packages
510 queue_id = database.get_or_set_queue_id(queue)
# All inserts/updates for this upload happen in one transaction.
512 self.projectB.query("BEGIN WORK")
513 for suite in changes["distribution"].keys():
514 if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
516 suite_id = database.get_suite_id(suite)
517 dest_dir = Cnf["Dir::QueueBuild"]
518 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
519 dest_dir = os.path.join(dest_dir, suite)
520 for file in file_keys:
521 src = os.path.join(path, file)
522 dest = os.path.join(dest_dir, file)
523 if Cnf.FindB("Dinstall::SecurityQueueBuild"):
524 # Copy it since the original won't be readable by www-data
525 utils.copy(src, dest)
527 # Create a symlink to it
528 os.symlink(src, dest)
529 # Add it to the list of packages for later processing by apt-ftparchive
530 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
531 # If the .orig.tar.gz is in the pool, create a symlink to
532 # it (if one doesn't already exist)
533 if self.pkg.orig_tar_id:
534 # Determine the .orig.tar.gz file name
535 for dsc_file in self.pkg.dsc_files.keys():
536 if dsc_file.endswith(".orig.tar.gz"):
538 dest = os.path.join(dest_dir, filename)
539 # If it doesn't exist, create a symlink
540 if not os.path.exists(dest):
541 # Find the .orig.tar.gz in the pool
542 q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
# An empty result here means the files table is inconsistent -- fatal.
545 utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
546 src = os.path.join(ql[0][0], ql[0][1])
547 os.symlink(src, dest)
548 # Add it to the list of packages for later processing by apt-ftparchive
549 self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
550 # if it does, update things to ensure it's not removed prematurely
552 self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
554 self.projectB.query("COMMIT WORK")
556 ###########################################################################
# Compare the section/priority claimed by each deb against its override
# entry and mail the maintainer about any disparities.
# NOTE(review): the 'Cnf' binding, the early 'return', the 'summary'
# initialisation and the empty-summary guard are elided in this view.
558 def check_override (self):
560 changes = self.pkg.changes
561 files = self.pkg.files
564 # Abandon the check if:
565 # a) it's a non-sourceful upload
566 # b) override disparity checks have been disabled
567 # c) we're not sending mail
568 if not changes["architecture"].has_key("source") or \
569 not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
570 Cnf["Dinstall::Options::No-Mail"]:
574 file_keys = files.keys()
576 for file in file_keys:
# Only non-NEW debs have override data to compare against.
577 if not files[file].has_key("new") and files[file]["type"] == "deb":
578 section = files[file]["section"]
579 override_section = files[file]["override section"]
# "-" means the package declined to state a value; skip those.
580 if section.lower() != override_section.lower() and section != "-":
581 summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
582 priority = files[file]["priority"]
583 override_priority = files[file]["override priority"]
584 if priority != override_priority and priority != "-":
585 summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
590 Subst["__SUMMARY__"] = summary
591 mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
592 utils.send_mail(mail_message)
594 ###########################################################################
# NOTE(review): the end of the docstring, the per-file loop header, the
# try:/except OSError lines and several continue/else lines are elided
# in this view; comments below only cover the visible code.
596 def force_reject (self, files):
597 """Forcefully move files from the current directory to the
598            reject directory.  If any file already exists in the reject
599            directory it will be moved to the morgue to make way for
605 # Skip any files which don't exist or which we don't have permission to copy.
606 if os.access(file,os.R_OK) == 0:
608 dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
# O_EXCL claims the destination atomically; failure means it exists.
610 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
612 # File exists?  Let's try and move it to the morgue
613 if errno.errorcode[e.errno] == 'EEXIST':
614 morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
616 morgue_file = utils.find_next_free(morgue_file)
617 except utils.tried_too_hard_exc:
618 # Something's either gone badly Pete Tong, or
619 # someone is trying to exploit us.
620 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
622 utils.move(dest_file, morgue_file, perms=0660)
# Retry the atomic claim now that the old file is out of the way.
624 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
627 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
631 # If we got here, we own the destination file, so we can
632 # safely overwrite it.
633 utils.move(file, dest_file, 1, perms=0660)
636 ###########################################################################
# Reject the current upload: optionally gather a manual reason via
# $EDITOR, move files to the reject queue, write a <pkg>.reason file and
# mail the rejection.
# NOTE(review): this view elides several lines, including the edit/
# confirm loop bodies, the 'pkg'/'Cnf'/'Subst' bindings and the branch
# headers separating the automatic and manual rejection paths.
638 def do_reject (self, manual = 0, reject_message = ""):
639 # If we weren't given a manual rejection message, spawn an
640 # editor so the user can add one in...
641 if manual and not reject_message:
642 temp_filename = utils.temp_filename()
643 editor = os.environ.get("EDITOR","vi")
646 os.system("%s %s" % (editor, temp_filename))
647 temp_fh = utils.open_file(temp_filename)
648 reject_message = "".join(temp_fh.readlines())
650 print "Reject message:"
651 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
652 prompt = "[R]eject, Edit, Abandon, Quit ?"
# Loop until the operator types one of the prompt's bracketed choices.
654 while prompt.find(answer) == -1:
655 answer = utils.our_raw_input(prompt)
656 m = re_default_answer.search(prompt)
659 answer = answer[:1].upper()
660 os.unlink(temp_filename)
672 reason_filename = pkg.changes_file[:-8] + ".reason"
673 reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
675 # Move all the files into the reject directory
676 reject_files = pkg.files.keys() + [pkg.changes_file]
677 self.force_reject(reject_files)
679 # If we fail here someone is probably trying to exploit the race
680 # so let's just raise an exception ...
681 if os.path.exists(reason_filename):
682 os.unlink(reason_filename)
683 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
# Automatic rejection path: canned headers, raw reject_message on disk.
686 Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
687 Subst["__MANUAL_REJECT_MESSAGE__"] = ""
688 Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
689 os.write(reason_fd, reject_message)
690 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
692 # Build up the rejection email
# Manual rejection path: attribute the reject to the operator.
693 user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
695 Subst["__REJECTOR_ADDRESS__"] = user_email_address
696 Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
697 Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
698 reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
699 # Write the rejection email out as the <foo>.reason file
700 os.write(reason_fd, reject_mail_message)
704 # Send the rejection mail if appropriate
705 if not Cnf["Dinstall::Options::No-Mail"]:
706 utils.send_mail(reject_mail_message)
708 self.Logger.log(["rejected", pkg.changes_file])
711 ################################################################################
713 # Ensure that source exists somewhere in the archive for the binary
714 # upload being processed.
716 # (1) exact match                  => 1.0-3
717 # (2) Bin-only NMU                 => 1.0-3+b1 , 1.0-3.1+b1
# NOTE(review): 'suites = ["any"]' is a mutable default argument; safe
# only as long as callers never mutate it -- worth confirming/refactoring.
# NOTE(review): several lines are elided here (the "any" branch, the
# suite-expansion loop headers and the return statements).
719 def source_exists (self, package, source_version, suites = ["any"]):
723 que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
726 # source must exist in suite X, or in some other suite that's
727 # mapped to X, recursively... silent-maps are counted too,
728 # unreleased-maps aren't.
729 maps = self.Cnf.ValueList("SuiteMappings")[:]
731 maps = [ m.split() for m in maps ]
732 maps = [ (x[1], x[2]) for x in maps
733 if x[0] == "map" or x[0] == "silent-map" ]
# Expand 's' with every suite that maps into one already present.
736 if x[1] in s and x[0] not in s:
739 que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
740 q = self.projectB.query(que)
742 # Reduce the query results to a list of version numbers
743 ql = [ i[0] for i in q.getresult() ]
746 if source_version in ql:
# Case (2): strip a "+bN" binary-NMU suffix and retry the match.
750 orig_source_version = re_bin_only_nmu.sub('', source_version)
751 if orig_source_version in ql:
759 ################################################################################
# Look up the override entry (section, priority) for a package in a
# suite/component, caching the values on the file entry for the later
# disparity check; falls back from "dsc" to deb/udeb override types.
# NOTE(review): the binding of 'type' (from binary_type or "dsc") and
# the return statements are elided in this view.
761 def in_override_p (self, package, component, suite, binary_type, file):
762 files = self.pkg.files
764 if binary_type == "": # must be source
769 # Override suite name; used for example with proposed-updates
770 if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
771 suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
773 # Avoid <undef> on unknown distributions
774 suite_id = database.get_suite_id(suite)
777 component_id = database.get_component_id(component)
778 type_id = database.get_override_type_id(type)
780 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
781 % (package, suite_id, component_id, type_id))
782 result = q.getresult()
783 # If checking for a source package fall back on the binary override type
784 if type == "dsc" and not result:
785 deb_type_id = database.get_override_type_id("deb")
786 udeb_type_id = database.get_override_type_id("udeb")
787 q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
788 % (package, suite_id, component_id, deb_type_id, udeb_type_id))
789 result = q.getresult()
791 # Remember the section and priority so we can check them later if appropriate
793 files[file]["override section"] = result[0][0]
794 files[file]["override priority"] = result[0][1]
798 ################################################################################
def reject (self, str, prefix="Rejected: "):
    """Queue one prefixed rejection note on self.reject_message.

    Notes are separated by newlines rather than terminated by them, so
    the accumulated message never carries a trailing newline when it is
    later passed back up to a caller.
    """
    if self.reject_message == "":
        self.reject_message = prefix + str
    else:
        self.reject_message = self.reject_message + "\n" + prefix + str
808 ################################################################################
# Return the highest version of the package present in 'suite' or any
# suite that enhances it, scanning (version, suite) query rows.
# NOTE(review): the initialisation of 'anyversion', its assignment and
# the return statement are elided in this view.
810 def get_anyversion(self, query_result, suite):
812 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
813 for (v, s) in query_result:
# Suite names from the DB are compared case-insensitively.
814 if s in [ x.lower() for x in anysuite ]:
815 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
819 ################################################################################
821 def cross_suite_version_check(self, query_result, file, new_version):
822 """Ensure versions are newer than existing packages in target
823 suites and that cross-suite version checking rules as
824 set out in the conf file are satisfied."""
826 # Check versions for each target suite
827 for target_suite in self.pkg.changes["distribution"].keys():
828 must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
829 must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
830 # Enforce "must be newer than target suite" even if conffile omits it
831 if target_suite not in must_be_newer_than:
832 must_be_newer_than.append(target_suite)
833 for entry in query_result:
834 existent_version = entry[0]
836 if suite in must_be_newer_than and \
837 apt_pkg.VersionCompare(new_version, existent_version) < 1:
838 self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
839 if suite in must_be_older_than and \
840 apt_pkg.VersionCompare(new_version, existent_version) > -1:
841 ch = self.pkg.changes
843 if ch.get('distribution-version', {}).has_key(suite):
844 # we really use the other suite, ignoring the conflicting one ...
845 addsuite = ch["distribution-version"][suite]
847 add_version = self.get_anyversion(query_result, addsuite)
848 target_version = self.get_anyversion(query_result, target_suite)
851 # not add_version can only happen if we map to a suite
852 # that doesn't enhance the suite we're propup'ing from.
853 # so "propup-ver x a b c; map a d" is a problem only if
854 # d doesn't enhance a.
856 # i think we could always propagate in this case, rather
857 # than complaining. either way, this isn't a REJECT issue
859 # And - we really should complain to the dorks who configured dak
860 self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
861 self.pkg.changes.setdefault("propdistribution", {})
862 self.pkg.changes["propdistribution"][addsuite] = 1
864 elif not target_version:
865 # not targets_version is true when the package is NEW
866 # we could just stick with the "...old version..." REJECT
868 self.reject("Won't propogate NEW packages.")
869 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
870 # propogation would be redundant. no need to reject though.
871 self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
873 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
874 apt_pkg.VersionCompare(add_version, target_version) >= 0:
876 self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
877 self.pkg.changes.setdefault("propdistribution", {})
878 self.pkg.changes["propdistribution"][addsuite] = 1
882 self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
884 ################################################################################
# Validate a binary upload against the archive DB: run the cross-suite
# version checks and refuse to overwrite an identical package/version/
# arch already in the archive.  Returns the accumulated reject message.
# NOTE(review): the table list continuation of the first query (original
# line 893) and the guard before line 909 are elided in this view.
886 def check_binary_against_db(self, file):
887 self.reject_message = ""
888 files = self.pkg.files
890 # Ensure version is sane
# "all" binaries are checked against every architecture's entries.
891 q = self.projectB.query("""
892 SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
894 WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
895 AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
896 % (files[file]["package"],
897 files[file]["architecture"]))
898 self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
900 # Check for any existing copies of the file
901 q = self.projectB.query("""
902 SELECT b.id FROM binaries b, architecture a
903 WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
904 AND a.id = b.architecture"""
905 % (files[file]["package"],
906 files[file]["version"],
907 files[file]["architecture"]))
# Any hit means this exact binary already exists in the archive.
909 self.reject("%s: can not overwrite existing copy already in the archive." % (file))
911 return self.reject_message
913 ################################################################################
915 def check_source_against_db(self, file):
916 self.reject_message = ""
919 # Ensure version is sane
920 q = self.projectB.query("""
921 SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
922 WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
923 self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
925 return self.reject_message
927 ################################################################################
930 # NB: this function can remove entries from the 'files' index [if
931 # the .orig.tar.gz is a duplicate of the one in the archive]; if
932 # you're iterating over 'files' and call this function as part of
933 # the loop, be sure to add a check to the top of the loop to
934 # ensure you haven't just tried to dereference the deleted entry.
937 def check_dsc_against_db(self, file):
938 self.reject_message = ""
939 files = self.pkg.files
940 dsc_files = self.pkg.dsc_files
941 legacy_source_untouchable = self.pkg.legacy_source_untouchable
942 self.pkg.orig_tar_gz = None
944 # Try and find all files mentioned in the .dsc. This has
945 # to work harder to cope with the multiple possible
946 # locations of an .orig.tar.gz.
947 # The ordering on the select is needed to pick the newest orig
948 # when it exists in multiple places.
949 for dsc_file in dsc_files.keys():
951 if files.has_key(dsc_file):
952 actual_md5 = files[dsc_file]["md5sum"]
953 actual_size = int(files[dsc_file]["size"])
954 found = "%s in incoming" % (dsc_file)
955 # Check the file does not already exist in the archive
956 q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
958 # Strip out anything that isn't '%s' or '/%s$'
960 if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
963 # "[dak] has not broken them. [dak] has fixed a
964 # brokenness. Your crappy hack exploited a bug in
967 # "(Come on! I thought it was always obvious that
968 # one just doesn't release different files with
969 # the same name and version.)"
970 # -- ajk@ on d-devel@l.d.o
973 # Ignore exact matches for .orig.tar.gz
975 if dsc_file.endswith(".orig.tar.gz"):
977 if files.has_key(dsc_file) and \
978 int(files[dsc_file]["size"]) == int(i[0]) and \
979 files[dsc_file]["md5sum"] == i[1]:
980 self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
982 self.pkg.orig_tar_gz = i[2] + i[3]
986 self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
987 elif dsc_file.endswith(".orig.tar.gz"):
989 q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
991 # Strip out anything that isn't '%s' or '/%s$'
993 if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
997 # Unfortunately, we may get more than one match here if,
998 # for example, the package was in potato but had an -sa
999 # upload in woody. So we need to choose the right one.
1001 x = ql[0]; # default to something sane in case we don't match any or have only one
1005 old_file = i[0] + i[1]
1006 old_file_fh = utils.open_file(old_file)
1007 actual_md5 = apt_pkg.md5sum(old_file_fh)
1009 actual_size = os.stat(old_file)[stat.ST_SIZE]
1010 if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
1013 legacy_source_untouchable[i[3]] = ""
1015 old_file = x[0] + x[1]
1016 old_file_fh = utils.open_file(old_file)
1017 actual_md5 = apt_pkg.md5sum(old_file_fh)
1019 actual_size = os.stat(old_file)[stat.ST_SIZE]
1022 dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
1023 # See install() in process-accepted...
1024 self.pkg.orig_tar_id = x[3]
1025 self.pkg.orig_tar_gz = old_file
1026 if suite_type == "legacy" or suite_type == "legacy-mixed":
1027 self.pkg.orig_tar_location = "legacy"
1029 self.pkg.orig_tar_location = x[4]
1031 # Not there? Check the queue directories...
1033 in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
1034 # See process_it() in 'dak process-unchecked' for explanation of this
1035 # in_unchecked check dropped by ajt 2007-08-28, how did that
1037 if os.path.exists(in_unchecked) and False:
1038 return (self.reject_message, in_unchecked)
1040 for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
1041 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
1042 if os.path.exists(in_otherdir):
1043 in_otherdir_fh = utils.open_file(in_otherdir)
1044 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
1045 in_otherdir_fh.close()
1046 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
1048 self.pkg.orig_tar_gz = in_otherdir
1051 self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
1052 self.pkg.orig_tar_gz = -1
1055 self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
1057 if actual_md5 != dsc_files[dsc_file]["md5sum"]:
1058 self.reject("md5sum for %s doesn't match %s." % (found, file))
1059 if actual_size != int(dsc_files[dsc_file]["size"]):
1060 self.reject("size for %s doesn't match %s." % (found, file))
1062 return (self.reject_message, None)
1064 def do_query(self, q):
1065 sys.stderr.write("query: \"%s\" ... " % (q))
1066 before = time.time()
1067 r = self.projectB.query(q)
1068 time_diff = time.time()-before
1069 sys.stderr.write("took %.3f seconds.\n" % (time_diff))