5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 # suppress some deprecation warnings in squeeze related to apt_pkg
62 warnings.filterwarnings('ignore', \
63 "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
65 warnings.filterwarnings('ignore', \
66 "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
69 ###############################################################################
# Map a file entry from a parsed .changes onto its database file type
# ("dbtype" if already set, else derived from the extension); unknown types
# abort hard via utils.fubar().
# NOTE(review): gaps in the original numbering show lines are missing from
# this listing (e.g. the branches that assign file_type and the return).
71 def get_type(f, session):
73 Get the file type of C{f}
76 @param f: file entry from Changes object
78 @type session: SQLA Session
79 @param session: SQL Alchemy session object
# Prefer an explicit "dbtype" recorded earlier (binary_file_checks sets it).
86 if f.has_key("dbtype"):
87 file_type = f["dbtype"]
88 elif re_source_ext.match(f["type"]):
# Unrecognised type: fatal — continuing with bad data is worse.
92 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
94 # Validate the override type
95 type_id = get_override_type(file_type, session)
97 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
101 ################################################################################
103 # Determine what parts in a .changes are NEW
# Walks the files of an upload, collects per-package candidates that have
# no override entry for the target suites, remaps override suites, and
# flags unprocessed byhand items.
# NOTE(review): the default `new = {}` is a mutable default argument —
# shared across calls that omit it; `new=None` + `new = new or {}` would
# be the safe idiom. TODO confirm callers before changing.
105 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = {}):
107 Determine what parts in a C{changes} file are NEW.
110 @param filename: changes filename
112 @type changes: Upload.Pkg.changes dict
113 @param changes: Changes dictionary
115 @type files: Upload.Pkg.files dict
116 @param files: Files dictionary
119 @param warn: Warn if overrides are added for (old)stable
121 @type dsc: Upload.Pkg.dsc dict
122 @param dsc: (optional); Dsc dictionary
125 @param new: new packages as returned by a previous call to this function, but override information may have changed
128 @return: dictionary of NEW components.
131 # TODO: This should all use the database instead of parsing the changes
135 dbchg = get_dbchange(filename, session)
137 print "Warning: cannot find changes file in database; won't check byhand"
139 # Try to get the Package-Set field from an included .dsc file (if possible).
141 for package, entry in build_package_set(dsc, session).items():
142 if not new.has_key(package):
145 # Build up a list of potentially new things
146 for name, f in files.items():
147 # Keep a record of byhand elements
148 if f["section"] == "byhand":
153 priority = f["priority"]
154 section = f["section"]
155 file_type = get_type(f, session)
156 component = f["component"]
158 if file_type == "dsc":
# First sighting of this package: seed its NEW record.
161 if not new.has_key(pkg):
163 new[pkg]["priority"] = priority
164 new[pkg]["section"] = section
165 new[pkg]["type"] = file_type
166 new[pkg]["component"] = component
167 new[pkg]["files"] = []
169 old_type = new[pkg]["type"]
170 if old_type != file_type:
171 # source gets trumped by deb or udeb
172 if old_type == "dsc":
173 new[pkg]["priority"] = priority
174 new[pkg]["section"] = section
175 new[pkg]["type"] = file_type
176 new[pkg]["component"] = component
178 new[pkg]["files"].append(name)
180 if f.has_key("othercomponents"):
181 new[pkg]["othercomponents"] = f["othercomponents"]
183 # Fix up the list of target suites
# Suites with an overridesuite get swapped for it in changes["suite"].
185 for suite in changes["suite"].keys():
186 oldsuite = get_suite(suite, session)
188 print "WARNING: Invalid suite %s found" % suite
191 if oldsuite.overridesuite:
192 newsuite = get_suite(oldsuite.overridesuite, session)
195 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
196 oldsuite.overridesuite, suite)
197 del changes["suite"][suite]
198 changes["suite"][oldsuite.overridesuite] = 1
200 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
201 oldsuite.overridesuite, suite)
203 # Check for unprocessed byhand files
204 if dbchg is not None:
205 for b in byhand.keys():
206 # Find the file entry in the database
208 for f in dbchg.files:
211 # If it's processed, we can ignore it
# NOTE(review): this print has a %s placeholder but no "% b" argument in
# the visible line — it would print the literal "%s". Confirm upstream.
217 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
219 # Check for new stuff
# A package with an existing override for a suite is not NEW there.
220 for suite in changes["suite"].keys():
221 for pkg in new.keys():
222 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
224 for file_entry in new[pkg]["files"]:
225 if files[file_entry].has_key("new"):
226 del files[file_entry]["new"]
230 for s in ['stable', 'oldstable']:
231 if changes["suite"].has_key(s):
232 print "WARNING: overrides will be added for %s!" % s
233 for pkg in new.keys():
234 if new[pkg].has_key("othercomponents"):
235 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
239 ################################################################################
# Validate section/priority of NEW packages against the database and apply
# sanity rules; invalid entries get their "section id"/"priority id" forced
# to -1 so the NEW-handling UI shows them as unknown.
241 def check_valid(new, session = None):
243 Check if section and priority for NEW packages exist in database.
244 Additionally does sanity checks:
245 - debian-installer packages have to be udeb (or source)
246 - non debian-installer packages can not be udeb
247 - source priority can only be assigned to dsc file types
250 @param new: Dict of new packages with their section, priority and type.
253 for pkg in new.keys():
254 section_name = new[pkg]["section"]
255 priority_name = new[pkg]["priority"]
256 file_type = new[pkg]["type"]
# Resolve the section row; -1 marks an unknown section.
258 section = get_section(section_name, session)
260 new[pkg]["section id"] = -1
262 new[pkg]["section id"] = section.section_id
264 priority = get_priority(priority_name, session)
266 new[pkg]["priority id"] = -1
268 new[pkg]["priority id"] = priority.priority_id
# True when the section name mentions debian-installer.
271 di = section_name.find("debian-installer") != -1
273 # If d-i, we must be udeb and vice-versa
274 if (di and file_type not in ("udeb", "dsc")) or \
275 (not di and file_type == "udeb"):
276 new[pkg]["section id"] = -1
278 # If dsc we need to be source and vice-versa
# NOTE(review): `priority` is the object returned by get_priority() (its
# priority_id is accessed above), so comparing it to the string "source"
# looks wrong — `priority_name` was probably intended. Confirm upstream.
279 if (priority == "source" and file_type != "dsc") or \
280 (priority != "source" and file_type == "dsc"):
281 new[pkg]["priority id"] = -1
283 ###############################################################################
285 # Used by Upload.check_timestamps
# Collects tar members whose mtime falls outside the accepted
# [past_cutoff, future_cutoff] window, keyed by member name.
# NOTE(review): original lines 288 and 291-292 are missing from this
# listing — possibly a docstring and an additional method. TODO confirm.
286 class TarTime(object):
287 def __init__(self, future_cutoff, past_cutoff):
289 self.future_cutoff = future_cutoff
290 self.past_cutoff = past_cutoff
# future_files / ancient_files: name -> offending mtime.
293 self.future_files = {}
294 self.ancient_files = {}
# Tar-walk callback: record members newer than future_cutoff or older
# than past_cutoff.
296 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
297 if MTime > self.future_cutoff:
298 self.future_files[Name] = MTime
299 if MTime < self.past_cutoff:
300 self.ancient_files[Name] = MTime
302 ###############################################################################
# Interactively compose a "prod" mail to a maintainer: seed $EDITOR with
# existing note comments, confirm at a prompt, then send via template.
304 def prod_maintainer(notes, upload):
307 # Here we prepare an editor and get them ready to prod...
308 (fd, temp_filename) = utils.temp_filename()
309 temp_file = os.fdopen(fd, 'w')
# NOTE(review): `note` is not defined in the visible lines — presumably an
# enclosing `for note in notes:` loop was dropped from this listing.
311 temp_file.write(note.comment)
313 editor = os.environ.get("EDITOR","vi")
316 os.system("%s %s" % (editor, temp_filename))
317 temp_fh = utils.open_file(temp_filename)
318 prod_message = "".join(temp_fh.readlines())
320 print "Prod message:"
321 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
322 prompt = "[P]rod, Edit, Abandon, Quit ?"
# Re-prompt until the reply matches one of the offered choices.
324 while prompt.find(answer) == -1:
325 answer = utils.our_raw_input(prompt)
326 m = re_default_answer.search(prompt)
329 answer = answer[:1].upper()
330 os.unlink(temp_filename)
336 # Otherwise, do the proding...
337 user_email_address = utils.whoami() + " <%s>" % (
338 cnf["Dinstall::MyAdminAddress"])
342 Subst["__FROM_ADDRESS__"] = user_email_address
343 Subst["__PROD_MESSAGE__"] = prod_message
344 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
# Render the process-new.prod template with the substitution map and send.
346 prod_mail_message = utils.TemplateSubst(
347 Subst,cnf["Dir::Templates"]+"/process-new.prod")
350 utils.send_mail(prod_mail_message)
352 print "Sent prodding message"
354 ################################################################################
# Let the operator edit a NEW note in $EDITOR, confirm at a prompt, and
# store the result as a NewComment row attributed to the current user.
356 def edit_note(note, upload, session, trainee=False):
357 # Write the current data to a temporary file
358 (fd, temp_filename) = utils.temp_filename()
359 editor = os.environ.get("EDITOR","vi")
362 os.system("%s %s" % (editor, temp_filename))
363 temp_file = utils.open_file(temp_filename)
364 newnote = temp_file.read().rstrip()
367 print utils.prefix_multi_line_string(newnote," ")
368 prompt = "[D]one, Edit, Abandon, Quit ?"
# Re-prompt until the reply matches one of the offered choices.
370 while prompt.find(answer) == -1:
371 answer = utils.our_raw_input(prompt)
372 m = re_default_answer.search(prompt)
375 answer = answer[:1].upper()
376 os.unlink(temp_filename)
# Persist the note; package/version identify the upload it refers to.
383 comment = NewComment()
384 comment.package = upload.pkg.changes["source"]
385 comment.version = upload.pkg.changes["version"]
386 comment.comment = newnote
387 comment.author = utils.whoami()
388 comment.trainee = trainee
392 ###############################################################################
394 # suite names DMs can upload to
# Used by get_newest_source to restrict the lookup to DM-relevant suites.
395 dm_suites = ['unstable', 'experimental']
397 def get_newest_source(source, session):
398 'returns the newest DBSource object in dm_suites'
399 ## the most recent version of the package uploaded to unstable or
400 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
401 ## section of its control file
# Order newest-version-first; the tail of this function (presumably the
# statement returning the first row) is missing from this listing.
402 q = session.query(DBSource).filter_by(source = source). \
403 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
404 order_by(desc('source.version'))
407 def get_suite_version_by_source(source, session):
408 'returns a list of tuples (suite_name, version) for source package'
# Joins suites to their source packages; the statement returning the query
# results is missing from this listing.
409 q = session.query(Suite.suite_name, DBSource.version). \
410 join(Suite.sources).filter_by(source = source)
def get_source_by_package_and_suite(package, suite_name, session):
    """
    Return a DBSource query filtered by DBBinary.package and this
    package's suite name.
    """
    query = session.query(DBSource)
    query = query.join(DBSource.binaries).filter_by(package = package)
    return query.join(DBBinary.suites).filter_by(suite_name = suite_name)
def get_suite_version_by_package(package, arch_string, session):
    """
    Return a list of (suite_name, version) tuples for the given binary
    package, matching either arch_string or 'all'.
    """
    arch_candidates = [arch_string, 'all']
    query = session.query(Suite.suite_name, DBBinary.version)
    query = query.join(Suite.binaries).filter_by(package = package)
    query = query.join(DBBinary.architecture)
    return query.filter(Architecture.arch_string.in_(arch_candidates)).all()
# Central per-upload state holder: substitution map, reject/warning/note
# lists, and the parsed .changes/.dsc/files dictionaries (self.pkg).
432 class Upload(object):
434 Everything that has to do with an upload processed.
442 ###########################################################################
# Fragment of the reset/initialisation method; its `def` line is missing
# from this listing (original numbering jumps 442 -> 445). TODO confirm
# the method name against the upstream file.
445 """ Reset a number of internal variables."""
447 # Initialize the substitution template map
# Static substitutions come straight from the Dinstall configuration.
450 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
451 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
452 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
453 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
459 self.later_check_files = []
# Render this upload's reject reasons, warnings and notes into one text
# blob for the maintainer mail; only non-empty lists are emitted. The tail
# of the method (the return) is missing from this listing.
463 def package_info(self):
465 Format various messages from this Upload to send to the maintainer.
469 ('Reject Reasons', self.rejects),
470 ('Warnings', self.warnings),
471 ('Notes', self.notes),
475 for title, messages in msgs:
477 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
482 ###########################################################################
483 def update_subst(self):
484 """ Set up the per-package template substitution mappings """
488 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
489 if not self.pkg.changes.has_key("architecture") or not \
490 isinstance(self.pkg.changes["architecture"], dict):
491 self.pkg.changes["architecture"] = { "Unknown" : "" }
493 # and maintainer2047 may not exist.
494 if not self.pkg.changes.has_key("maintainer2047"):
495 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
497 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
498 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
499 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
501 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
502 if self.pkg.changes["architecture"].has_key("source") and \
503 self.pkg.changes["changedby822"] != "" and \
504 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
506 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
507 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
508 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
510 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
511 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
512 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
514 # Process policy doesn't set the fingerprint field and I don't want to make it
515 # do it for now as I don't want to have to deal with the case where we accepted
516 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
517 # the meantime so the package will be remarked as rejectable. Urgh.
518 # TODO: Fix this properly
519 if self.pkg.changes.has_key('fingerprint'):
# NOTE(review): this session is opened but no close() is visible in this
# listing — confirm it is closed in the missing lines.
520 session = DBConn().session()
521 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
522 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
523 if self.pkg.changes.has_key("sponsoremail"):
524 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
# Bcc the package-tracking server for source uploads, if configured.
527 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
528 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
530 # Apply any global override of the Maintainer field
531 if cnf.get("Dinstall::OverrideMaintainer"):
532 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
533 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
535 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
536 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
537 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
538 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
540 ###########################################################################
# Parse a .changes file into self.pkg.changes / self.pkg.files, normalise
# fields, and collect parse problems into self.rejects. Several try/except
# opening lines are missing from this listing (numbering gaps).
541 def load_changes(self, filename):
543 Load a changes file and setup a dictionary around it. Also checks for mandantory
546 @type filename: string
547 @param filename: Changes filename, full path.
550 @return: whether the changes file was valid or not. We may want to
551 reject even if this is True (see what gets put in self.rejects).
552 This is simply to prevent us even trying things later which will
553 fail because we couldn't properly parse the file.
556 self.pkg.changes_file = filename
558 # Parse the .changes field into a dictionary
560 self.pkg.changes.update(parse_changes(filename))
561 except CantOpenError:
562 self.rejects.append("%s: can't read file." % (filename))
564 except ParseChangesError, line:
565 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
567 except ChangesUnicodeError:
568 self.rejects.append("%s: changes file not proper utf-8" % (filename))
571 # Parse the Files field from the .changes into another dictionary
573 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
574 except ParseChangesError, line:
575 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
577 except UnknownFormatError, format:
578 self.rejects.append("%s: unknown format '%s'." % (filename, format))
581 # Check for mandatory fields
582 for i in ("distribution", "source", "binary", "architecture",
583 "version", "maintainer", "files", "changes", "description"):
584 if not self.pkg.changes.has_key(i):
585 # Avoid undefined errors later
586 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
589 # Strip a source version in brackets from the source field
590 if re_strip_srcver.search(self.pkg.changes["source"]):
591 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
593 # Ensure the source field is a valid package name.
594 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
595 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
597 # Split multi-value fields into a lower-level dictionary
# Each whitespace-separated value becomes a key mapping to 1.
598 for i in ("architecture", "distribution", "binary", "closes"):
599 o = self.pkg.changes.get(i, "")
601 del self.pkg.changes[i]
603 self.pkg.changes[i] = {}
606 self.pkg.changes[i][j] = 1
608 # Fix the Maintainer: field to be RFC822/2047 compatible
610 (self.pkg.changes["maintainer822"],
611 self.pkg.changes["maintainer2047"],
612 self.pkg.changes["maintainername"],
613 self.pkg.changes["maintaineremail"]) = \
614 fix_maintainer (self.pkg.changes["maintainer"])
615 except ParseMaintError, msg:
616 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
617 % (filename, self.pkg.changes["maintainer"], msg))
619 # ...likewise for the Changed-By: field if it exists.
621 (self.pkg.changes["changedby822"],
622 self.pkg.changes["changedby2047"],
623 self.pkg.changes["changedbyname"],
624 self.pkg.changes["changedbyemail"]) = \
625 fix_maintainer (self.pkg.changes.get("changed-by", ""))
626 except ParseMaintError, msg:
# On failure, blank the derived fields so later code sees empty strings.
627 self.pkg.changes["changedby822"] = ""
628 self.pkg.changes["changedby2047"] = ""
629 self.pkg.changes["changedbyname"] = ""
630 self.pkg.changes["changedbyemail"] = ""
632 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
633 % (filename, self.pkg.changes["changed-by"], msg))
635 # Ensure all the values in Closes: are numbers
636 if self.pkg.changes.has_key("closes"):
637 for i in self.pkg.changes["closes"].keys():
638 if re_isanum.match (i) == None:
639 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
641 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
642 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
643 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
645 # Check the .changes is non-empty
646 if not self.pkg.files:
647 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
650 # Changes was syntactically valid even if we'll reject
653 ###########################################################################
# Apply the configured SuiteMappings (map / silent-map / map-unreleased /
# ignore / propup-version / reject) to changes["distribution"], then verify
# that at least one valid, known target suite remains.
655 def check_distributions(self):
656 "Check and map the Distribution field"
660 # Handle suite mappings
661 for m in Cnf.ValueList("SuiteMappings"):
664 if mtype == "map" or mtype == "silent-map":
665 (source, dest) = args[1:3]
666 if self.pkg.changes["distribution"].has_key(source):
667 del self.pkg.changes["distribution"][source]
668 self.pkg.changes["distribution"][dest] = 1
669 if mtype != "silent-map":
670 self.notes.append("Mapping %s to %s." % (source, dest))
671 if self.pkg.changes.has_key("distribution-version"):
672 if self.pkg.changes["distribution-version"].has_key(source):
673 self.pkg.changes["distribution-version"][source]=dest
674 elif mtype == "map-unreleased":
# Remap only when some upload architecture is not built in the source suite.
675 (source, dest) = args[1:3]
676 if self.pkg.changes["distribution"].has_key(source):
677 for arch in self.pkg.changes["architecture"].keys():
678 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
679 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
680 del self.pkg.changes["distribution"][source]
681 self.pkg.changes["distribution"][dest] = 1
683 elif mtype == "ignore":
685 if self.pkg.changes["distribution"].has_key(suite):
686 del self.pkg.changes["distribution"][suite]
687 self.warnings.append("Ignoring %s as a target suite." % (suite))
688 elif mtype == "reject":
690 if self.pkg.changes["distribution"].has_key(suite):
691 self.rejects.append("Uploads to %s are not accepted." % (suite))
692 elif mtype == "propup-version":
693 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes
695 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
696 if self.pkg.changes["distribution"].has_key(args[1]):
697 self.pkg.changes.setdefault("distribution-version", {})
698 for suite in args[2:]:
699 self.pkg.changes["distribution-version"][suite] = suite
701 # Ensure there is (still) a target distribution
702 if len(self.pkg.changes["distribution"].keys()) < 1:
703 self.rejects.append("No valid distribution remaining.")
705 # Ensure target distributions exist
706 for suite in self.pkg.changes["distribution"].keys():
707 if not Cnf.has_key("Suite::%s" % (suite)):
708 self.rejects.append("Unknown distribution `%s'." % (suite))
710 ###########################################################################
# Validate one .deb/.udeb of the upload: extract and sanity-check its
# control fields, cross-check them against the .changes and the filename,
# and verify a matching source package exists. Problems append to
# self.rejects / self.warnings. Several try/if opening lines are missing
# from this listing (numbering gaps).
712 def binary_file_checks(self, f, session):
714 entry = self.pkg.files[f]
716 # Extract package control information
717 deb_file = utils.open_file(f)
# NOTE: apt_pkg.ParseSection is deprecated (see the warnings filter at the
# top of this file) in favour of apt_pkg.TagSection.
719 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
721 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
723 # Can't continue, none of the checks on control would work.
726 # Check for mandantory "Description:"
729 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
731 self.rejects.append("%s: Missing Description in binary package" % (f))
736 # Check for mandatory fields
737 for field in [ "Package", "Architecture", "Version" ]:
738 if control.Find(field) == None:
740 self.rejects.append("%s: No %s field in control." % (f, field))
743 # Ensure the package name matches the one give in the .changes
744 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
745 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
747 # Validate the package field
748 package = control.Find("Package")
749 if not re_valid_pkg_name.match(package):
750 self.rejects.append("%s: invalid package name '%s'." % (f, package))
752 # Validate the version field
753 version = control.Find("Version")
754 if not re_valid_version.match(version):
755 self.rejects.append("%s: invalid version number '%s'." % (f, version))
757 # Ensure the architecture of the .deb is one we know about.
758 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
759 architecture = control.Find("Architecture")
760 upload_suite = self.pkg.changes["distribution"].keys()[0]
# Accept architectures known to either the default or the upload suite.
762 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
763 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
764 self.rejects.append("Unknown architecture '%s'." % (architecture))
766 # Ensure the architecture of the .deb is one of the ones
767 # listed in the .changes.
768 if not self.pkg.changes["architecture"].has_key(architecture):
769 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
771 # Sanity-check the Depends field
772 depends = control.Find("Depends")
774 self.rejects.append("%s: Depends field is empty." % (f))
776 # Sanity-check the Provides field
777 provides = control.Find("Provides")
779 provide = re_spacestrip.sub('', provides)
781 self.rejects.append("%s: Provides field is empty." % (f))
782 prov_list = provide.split(",")
783 for prov in prov_list:
784 if not re_valid_pkg_name.match(prov):
785 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
787 # If there is a Built-Using field, we need to check we can find the
788 # exact source version
789 built_using = control.Find("Built-Using")
792 entry["built-using"] = []
793 for dep in apt_pkg.parse_depends(built_using):
794 bu_s, bu_v, bu_e = dep[0]
795 # Check that it's an exact match dependency and we have
796 # some form of version
797 if bu_e != "=" or len(bu_v) < 1:
798 self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
800 # Find the source id for this version
801 bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
803 self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
805 entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
807 except ValueError, e:
808 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
811 # Check the section & priority match those given in the .changes (non-fatal)
812 if control.Find("Section") and entry["section"] != "" \
813 and entry["section"] != control.Find("Section"):
814 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
815 (f, control.Find("Section", ""), entry["section"]))
816 if control.Find("Priority") and entry["priority"] != "" \
817 and entry["priority"] != control.Find("Priority"):
818 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
819 (f, control.Find("Priority", ""), entry["priority"]))
821 entry["package"] = package
822 entry["architecture"] = architecture
823 entry["version"] = version
824 entry["maintainer"] = control.Find("Maintainer", "")
# Record the database type from the extension.
826 if f.endswith(".udeb"):
827 self.pkg.files[f]["dbtype"] = "udeb"
828 elif f.endswith(".deb"):
829 self.pkg.files[f]["dbtype"] = "deb"
831 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
833 entry["source"] = control.Find("Source", entry["package"])
835 # Get the source version
836 source = entry["source"]
# A "pkg (version)" Source field carries an explicit source version.
839 if source.find("(") != -1:
840 m = re_extract_src_version.match(source)
842 source_version = m.group(2)
844 if not source_version:
845 source_version = self.pkg.files[f]["version"]
847 entry["source package"] = source
848 entry["source version"] = source_version
850 # Ensure the filename matches the contents of the .deb
851 m = re_isadeb.match(f)
854 file_package = m.group(1)
855 if entry["package"] != file_package:
856 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
857 (f, file_package, entry["dbtype"], entry["package"]))
858 epochless_version = re_no_epoch.sub('', control.Find("Version"))
861 file_version = m.group(2)
862 if epochless_version != file_version:
863 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
864 (f, file_version, entry["dbtype"], epochless_version))
867 file_architecture = m.group(3)
868 if entry["architecture"] != file_architecture:
869 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
870 (f, file_architecture, entry["dbtype"], entry["architecture"]))
872 # Check for existent source
873 source_version = entry["source version"]
874 source_package = entry["source package"]
875 if self.pkg.changes["architecture"].has_key("source"):
876 if source_version != self.pkg.changes["version"]:
877 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
878 (source_version, f, self.pkg.changes["version"]))
880 # Check in the SQL database
881 if not source_exists(source_package, source_version, suites = \
882 self.pkg.changes["distribution"].keys(), session = session):
883 # Check in one of the other directories
884 source_epochless_version = re_no_epoch.sub('', source_version)
885 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
886 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
888 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
# Fall back to scanning the other configured queue directories.
891 dsc_file_exists = False
892 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
893 if cnf.has_key("Dir::Queue::%s" % (myq)):
894 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
895 dsc_file_exists = True
898 if not dsc_file_exists:
899 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
901 # Check the version and for file overwrites
902 self.check_binary_against_db(f, session)
# Validate one source file (.dsc/.tar/.diff) of the upload: cross-check
# name/version against the .changes, verify "source" is listed in the
# Architecture field, and check a .dsc's signature.
904 def source_file_checks(self, f, session):
905 entry = self.pkg.files[f]
# re_issource splits the filename into (package, version, type).
907 m = re_issource.match(f)
911 entry["package"] = m.group(1)
912 entry["version"] = m.group(2)
913 entry["type"] = m.group(3)
915 # Ensure the source package name matches the Source filed in the .changes
916 if self.pkg.changes["source"] != entry["package"]:
917 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
919 # Ensure the source version matches the version in the .changes file
# orig tarballs carry the upstream version only, hence chopversion2.
920 if re_is_orig_source.match(f):
921 changes_version = self.pkg.changes["chopversion2"]
923 changes_version = self.pkg.changes["chopversion"]
925 if changes_version != entry["version"]:
926 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
928 # Ensure the .changes lists source in the Architecture field
929 if not self.pkg.changes["architecture"].has_key("source"):
930 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
932 # Check the signature of a .dsc file
933 if entry["type"] == "dsc":
934 # check_signature returns either:
935 # (None, [list, of, rejects]) or (signature, [])
936 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
938 self.rejects.append(j)
940 entry["architecture"] = "source"
# Per-target-suite checks for one file: component mapping/validation,
# NEW detection via overrides, priority sanity, pool location lookup, and
# md5sum/size comparison against any existing pool copy.
942 def per_suite_file_checks(self, f, suite, session):
944 entry = self.pkg.files[f]
# byhand files skip these checks entirely.
947 if entry.has_key("byhand"):
950 # Check we have fields we need to do these checks
952 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
953 if not entry.has_key(m):
954 self.rejects.append("file '%s' does not have field %s set" % (f, m))
960 # Handle component mappings
961 for m in cnf.ValueList("ComponentMappings"):
962 (source, dest) = m.split()
963 if entry["component"] == source:
964 entry["original component"] = source
965 entry["component"] = dest
967 # Ensure the component is valid for the target suite
# NOTE(review): "Suite:%s::Components" (single colon) on the has_key line
# differs from "Suite::%s::Components" on the ValueList line — the
# has_key lookup probably never matches. Confirm against upstream.
968 if cnf.has_key("Suite:%s::Components" % (suite)) and \
969 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
970 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
973 # Validate the component
974 if not get_component(entry["component"], session):
975 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
978 # See if the package is NEW
979 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
982 # Validate the priority
983 if entry["priority"].find('/') != -1:
984 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
986 # Determine the location
987 location = cnf["Dir::Pool"]
988 l = get_location(location, entry["component"], session=session)
990 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
991 entry["location id"] = -1
993 entry["location id"] = l.location_id
995 # Check the md5sum & size against existing files (if any)
996 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
998 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
999 entry["size"], entry["md5sum"], entry["location id"])
1002 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
1003 elif found is False and poolfile is not None:
1004 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1006 if poolfile is None:
1007 entry["files id"] = None
1009 entry["files id"] = poolfile.file_id
1011 # Check for packages that have moved from one component to another
1012 entry['suite'] = suite
1013 arch_list = [entry["architecture"], 'all']
1014 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1015 [suite], arch_list = arch_list, session = session)
1016 if component is not None:
1017 entry["othercomponents"] = component
# Top-level per-file validation for the upload: copies files into the holding
# area, rejects .changes files already known to dak, classifies each member as
# deb / source / byhand / unreadable, dispatches to binary_file_checks /
# source_file_checks, and finally runs per-suite checks.  All problems are
# accumulated in self.rejects / self.warnings rather than raised.
1019 def check_files(self, action=True):
1020 file_keys = self.pkg.files.keys()
1026 os.chdir(self.pkg.directory)
1028 ret = holding.copy_to_holding(f)
# copy failure is not fatal here; the file may still be found in the DB/pool
1030 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1034 # check we already know the changes file
1035 # [NB: this check must be done post-suite mapping]
1036 base_filename = os.path.basename(self.pkg.changes_file)
1038 session = DBConn().session()
1041 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1042 # if in the pool or in a queue other than unchecked, reject
1043 if (dbc.in_queue is None) \
1044 or (dbc.in_queue is not None
1045 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1046 self.rejects.append("%s file already known to dak" % base_filename)
# no existing DBChange row means this is a genuinely new upload
1047 except NoResultFound, e:
1051 has_binaries = False
1054 for f, entry in self.pkg.files.items():
1055 # Ensure the file does not already exist in one of the accepted directories
1056 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1057 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1058 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1059 self.rejects.append("%s file already exists in the %s directory." % (f, d))
# reject filenames containing characters unsafe for shell/path handling
1061 if not re_taint_free.match(f):
1062 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1064 # Check the file is readable
1065 if os.access(f, os.R_OK) == 0:
1066 # When running in -n, copy_to_holding() won't have
1067 # generated the reject_message, so we need to.
1069 if os.path.exists(f):
1070 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1072 # Don't directly reject, mark to check later to deal with orig's
1073 # we can find in the pool
1074 self.later_check_files.append(f)
1075 entry["type"] = "unreadable"
1078 # If it's byhand skip remaining checks
1079 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1081 entry["type"] = "byhand"
1083 # Checks for a binary package...
1084 elif re_isadeb.match(f):
1086 entry["type"] = "deb"
1088 # This routine appends to self.rejects/warnings as appropriate
1089 self.binary_file_checks(f, session)
1091 # Checks for a source package...
1092 elif re_issource.match(f):
1095 # This routine appends to self.rejects/warnings as appropriate
1096 self.source_file_checks(f, session)
1098 # Not a binary or source package? Assume byhand...
1101 entry["type"] = "byhand"
1103 # Per-suite file checks
1104 entry["oldfiles"] = {}
1105 for suite in self.pkg.changes["distribution"].keys():
1106 self.per_suite_file_checks(f, suite, session)
1110 # If the .changes file says it has source, it must have source.
1111 if self.pkg.changes["architecture"].has_key("source"):
1113 self.rejects.append("no source found and Architecture line in changes mention source.")
# optionally refuse source-only uploads (site policy switch)
1115 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1116 self.rejects.append("source only uploads are not supported.")
1118 ###########################################################################
# Locate the single .dsc entry in self.pkg.files.
# Returns a (status, value) pair rather than raising: value is the filename on
# success, or an error string when there are zero or multiple .dsc files.
1120 def __dsc_filename(self):
1122 Returns: (Status, Dsc_Filename)
1124 Status: Boolean; True when there was no error, False otherwise
1125 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1130 for name, entry in self.pkg.files.items():
1131 if entry.has_key("type") and entry["type"] == "dsc":
# more than one .dsc in a single .changes is never valid
1133 return False, "cannot process a .changes file with multiple .dsc's."
1137 if not dsc_filename:
1138 return False, "source uploads must contain a dsc file"
1140 return True, dsc_filename
# Parse the upload's .dsc file into self.pkg.dsc via utils.parse_changes.
# Each parse failure mode is translated into a (False, reason) pair so the
# caller can reject with a specific message instead of seeing an exception.
1142 def load_dsc(self, action=True, signing_rules=1):
1144 Find and load the dsc from self.pkg.files into self.dsc
1146 Returns: (Status, Reason)
1148 Status: Boolean; True when there was no error, False otherwise
1149 Reason: String; When Status is False this describes the error
# delegate .dsc discovery (and its error messages) to __dsc_filename
1153 (status, dsc_filename) = self.__dsc_filename()
1155 # If status is false, dsc_filename has the reason
1156 return False, dsc_filename
1159 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1160 except CantOpenError:
1162 return False, "%s: can't read file." % (dsc_filename)
1163 except ParseChangesError, line:
1164 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1165 except InvalidDscError, line:
1166 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1167 except ChangesUnicodeError:
1168 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1172 ###########################################################################
# Full validation of the source package's .dsc: mandatory fields, name/version
# syntax, allowed source formats per target suite, Maintainer and Build-Depends
# parseability, version agreement with the .changes, Files-field consistency,
# and comparison against existing source in the database.  Appends reasons to
# self.rejects; returns a bool per the docstring below.
1174 def check_dsc(self, action=True, session=None):
1175 """Returns bool indicating whether or not the source changes are valid"""
1176 # Ensure there is source to check
1177 if not self.pkg.changes["architecture"].has_key("source"):
1180 (status, reason) = self.load_dsc(action=action)
1182 self.rejects.append(reason)
1184 (status, dsc_filename) = self.__dsc_filename()
1186 # If status is false, dsc_filename has the reason
1187 self.rejects.append(dsc_filename)
1190 # Build up the file list of files mentioned by the .dsc
1192 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1193 except NoFilesFieldError:
1194 self.rejects.append("%s: no Files: field." % (dsc_filename))
1196 except UnknownFormatError, format:
1197 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1199 except ParseChangesError, line:
1200 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1203 # Enforce mandatory fields
1204 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1205 if not self.pkg.dsc.has_key(i):
1206 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1209 # Validate the source and version fields
1210 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1211 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1212 if not re_valid_version.match(self.pkg.dsc["version"]):
1213 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1215 # Only a limited list of source formats are allowed in each suite
1216 for dist in self.pkg.changes["distribution"].keys():
1217 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1218 if self.pkg.dsc["format"] not in allowed:
1219 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1221 # Validate the Maintainer field
1223 # We ignore the return value
1224 fix_maintainer(self.pkg.dsc["maintainer"])
1225 except ParseMaintError, msg:
1226 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1227 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1229 # Validate the build-depends field(s)
1230 for field_name in [ "build-depends", "build-depends-indep" ]:
1231 field = self.pkg.dsc.get(field_name)
1233 # Have apt try to parse them...
1235 apt_pkg.ParseSrcDepends(field)
1237 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1239 # Ensure the version number in the .dsc matches the version number in the .changes
# the .changes carries the epoch-less version, so strip the epoch first
1240 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1241 changes_version = self.pkg.files[dsc_filename]["version"]
1243 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1244 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1246 # Ensure the Files field contain only what's expected
1247 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1249 # Ensure source is newer than existing source in target suites
# NOTE(review): this rebinds the local name `session`, discarding the
# `session` parameter accepted above — confirm whether that is intentional.
1250 session = DBConn().session()
1251 self.check_source_against_db(dsc_filename, session)
1252 self.check_dsc_against_db(dsc_filename, session)
1254 dbchg = get_dbchange(self.pkg.changes_file, session)
1256 # Finally, check if we're missing any files
1257 for f in self.later_check_files:
1259 # Check if we've already processed this file if we have a dbchg object
1262 for pf in dbchg.files:
1263 if pf.filename == f and pf.processed:
1264 self.notes.append('%s was already processed so we can go ahead' % f)
1266 del self.pkg.files[f]
1268 self.rejects.append("Could not find file %s references in changes" % f)
1274 ###########################################################################
1276 def get_changelog_versions(self, source_dir):
1277 """Extracts a the source package and (optionally) grabs the
1278 version history out of debian/changelog for the BTS."""
# Symlinks all source members (and any pool-resident orig tarballs) into the
# current (temporary) directory, unpacks the source via UnpackedSource, then
# collects the version lines from debian/changelog into
# self.pkg.dsc["bts changelog"].  Failures are appended to self.rejects.
1282 # Find the .dsc (again)
1284 for f in self.pkg.files.keys():
1285 if self.pkg.files[f]["type"] == "dsc":
1288 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1289 if not dsc_filename:
1292 # Create a symlink mirror of the source files in our temporary directory
1293 for f in self.pkg.files.keys():
1294 m = re_issource.match(f)
1296 src = os.path.join(source_dir, f)
1297 # If a file is missing for whatever reason, give up.
1298 if not os.path.exists(src):
# orig tarballs already located in the pool are handled separately below
1301 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1302 self.pkg.orig_files[f].has_key("path"):
1304 dest = os.path.join(os.getcwd(), f)
1305 os.symlink(src, dest)
1307 # If the orig files are not a part of the upload, create symlinks to the
1309 for orig_file in self.pkg.orig_files.keys():
1310 if not self.pkg.orig_files[orig_file].has_key("path"):
1312 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1313 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1315 # Extract the source
1317 unpacked = UnpackedSource(dsc_filename)
1319 self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
# BTS version tracking is optional; skip when no tracking dir is configured
1322 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1325 # Get the upstream version
1326 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1327 if re_strip_revision.search(upstr_version):
1328 upstr_version = re_strip_revision.sub('', upstr_version)
1330 # Ensure the changelog file exists
1331 changelog_file = unpacked.get_changelog_file()
1332 if changelog_file is None:
1333 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1336 # Parse the changelog
1337 self.pkg.dsc["bts changelog"] = ""
# keep only the changelog stanza header lines (package/version lines)
1338 for line in changelog_file.readlines():
1339 m = re_changelog_versions.match(line)
1341 self.pkg.dsc["bts changelog"] += line
1342 changelog_file.close()
1345 # Check we found at least one revision in the changelog
1346 if not self.pkg.dsc["bts changelog"]:
1347 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
# Unpack the source in a temporary directory (to harvest changelog versions)
# and then clean that directory up, fixing permissions with chmod -R u+rwx if
# the first rmtree fails with EACCES.
1349 def check_source(self):
1351 # a) there's no source
# sourceless uploads have nothing to unpack; bail out early
1352 if not self.pkg.changes["architecture"].has_key("source"):
1355 tmpdir = utils.temp_dirname()
1357 # Move into the temporary directory
1361 # Get the changelog version history
1362 self.get_changelog_versions(cwd)
1364 # Move back and cleanup the temporary tree
1368 shutil.rmtree(tmpdir)
# anything other than a permission problem is unrecoverable here
1370 if e.errno != errno.EACCES:
1372 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1374 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1375 # We probably have u-r or u-w directories so chmod everything
1377 cmd = "chmod -R u+rwx %s" % (tmpdir)
1378 result = os.system(cmd)
1380 utils.fubar("'%s' failed with result %s." % (cmd, result))
1381 shutil.rmtree(tmpdir)
1382 except Exception, e:
# NOTE(review): "foobar2" print looks like leftover debug output — consider
# removing or replacing with a proper log/warn call.
1383 print "foobar2 (%s)" % e
1384 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1386 ###########################################################################
1387 def ensure_hashes(self):
# Validate/compute checksum fields for the .changes and .dsc: re-reads the
# Checksums-* fields from the original changes blob, rejects unsupported hash
# fields, and (via utils helpers) computes any hashes that the declared
# changes-format version predates.  Problems go to self.rejects.
1388 # Make sure we recognise the format of the Files: field in the .changes
1389 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1390 if len(format) == 2:
1391 format = int(format[0]), int(format[1])
# single-component version string, e.g. "1" -> (1, 0)
1393 format = int(float(format[0])), 0
1395 # We need to deal with the original changes blob, as the fields we need
1396 # might not be in the changes dict serialised into the .dak anymore.
1397 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1399 # Copy the checksums over to the current changes dict. This will keep
1400 # the existing modifications to it intact.
1401 for field in orig_changes:
1402 if field.startswith('checksums-'):
1403 self.pkg.changes[field] = orig_changes[field]
1405 # Check for unsupported hashes
1406 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1407 self.rejects.append(j)
1409 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1410 self.rejects.append(j)
1412 # We have to calculate the hash if we have an earlier changes version than
1413 # the hash appears in rather than require it exist in the changes file
1414 for hashname, hashfunc, version in utils.known_hashes:
1415 # TODO: Move _ensure_changes_hash into this class
1416 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1417 self.rejects.append(j)
# .dsc hashes only matter for sourceful uploads
1418 if "source" in self.pkg.changes["architecture"]:
1419 # TODO: Move _ensure_dsc_hash into this class
1420 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1421 self.rejects.append(j)
# Verify md5sums and sizes of all files listed in the .changes and .dsc
# against the files on disk, then run ensure_hashes() for the remaining
# (newer) hash types.  All mismatches are appended to self.rejects.
1423 def check_hashes(self):
1424 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1425 self.rejects.append(m)
1427 for m in utils.check_size(".changes", self.pkg.files):
1428 self.rejects.append(m)
1430 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1431 self.rejects.append(m)
1433 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1434 self.rejects.append(m)
1436 self.ensure_hashes()
1438 ###########################################################################
1440 def ensure_orig(self, target_dir='.', session=None):
1442 Ensures that all orig files mentioned in the changes file are present
1443 in target_dir. If they do not exist, they are symlinked into place.
1445 An list containing the symlinks that were created are returned (so they
# Candidate sources are searched in order: the pool (by filename), then the
# other configured queue directories.  A candidate is only linked when its
# size and md5sum match the .dsc entry (see symlink_if_valid below).
1452 for filename, entry in self.pkg.dsc_files.iteritems():
1453 if not re_is_orig_source.match(filename):
1454 # File is not an orig; ignore
1457 if os.path.exists(filename):
1458 # File exists, no need to continue
# Helper: symlink `path` into target_dir iff its (size, md5sum) fingerprint
# matches what the .dsc declares; records the created link in `symlinked`.
1461 def symlink_if_valid(path):
1462 f = utils.open_file(path)
1463 md5sum = apt_pkg.md5sum(f)
1466 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1467 expected = (int(entry['size']), entry['md5sum'])
1469 if fingerprint != expected:
1472 dest = os.path.join(target_dir, filename)
1474 os.symlink(path, dest)
1475 symlinked.append(dest)
1481 session_ = DBConn().session()
# try every pool file with a matching name until one validates
1486 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1487 poolfile_path = os.path.join(
1488 poolfile.location.path, poolfile.filename
1491 if symlink_if_valid(poolfile_path):
1501 # Look in some other queues for the file
1502 queues = ('New', 'Byhand', 'ProposedUpdates',
1503 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1505 for queue in queues:
1506 if not cnf.get('Dir::Queue::%s' % queue):
1509 queuefile_path = os.path.join(
1510 cnf['Dir::Queue::%s' % queue], filename
1513 if not os.path.exists(queuefile_path):
1514 # Does not exist in this queue
1517 if symlink_if_valid(queuefile_path):
1522 ###########################################################################
1524 def check_lintian(self):
1526 Extends self.rejects by checking the output of lintian against tags
1527 specified in Dinstall::LintianTags.
# Only sourceful uploads to unstable/experimental are checked.  The tag list
# comes from a YAML file; lintian is run over the .changes (with orig
# tarballs symlinked into place first) and its output is turned into reject
# messages via parse_lintian_output / generate_reject_messages.
1532 # Don't reject binary uploads
1533 if not self.pkg.changes['architecture'].has_key('source'):
1536 # Only check some distributions
1537 for dist in ('unstable', 'experimental'):
1538 if dist in self.pkg.changes['distribution']:
1543 # If we do not have a tagfile, don't do anything
1544 tagfile = cnf.get("Dinstall::LintianTags")
1548 # Parse the yaml file
1549 sourcefile = file(tagfile, 'r')
1550 sourcecontent = sourcefile.read()
# NOTE(review): yaml.load on an operator-controlled config file; prefer
# yaml.safe_load anyway since full load can construct arbitrary objects.
1554 lintiantags = yaml.load(sourcecontent)['lintian']
1555 except yaml.YAMLError, msg:
1556 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1559 # Try and find all orig mentioned in the .dsc
1560 symlinked = self.ensure_orig()
1562 # Setup the input file for lintian
1563 fd, temp_filename = utils.temp_filename()
1564 temptagfile = os.fdopen(fd, 'w')
1565 for tags in lintiantags.values():
1566 temptagfile.writelines(['%s\n' % x for x in tags])
1570 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1571 (temp_filename, self.pkg.changes_file)
1573 result, output = commands.getstatusoutput(cmd)
1575 # Remove our tempfile and any symlinks we created
1576 os.unlink(temp_filename)
1578 for symlink in symlinked:
# a non-zero lintian exit is warned about, not treated as a reject by itself
1582 utils.warn("lintian failed for %s [return code: %s]." % \
1583 (self.pkg.changes_file, result))
1584 utils.warn(utils.prefix_multi_line_string(output, \
1585 " [possible output:] "))
1590 [self.pkg.changes_file, "check_lintian"] + list(txt)
1594 parsed_tags = parse_lintian_output(output)
1595 self.rejects.extend(
1596 generate_reject_messages(parsed_tags, lintiantags, log=log)
1599 ###########################################################################
# Normalise the Urgency field of sourceful uploads: default it when missing,
# lowercase it, and fall back to Urgency::Default (with a warning) when the
# value is not in the configured Urgency::Valid list.
1600 def check_urgency(self):
1602 if self.pkg.changes["architecture"].has_key("source"):
1603 if not self.pkg.changes.has_key("urgency"):
1604 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1605 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1606 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1607 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1608 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1609 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1611 ###########################################################################
1613 # Sanity check the time stamps of files inside debs.
1614 # [Files in the near future cause ugly warnings and extreme time
1615 # travel can cause errors on extraction]
# Inspect the timestamps of members inside each uploaded .deb's control and
# data tarballs via apt_inst, rejecting files dated beyond
# Dinstall::FutureTimeTravelGrace in the future or before
# Dinstall::PastCutoffYear in the past.
1617 def check_timestamps(self):
# NOTE(review): uses capitalised `Cnf` here while surrounding methods use
# `cnf` — presumably a module-level global; confirm both names exist.
1620 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1621 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1622 tar = TarTime(future_cutoff, past_cutoff)
1624 for filename, entry in self.pkg.files.items():
1625 if entry["type"] == "deb":
1628 deb_file = utils.open_file(filename)
1629 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1632 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1633 except SystemError, e:
1634 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1635 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1638 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1642 future_files = tar.future_files.keys()
# only the first offending file is named in the reject message
1644 num_future_files = len(future_files)
1645 future_file = future_files[0]
1646 future_date = tar.future_files[future_file]
1647 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1648 % (filename, num_future_files, future_file, time.ctime(future_date)))
1650 ancient_files = tar.ancient_files.keys()
1652 num_ancient_files = len(ancient_files)
1653 ancient_file = ancient_files[0]
1654 ancient_date = tar.ancient_files[ancient_file]
1655 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1656 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1658 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
# Decide whether this upload is sponsored: the signer's uid email/name is
# compared against the Maintainer and Changed-By fields.  For sourceful
# uploads signed via an email alias, the key's addresses are checked too and
# self.pkg.changes["sponsoremail"] is recorded for later notification.
1660 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1661 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1663 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
# Some uploads are by teams/aliases; look at all addresses on the signing key
1669 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1670 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1671 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1672 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1673 self.pkg.changes["sponsoremail"] = uid_email
1678 ###########################################################################
1679 # check_signed_by_key checks
1680 ###########################################################################
1682 def check_signed_by_key(self):
1683 """Ensure the .changes is signed by an authorized uploader."""
# Looks up the signing fingerprint in the database, rejects unknown
# fingerprints/uids, then delegates to check_upload_permissions and
# check_transition.  All failures accumulate in self.rejects.
1684 session = DBConn().session()
1686 # First of all we check that the person has proper upload permissions
1687 # and that this upload isn't blocked
1688 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1691 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1694 # TODO: Check that import-keyring adds UIDs properly
1696 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1699 # Check that the fingerprint which uploaded has permission to do so
1700 self.check_upload_permissions(fpr, session)
1702 # Check that this package is not in a transition
1703 self.check_transition(session)
# Enforce the ACLs attached to the signing fingerprint: one-off upload
# blocks, source upload rights (with DM as a special case), sponsorship
# detection, and per-architecture binary upload rights.
1708 def check_upload_permissions(self, fpr, session):
1709 # Check any one-off upload blocks
1710 self.check_upload_blocks(fpr, session)
1712 # If the source_acl is None, source is never allowed
1713 if fpr.source_acl is None:
1714 if self.pkg.changes["architecture"].has_key("source"):
1715 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1716 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1717 self.rejects.append(rej)
1719 # Do DM as a special case
1720 # DM is a special case unfortunately, so we check it first
1721 # (keys with no source access get more access than DMs in one
1722 # way; DMs can only upload for their packages whether source
1723 # or binary, whereas keys with no access might be able to
1724 # upload some binaries)
1725 elif fpr.source_acl.access_level == 'dm':
1726 self.check_dm_upload(fpr, session)
1728 # If not a DM, we allow full upload rights
1729 uid_email = "%s@debian.org" % (fpr.uid.uid)
1730 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1733 # Check binary upload permissions
1734 # By this point we know that DMs can't have got here unless they
1735 # are allowed to deal with the package concerned so just apply
1737 if fpr.binary_acl.access_level == 'full':
1740 # Otherwise we're in the map case
# start from all uploaded architectures and strike out the permitted ones;
# whatever remains is not allowed for this key
1741 tmparches = self.pkg.changes["architecture"].copy()
1742 tmparches.pop('source', None)
1744 for bam in fpr.binary_acl_map:
1745 tmparches.pop(bam.architecture.arch_string, None)
1747 if len(tmparches.keys()) > 0:
1748 if fpr.binary_reject:
1749 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1750 if len(tmparches.keys()) == 1:
1751 rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1753 rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1754 self.rejects.append(rej)
1756 # TODO: This is where we'll implement reject vs throw away binaries later
1757 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1758 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
# BUG(review): `rej += "...", (fpr.fingerprint)` makes the right-hand side a
# tuple, so this raises TypeError instead of appending the fingerprint —
# should be `rej += "\nFingerprint: %s" % (fpr.fingerprint)`.
1759 rej += "\nFingerprint: %s", (fpr.fingerprint)
1760 self.rejects.append(rej)
1763 def check_upload_blocks(self, fpr, session):
1764 """Check whether any upload blocks apply to this source, source
1765 version, uid / fpr combination"""
# Builds the common prefix of a block-reject message for UploadBlock fb.
1767 def block_rej_template(fb):
1768 rej = 'Manual upload block in place for package %s' % fb.source
1769 if fb.version is not None:
1770 rej += ', version %s' % fb.version
1773 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1774 # version is None if the block applies to all versions
1775 if fb.version is None or fb.version == self.pkg.changes['version']:
1776 # Check both fpr and uid - either is enough to cause a reject
1777 if fb.fpr is not None:
1778 if fb.fpr.fingerprint == fpr.fingerprint:
1779 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1780 if fb.uid is not None:
1781 if fb.uid == fpr.uid:
1782 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
# Enforce the Debian Maintainer upload conditions from the 2007 GR: no
# NEW/BYHAND files, the source must already exist with DM-Upload-Allowed:
# yes, the DM must be listed in Maintainer/Uploaders, the upload must not be
# sponsored by the DM, and no binaries may be taken over from other sources.
1785 def check_dm_upload(self, fpr, session):
1786 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1787 ## none of the uploaded packages are NEW
1789 for f in self.pkg.files.keys():
1790 if self.pkg.files[f].has_key("byhand"):
1791 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1793 if self.pkg.files[f].has_key("new"):
1794 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1800 r = get_newest_source(self.pkg.changes["source"], session)
1803 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1804 self.rejects.append(rej)
1807 if not r.dm_upload_allowed:
1808 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1809 self.rejects.append(rej)
1812 ## the Maintainer: field of the uploaded .changes file corresponds with
1813 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1815 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1816 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1818 ## the most recent version of the package uploaded to unstable or
1819 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1820 ## non-developer maintainers cannot NMU or hijack packages)
1822 # uploader includes the maintainer
1824 for uploader in r.uploaders:
1825 (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1826 # Eww - I hope we never have two people with the same name in Debian
1827 if email == fpr.uid.uid or name == fpr.uid.name:
1832 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1835 ## none of the packages are being taken over from other source packages
1836 for b in self.pkg.changes["binary"].keys():
1837 for suite in self.pkg.changes["distribution"].keys():
1838 for s in get_source_by_package_and_suite(b, suite, session):
1839 if s.source != self.pkg.changes["source"]:
1840 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
# Reject sourceful unstable uploads of packages that are part of an ongoing
# release transition (as described by the YAML file named in
# Dinstall::Reject::ReleaseTransitions) until the transition's target version
# has reached testing.
1844 def check_transition(self, session):
1847 sourcepkg = self.pkg.changes["source"]
1849 # No sourceful upload -> no need to do anything else, direct return
1850 # We also work with unstable uploads, not experimental or those going to some
1851 # proposed-updates queue
1852 if "source" not in self.pkg.changes["architecture"] or \
1853 "unstable" not in self.pkg.changes["distribution"]:
1856 # Also only check if there is a file defined (and existant) with
1858 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1859 if transpath == "" or not os.path.exists(transpath):
1862 # Parse the yaml file
1863 sourcefile = file(transpath, 'r')
1864 sourcecontent = sourcefile.read()
# NOTE(review): yaml.load on an operator-maintained file; yaml.safe_load
# would be the safer default.
1866 transitions = yaml.load(sourcecontent)
1867 except yaml.YAMLError, msg:
1868 # This shouldn't happen, there is a wrapper to edit the file which
1869 # checks it, but we prefer to be safe than ending up rejecting
1871 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1874 # Now look through all defined transitions
1875 for trans in transitions:
1876 t = transitions[trans]
1877 source = t["source"]
1880 # Will be None if nothing is in testing.
1881 current = get_source_in_suite(source, "testing", session)
1882 if current is not None:
1883 compare = apt_pkg.VersionCompare(current.version, expected)
1885 if current is None or compare < 0:
1886 # This is still valid, the current version in testing is older than
1887 # the new version we wait for, or there is none in testing yet
1889 # Check if the source we look at is affected by this.
1890 if sourcepkg in t['packages']:
1891 # The source is affected, lets reject it.
1893 rejectmsg = "%s: part of the %s transition.\n\n" % (
1896 if current is not None:
1897 currentlymsg = "at version %s" % (current.version)
1899 currentlymsg = "not present in testing"
1901 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
# the boilerplate names the responsible Release Team member (t["rm"])
1903 rejectmsg += "\n".join(textwrap.wrap("""Your package
1904 is part of a testing transition designed to get %s migrated (it is
1905 currently %s, we need version %s). This transition is managed by the
1906 Release Team, and %s is the Release-Team member responsible for it.
1907 Please mail debian-release@lists.debian.org or contact %s directly if you
1908 need further assistance. You might want to upload to experimental until this
1909 transition is done."""
1910 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1912 self.rejects.append(rejectmsg)
1915 ###########################################################################
1916 # End check_signed_by_key checks
1917 ###########################################################################
1919 def build_summaries(self):
1920 """ Build a summary of changes the upload introduces. """
# Returns (summary, short_summary): the short form comes straight from
# pkg.file_summary(); the long form adds the (reformatted) Changes text,
# the override entries and the announce() output.
1922 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1924 short_summary = summary
1926 # This is for direport's benefit...
# re_fdnic rewrites the Changes blob's blank-line separators for display
1927 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1930 summary += "Changes: " + f
1932 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1934 summary += self.announce(short_summary, 0)
1936 return (summary, short_summary)
1938 ###########################################################################
1940 def close_bugs(self, summary, action):
1942 Send mail to close bugs as instructed by the closes field in the changes file.
1943 Also add a line to summary if any work was done.
1945 @type summary: string
1946 @param summary: summary text, as given by L{build_summaries}
1949 @param action: Set to false no real action will be done.
1952 @return: summary. If action was taken, extended by the list of closed bugs.
# One templated mail per bug number; self.Subst carries the per-bug
# substitutions and is cleaned up again after each send.
1956 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1958 bugs = self.pkg.changes["closes"].keys()
1964 summary += "Closing bugs: "
1966 summary += "%s " % (bug)
1969 self.Subst["__BUG_NUMBER__"] = bug
# uploads touching stable get an extra warning paragraph in the mail
1970 if self.pkg.changes["distribution"].has_key("stable"):
1971 self.Subst["__STABLE_WARNING__"] = """
1972 Note that this package is not part of the released stable Debian
1973 distribution. It may have dependencies on other unreleased software,
1974 or other instabilities. Please take care if you wish to install it.
1975 The update will eventually make its way into the next released Debian
1978 self.Subst["__STABLE_WARNING__"] = ""
1979 mail_message = utils.TemplateSubst(self.Subst, template)
1980 utils.send_mail(mail_message)
1982 # Clear up after ourselves
1983 del self.Subst["__BUG_NUMBER__"]
1984 del self.Subst["__STABLE_WARNING__"]
1986 if action and self.logger:
1987 self.logger.log(["closing bugs"] + bugs)
1993 ###########################################################################
1995 def announce(self, short_summary, action):
# (The docstring below is partially elided in this view.)
1997 Send an announce mail about a new upload.
1999 @type short_summary: string
2000 @param short_summary: Short summary text to include in the mail
2003 @param action: Set to false no real action will be done.
2006 @return: Textstring about action taken.
2011 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2013 # Only do announcements for source uploads with a recent dpkg-dev installed
2014 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2015 self.pkg.changes["architecture"].has_key("source"):
2021 self.Subst["__SHORT_SUMMARY__"] = short_summary
# Announce once per distinct announce list across all target distributions;
# lists_done (initialised on an elided line, presumably {}) deduplicates.
2023 for dist in self.pkg.changes["distribution"].keys():
2024 suite = get_suite(dist)
2025 if suite is None: continue
2026 announce_list = suite.announce
2027 if announce_list == "" or lists_done.has_key(announce_list):
2030 lists_done[announce_list] = 1
2031 summary += "Announcing to %s\n" % (announce_list)
# NOTE(review): the mail-sending below is presumably guarded by 'action'
# on an elided line - confirm.
2035 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
# Bcc a source-tracking server, when one is configured.
2036 if cnf.get("Dinstall::TrackingServer") and \
2037 self.pkg.changes["architecture"].has_key("source"):
2038 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2039 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2041 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2042 utils.send_mail(mail_message)
2044 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
# Optionally close bugs as part of the announcement pass.
2046 if cnf.FindB("Dinstall::CloseBugs"):
2047 summary = self.close_bugs(summary, action)
2049 del self.Subst["__SHORT_SUMMARY__"]
2053 ###########################################################################
2055 def accept (self, summary, short_summary, session=None):
# (The docstring below is partially elided in this view.)
2059 This moves all files referenced from the .changes into the pool,
2060 sends the accepted mail, announces to lists, closes bugs and
2061 also checks for override disparities. If enabled it will write out
2062 the version history for the BTS Version Tracking and will finally call
2065 @type summary: string
2066 @param summary: Summary text
2068 @type short_summary: string
2069 @param short_summary: Short summary
# Global accept statistics accumulator (bytes/count updated below).
2073 stats = SummaryStats()
2076 self.logger.log(["installing changes", self.pkg.changes_file])
# NOTE(review): initialisation of 'poolfiles' and 'binaries' (used below)
# is on elided lines.
2081 # Add the .dsc file to the DB first
2082 for newfile, entry in self.pkg.files.items():
2083 if entry["type"] == "dsc":
2084 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2088 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2089 for newfile, entry in self.pkg.files.items():
2090 if entry["type"] == "deb":
2091 b, pf = add_deb_to_db(self, newfile, session)
2093 poolfiles.append(pf)
2095 # If this is a sourceful diff only upload that is moving
2096 # cross-component we need to copy the .orig files into the new
2097 # component too for the same reasons as above.
2098 # XXX: mhy: I think this should be in add_dsc_to_db
2099 if self.pkg.changes["architecture"].has_key("source"):
2100 for orig_file in self.pkg.orig_files.keys():
2101 if not self.pkg.orig_files[orig_file].has_key("id"):
2102 continue # Skip if it's not in the pool
2103 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2104 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2105 continue # Skip if the location didn't change
# Look up the existing pool copy and its checksums.
2108 oldf = get_poolfile_by_id(orig_file_id, session)
2109 old_filename = os.path.join(oldf.location.path, oldf.filename)
2110 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2111 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2113 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2115 # TODO: Care about size/md5sum collisions etc
2116 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2118 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
# NOTE(review): the 'if newf is None:' guard around the copy below sits on
# an elided line - confirm.
2120 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2121 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2125 # Don't reference the old file from this changes
2127 if p.file_id == oldf.file_id:
2130 poolfiles.append(newf)
2132 # Fix up the DSC references
2135 for df in source.srcfiles:
2136 if df.poolfile.file_id == oldf.file_id:
2137 # Add a new DSC entry and mark the old one for deletion
2138 # Don't do it in the loop so we don't change the thing we're iterating over
# NOTE(review): construction of 'newdscf' (DSCFile()) is on an elided line.
2140 newdscf.source_id = source.source_id
2141 newdscf.poolfile_id = newf.file_id
2142 session.add(newdscf)
2152 # Make sure that our source object is up-to-date
2153 session.expire(source)
2155 # Add changelog information to the database
2156 self.store_changelog()
2158 # Install the files into the pool
2159 for newfile, entry in self.pkg.files.items():
2160 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2161 utils.move(newfile, destination)
2162 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2163 stats.accept_bytes += float(entry["size"])
2165 # Copy the .changes file across for suite which need it.
2166 copy_changes = dict([(x.copychanges, '')
2167 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2168 if x.copychanges is not None])
2170 for dest in copy_changes.keys():
2171 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2173 # We're done - commit the database changes
2175 # Our SQL session will automatically start a new transaction after
2178 # Now ensure that the metadata has been added
2179 # This has to be done after we copy the files into the pool
2180 # For source if we have it:
2181 if self.pkg.changes["architecture"].has_key("source"):
2182 import_metadata_into_db(source, session)
2184 # Now for any of our binaries
# NOTE(review): loop header over the accepted binaries ('b') is elided.
2186 import_metadata_into_db(b, session)
2190 # Move the .changes into the 'done' directory
# Archive the .changes under done/YYYY/MM/DD.
2191 ye, mo, da = time.gmtime()[0:3]
2192 donedir = os.path.join(cnf["Dir::Queue::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2193 if not os.path.isdir(donedir):
2194 os.makedirs(donedir)
2196 utils.move(self.pkg.changes_file,
2197 os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
# Record urgency for sourceful uploads when urgency logging is configured.
2199 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2200 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
# Send the accepted mail and announce the upload.
2203 self.Subst["__SUMMARY__"] = summary
2204 mail_message = utils.TemplateSubst(self.Subst,
2205 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2206 utils.send_mail(mail_message)
2207 self.announce(short_summary, 1)
2209 ## Helper stuff for DebBugs Version Tracking
2210 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2211 if self.pkg.changes["architecture"].has_key("source"):
# Write the version history to a temp file, then rename into place so the
# .versions file appears atomically.
2212 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2213 version_history = os.fdopen(fd, 'w')
2214 version_history.write(self.pkg.dsc["bts changelog"])
2215 version_history.close()
2216 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2217 self.pkg.changes_file[:-8]+".versions")
2218 os.rename(temp_filename, filename)
2219 os.chmod(filename, 0644)
2221 # Write out the binary -> source mapping.
2222 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2223 debinfo = os.fdopen(fd, 'w')
2224 for name, entry in sorted(self.pkg.files.items()):
2225 if entry["type"] == "deb":
2226 line = " ".join([entry["package"], entry["version"],
2227 entry["architecture"], entry["source package"],
2228 entry["source version"]])
2229 debinfo.write(line+"\n")
2231 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2232 self.pkg.changes_file[:-8]+".debinfo")
2233 os.rename(temp_filename, filename)
2234 os.chmod(filename, 0644)
2238 # Set up our copy queues (e.g. buildd queues)
2239 for suite_name in self.pkg.changes["distribution"].keys():
2240 suite = get_suite(suite_name, session)
2241 for q in suite.copy_queues:
# NOTE(review): the loop binding 'f' (over poolfiles) is on an elided line.
2243 q.add_file_from_pool(f)
2248 stats.accept_count += 1
2250 def check_override(self):
# (The docstring below is partially elided in this view.)
2252 Checks override entries for validity. Mails "Override disparity" warnings,
2253 if that feature is enabled.
2255 Abandons the check if
2256 - override disparity checks are disabled
2257 - mail sending is disabled
2262 # Abandon the check if override disparity checks have been disabled
2263 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
# Build the disparity summary; an elided line presumably returns early
# when the summary is empty - confirm.
2266 summary = self.pkg.check_override()
2271 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
# Substitute the summary into the template, mail it, then clean up Subst.
2274 self.Subst["__SUMMARY__"] = summary
2275 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2276 utils.send_mail(mail_message)
2277 del self.Subst["__SUMMARY__"]
2279 ###########################################################################
2281 def remove(self, from_dir=None):
# (The docstring below is partially elided in this view.)
2283 Used (for instance) in p-u to remove the package from unchecked
2285 Also removes the package from holding area.
2287 if from_dir is None:
2288 from_dir = self.pkg.directory
# NOTE(review): 'h' is bound on an elided line - presumably Holding().
# Remove each upload file from the source dir and, if present, from holding.
2291 for f in self.pkg.files.keys():
2292 os.unlink(os.path.join(from_dir, f))
2293 if os.path.exists(os.path.join(h.holding_dir, f)):
2294 os.unlink(os.path.join(h.holding_dir, f))
# Finally remove the .changes file itself from both places.
2296 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2297 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2298 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2300 ###########################################################################
2302 def move_to_queue (self, queue):
# (The docstring below is partially elided in this view.)
2304 Move files to a destination queue using the permissions in the table
# NOTE(review): 'h' is bound on an elided line - presumably Holding().
# queue.change_perms / queue.perms are octal-digit strings, hence int(_, 8).
2307 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2308 queue.path, perms=int(queue.change_perms, 8))
2309 for f in self.pkg.files.keys():
2310 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2312 ###########################################################################
2314 def force_reject(self, reject_files):
# (The docstring below is partially elided in this view.)
2316 Forcefully move files from the current directory to the
2317 reject directory. If any file already exists in the reject
2318 directory it will be moved to the morgue to make way for
2321 @type reject_files: dict
2322 @param reject_files: file dictionary
2328 for file_entry in reject_files:
2329 # Skip any files which don't exist or which we don't have permission to copy.
2330 if os.access(file_entry, os.R_OK) == 0:
2333 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
# O_EXCL claims the destination atomically; the surrounding try/except
# (elided here) catches the failure handled below.
2336 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2338 # File exists? Let's find a new name by adding a number
2339 if e.errno == errno.EEXIST:
2341 dest_file = utils.find_next_free(dest_file, 255)
2342 except NoFreeFilenameError:
2343 # Something's either gone badly Pete Tong, or
2344 # someone is trying to exploit us.
2345 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2348 # Make sure we really got it
# Second O_EXCL attempt on the freshly-picked name; failure is a race loss.
2350 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2353 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2357 # If we got here, we own the destination file, so we can
2358 # safely overwrite it.
2359 utils.move(file_entry, dest_file, 1, perms=0660)
2362 ###########################################################################
2363 def do_reject (self, manual=0, reject_message="", notes=""):
# (The docstring below is partially elided in this view.)
2365 Reject an upload. If called without a reject message or C{manual} is
2366 true, spawn an editor so the user can write one.
2369 @param manual: manual or automated rejection
2371 @type reject_message: string
2372 @param reject_message: A reject message
2377 # If we weren't given a manual rejection message, spawn an
2378 # editor so the user can add one in...
2379 if manual and not reject_message:
2380 (fd, temp_filename) = utils.temp_filename()
2381 temp_file = os.fdopen(fd, 'w')
# Pre-populate the editor buffer with any existing notes ('note' is bound
# by an elided loop over 'notes').
2384 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2385 % (note.author, note.version, note.notedate, note.comment))
2387 editor = os.environ.get("EDITOR","vi")
# Edit/confirm loop: re-open the editor until the operator settles on an
# answer other than 'E'.
2389 while answer == 'E':
2390 os.system("%s %s" % (editor, temp_filename))
2391 temp_fh = utils.open_file(temp_filename)
2392 reject_message = "".join(temp_fh.readlines())
2394 print "Reject message:"
2395 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2396 prompt = "[R]eject, Edit, Abandon, Quit ?"
2398 while prompt.find(answer) == -1:
2399 answer = utils.our_raw_input(prompt)
2400 m = re_default_answer.search(prompt)
2403 answer = answer[:1].upper()
2404 os.unlink(temp_filename)
# NOTE(review): the branches acting on 'A' (abandon) and 'Q' (quit) sit on
# elided lines.
2410 print "Rejecting.\n"
# <changes-basename>.reason inside the reject queue records why.
2414 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2415 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2417 # Move all the files into the reject directory
2418 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2419 self.force_reject(reject_files)
2421 # If we fail here someone is probably trying to exploit the race
2422 # so let's just raise an exception ...
2423 if os.path.exists(reason_filename):
2424 os.unlink(reason_filename)
2425 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2427 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
# Automatic-rejection branch: the mail comes from the archive address and
# the reject message is written directly into the .reason file.
# (The 'if not manual:'/'else:' pair is on elided lines.)
2431 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2432 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2433 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2434 os.write(reason_fd, reject_message)
2435 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2437 # Build up the rejection email
# Manual-rejection branch: mail is attributed to the operator.
2438 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2439 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2440 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2441 self.Subst["__REJECT_MESSAGE__"] = ""
2442 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2443 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2444 # Write the rejection email out as the <foo>.reason file
2445 os.write(reason_fd, reject_mail_message)
2447 del self.Subst["__REJECTOR_ADDRESS__"]
2448 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2449 del self.Subst["__CC__"]
2453 # Send the rejection mail
2454 utils.send_mail(reject_mail_message)
2457 self.logger.log(["rejected", self.pkg.changes_file])
2461 ################################################################################
2462 def in_override_p(self, package, component, suite, binary_type, filename, session):
# (The docstring below is partially elided in this view.)
2464 Check if a package already has override entries in the DB
2466 @type package: string
2467 @param package: package name
2469 @type component: string
2470 @param component: database id of the component
2473 @param suite: database id of the suite
2475 @type binary_type: string
2476 @param binary_type: type of the package
2478 @type filename: string
2479 @param filename: filename we check
2481 @return: the database result. But noone cares anyway.
# An empty binary_type means a source package; the 'dsc' assignment sits
# on an elided line.
2487 if binary_type == "": # must be source
2490 file_type = binary_type
2492 # Override suite name; used for example with proposed-updates
2493 oldsuite = get_suite(suite, session)
2494 if (not oldsuite is None) and oldsuite.overridesuite:
2495 suite = oldsuite.overridesuite
2497 result = get_override(package, suite, component, file_type, session)
2499 # If checking for a source package fall back on the binary override type
2500 if file_type == "dsc" and len(result) < 1:
2501 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2503 # Remember the section and priority so we can check them later if appropriate
# NOTE(review): 'result' is a list above but a single row here; the
# narrowing (e.g. 'if len(result) > 0: result = result[0]') is on elided
# lines - confirm.
2506 self.pkg.files[filename]["override section"] = result.section.section
2507 self.pkg.files[filename]["override priority"] = result.priority.priority
2512 ################################################################################
2513 def get_anyversion(self, sv_list, suite):
# (The docstring below is partially elided in this view.)
2516 @param sv_list: list of (suite, version) tuples to check
2519 @param suite: suite name
# Consider the suite itself plus every suite it "Enhances" per the
# configured version checks.
2525 anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
# Track the highest version found in any of those suites; 'anyversion' is
# initialised (presumably to None) and returned on elided lines.
2526 for (s, v) in sv_list:
2527 if s in [ x.lower() for x in anysuite ]:
2528 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2533 ################################################################################
2535 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
# (The docstring below is partially elided in this view.)
2538 @param sv_list: list of (suite, version) tuples to check
2540 @type filename: string
2541 @param filename: XXX
2543 @type new_version: string
2544 @param new_version: XXX
2546 Ensure versions are newer than existing packages in target
2547 suites and that cross-suite version checking rules as
2548 set out in the conf file are satisfied.
2553 # Check versions for each target suite
2554 for target_suite in self.pkg.changes["distribution"].keys():
2555 must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2556 must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2558 # Enforce "must be newer than target suite" even if conffile omits it
2559 if target_suite not in must_be_newer_than:
2560 must_be_newer_than.append(target_suite)
2562 for (suite, existent_version) in sv_list:
2563 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
# vercmp < 1 means new_version <= existent_version; only enforced for
# sourceful uploads.
2565 if suite in must_be_newer_than and sourceful and vercmp < 1:
2566 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
# vercmp > -1 means new_version >= existent_version: too new for a
# must-be-older-than suite; try the distribution-version remapping below.
2568 if suite in must_be_older_than and vercmp > -1:
2571 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2572 # we really use the other suite, ignoring the conflicting one ...
2573 addsuite = self.pkg.changes["distribution-version"][suite]
2575 add_version = self.get_anyversion(sv_list, addsuite)
2576 target_version = self.get_anyversion(sv_list, target_suite)
2579 # not add_version can only happen if we map to a suite
2580 # that doesn't enhance the suite we're propup'ing from.
2581 # so "propup-ver x a b c; map a d" is a problem only if
2582 # d doesn't enhance a.
2584 # i think we could always propagate in this case, rather
2585 # than complaining. either way, this isn't a REJECT issue
2587 # And - we really should complain to the dorks who configured dak
2588 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2589 self.pkg.changes.setdefault("propdistribution", {})
2590 self.pkg.changes["propdistribution"][addsuite] = 1
2592 elif not target_version:
2593 # not targets_version is true when the package is NEW
2594 # we could just stick with the "...old version..." REJECT
2595 # for this, I think.
2596 self.rejects.append("Won't propogate NEW packages.")
2597 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2598 # propagation would be redundant. no need to reject though.
2599 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2601 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2602 apt_pkg.VersionCompare(add_version, target_version) >= 0:
# Propagate: record the mapped suite as an additional distribution.
2604 self.warnings.append("Propogating upload to %s" % (addsuite))
2605 self.pkg.changes.setdefault("propdistribution", {})
2606 self.pkg.changes["propdistribution"][addsuite] = 1
# Fallthrough (no remapping possible): hard reject.
2610 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2612 ################################################################################
2613 def check_binary_against_db(self, filename, session):
# Cross-suite version sanity plus overwrite check for one binary file.
2614 # Ensure version is sane
2615 self.cross_suite_version_check( \
2616 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2617 self.pkg.files[filename]["architecture"], session),
2618 filename, self.pkg.files[filename]["version"], sourceful=False)
2620 # Check for any existing copies of the file
# Same package/version/arch already in the archive means we must not
# overwrite it. (The query-execution/count line is elided.)
2621 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2622 q = q.filter_by(version=self.pkg.files[filename]["version"])
2623 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2626 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2628 ################################################################################
2630 def check_source_against_db(self, filename, session):
# Cross-suite version sanity for the source package described by the .dsc.
2631 source = self.pkg.dsc.get("source")
2632 version = self.pkg.dsc.get("version")
2634 # Ensure version is sane
2635 self.cross_suite_version_check( \
2636 get_suite_version_by_source(source, session), filename, version,
2639 ################################################################################
2640 def check_dsc_against_db(self, filename, session):
# (The docstring below is partially elided in this view.)
2643 @warning: NB: this function can remove entries from the 'files' index [if
2644 the orig tarball is a duplicate of the one in the archive]; if
2645 you're iterating over 'files' and call this function as part of
2646 the loop, be sure to add a check to the top of the loop to
2647 ensure you haven't just tried to dereference the deleted entry.
2652 self.pkg.orig_files = {} # XXX: do we need to clear it?
2653 orig_files = self.pkg.orig_files
2655 # Try and find all files mentioned in the .dsc. This has
2656 # to work harder to cope with the multiple possible
2657 # locations of an .orig.tar.gz.
2658 # The ordering on the select is needed to pick the newest orig
2659 # when it exists in multiple places.
2660 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
# Case 1: the file is part of this upload.
2662 if self.pkg.files.has_key(dsc_name):
2663 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2664 actual_size = int(self.pkg.files[dsc_name]["size"])
2665 found = "%s in incoming" % (dsc_name)
2667 # Check the file does not already exist in the archive
2668 ql = get_poolfile_like_name(dsc_name, session)
2670 # Strip out anything that isn't '%s' or '/%s$'
# ('i' is bound by an elided loop over ql.)
2672 if not i.filename.endswith(dsc_name):
2675 # "[dak] has not broken them. [dak] has fixed a
2676 # brokenness. Your crappy hack exploited a bug in
2679 # "(Come on! I thought it was always obvious that
2680 # one just doesn't release different files with
2681 # the same name and version.)"
2682 # -- ajk@ on d-devel@l.d.o
2685 # Ignore exact matches for .orig.tar.gz
2687 if re_is_orig_source.match(dsc_name):
# A byte-identical orig already in the pool: reuse the archive copy and
# drop it from this upload's file list.
2689 if self.pkg.files.has_key(dsc_name) and \
2690 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2691 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2692 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2693 # TODO: Don't delete the entry, just mark it as not needed
2694 # This would fix the stupidity of changing something we often iterate over
2695 # whilst we're doing it
2696 del self.pkg.files[dsc_name]
2697 dsc_entry["files id"] = i.file_id
2698 if not orig_files.has_key(dsc_name):
2699 orig_files[dsc_name] = {}
2700 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2703 # Don't bitch that we couldn't find this file later
2705 self.later_check_files.remove(dsc_name)
2711 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
# Case 2: an orig tarball not in this upload - search the pool.
2713 elif re_is_orig_source.match(dsc_name):
2715 ql = get_poolfile_like_name(dsc_name, session)
2717 # Strip out anything that isn't '%s' or '/%s$'
2718 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2720 if not i.filename.endswith(dsc_name):
2724 # Unfortunately, we may get more than one match here if,
2725 # for example, the package was in potato but had an -sa
2726 # upload in woody. So we need to choose the right one.
2728 # default to something sane in case we don't match any or have only one
# Multiple pool matches: probe each candidate for the expected md5/size
# (the surrounding len(ql)>0 / loop lines are elided).
2733 old_file = os.path.join(i.location.path, i.filename)
2734 old_file_fh = utils.open_file(old_file)
2735 actual_md5 = apt_pkg.md5sum(old_file_fh)
2737 actual_size = os.stat(old_file)[stat.ST_SIZE]
2738 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
# Recompute checksums for the chosen candidate 'x'.
2741 old_file = os.path.join(i.location.path, i.filename)
2742 old_file_fh = utils.open_file(old_file)
2743 actual_md5 = apt_pkg.md5sum(old_file_fh)
2745 actual_size = os.stat(old_file)[stat.ST_SIZE]
2747 suite_type = x.location.archive_type
2748 # need this for updating dsc_files in install()
2749 dsc_entry["files id"] = x.file_id
2750 # See install() in process-accepted...
2751 if not orig_files.has_key(dsc_name):
2752 orig_files[dsc_name] = {}
2753 orig_files[dsc_name]["id"] = x.file_id
2754 orig_files[dsc_name]["path"] = old_file
2755 orig_files[dsc_name]["location"] = x.location.location_id
2757 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2758 # Not there? Check the queue directories...
# NOTE(review): this uses 'Cnf' (capital C) while the rest of the class
# uses 'cnf' - looks like an inconsistency relying on a global; confirm.
2759 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2760 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2762 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2763 if os.path.exists(in_otherdir):
2764 in_otherdir_fh = utils.open_file(in_otherdir)
2765 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2766 in_otherdir_fh.close()
2767 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2769 if not orig_files.has_key(dsc_name):
2770 orig_files[dsc_name] = {}
2771 orig_files[dsc_name]["path"] = in_otherdir
# Neither pool nor any queue directory has the file: reject.
2774 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
# Case 3: non-orig file missing from the upload entirely.
2777 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
# Finally verify whatever copy we located against the .dsc's checksums.
2779 if actual_md5 != dsc_entry["md5sum"]:
2780 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2781 if actual_size != int(dsc_entry["size"]):
2782 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2784 ################################################################################
2785 # This is used by process-new and process-holding to recheck a changes file
2786 # at the time we're running. It mainly wraps various other internal functions
2787 # and is similar to accepted_checks - these should probably be tidied up
2789 def recheck(self, session):
# Re-run database-dependent checks on every file of the upload.
2791 for f in self.pkg.files.keys():
2792 # The .orig.tar.gz can disappear out from under us if it's a
2793 # duplicate of one in the archive.
2794 if not self.pkg.files.has_key(f):
2797 entry = self.pkg.files[f]
2799 # Check that the source still exists
2800 if entry["type"] == "deb":
2801 source_version = entry["source version"]
2802 source_package = entry["source package"]
2803 if not self.pkg.changes["architecture"].has_key("source") \
2804 and not source_exists(source_package, source_version, \
2805 suites = self.pkg.changes["distribution"].keys(), session = session):
2806 source_epochless_version = re_no_epoch.sub('', source_version)
2807 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
# The source may still be waiting in one of these policy queues.
2809 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2810 if cnf.has_key("Dir::Queue::%s" % (q)):
2811 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
# Not found anywhere: the binary has no source - reject.
2814 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2816 # Version and file overwrite checks
2817 if entry["type"] == "deb":
2818 self.check_binary_against_db(f, session)
2819 elif entry["type"] == "dsc":
2820 self.check_source_against_db(f, session)
2821 self.check_dsc_against_db(f, session)
2823 ################################################################################
2824 def accepted_checks(self, overwrite_checks, session):
2825 # Recheck anything that relies on the database; since that's not
2826 # frozen between accept and our run time when called from p-a.
2828 # overwrite_checks is set to False when installing to stable/oldstable
# NOTE(review): initialisation of 'propogate'/'nopropogate' dicts is on
# elided lines.
2833 # Find the .dsc (again)
2835 for f in self.pkg.files.keys():
2836 if self.pkg.files[f]["type"] == "dsc":
# ('dsc_filename = f' presumably follows on an elided line.)
2839 for checkfile in self.pkg.files.keys():
2840 # The .orig.tar.gz can disappear out from under us if it's a
2841 # duplicate of one in the archive.
2842 if not self.pkg.files.has_key(checkfile):
2845 entry = self.pkg.files[checkfile]
2847 # Check that the source still exists
2848 if entry["type"] == "deb":
2849 source_version = entry["source version"]
2850 source_package = entry["source package"]
2851 if not self.pkg.changes["architecture"].has_key("source") \
2852 and not source_exists(source_package, source_version, \
2853 suites = self.pkg.changes["distribution"].keys(), \
2855 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2857 # Version and file overwrite checks
2858 if overwrite_checks:
2859 if entry["type"] == "deb":
2860 self.check_binary_against_db(checkfile, session)
2861 elif entry["type"] == "dsc":
2862 self.check_source_against_db(checkfile, session)
2863 self.check_dsc_against_db(dsc_filename, session)
2865 # propogate in the case it is in the override tables:
2866 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2867 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2868 propogate[suite] = 1
2870 nopropogate[suite] = 1
# A suite only gets the propagated upload if no file vetoed it.
2872 for suite in propogate.keys():
2873 if suite in nopropogate:
2875 self.pkg.changes["distribution"][suite] = 1
2877 for checkfile in self.pkg.files.keys():
2878 # Check the package is still in the override tables
2879 for suite in self.pkg.changes["distribution"].keys():
# NOTE(review): 'entry' is not visibly rebound in this loop - it may be
# stale from the loop above (or rebound on an elided line); confirm.
2880 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2881 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2883 ################################################################################
2884 # If any file of an upload has a recent mtime then chances are good
2885 # the file is still being uploaded.
2887 def upload_too_new(self):
# Heuristic: if any upload file was modified within Dinstall::SkipTime
# seconds, the upload is probably still in progress.
2890 # Move back to the original directory to get accurate time stamps
2892 os.chdir(self.pkg.directory)
2893 file_list = self.pkg.files.keys()
2894 file_list.extend(self.pkg.dsc_files.keys())
2895 file_list.append(self.pkg.changes_file)
# NOTE(review): the loop binding 'f' over file_list, the too_new result
# handling and the chdir back are all on elided lines.
2898 last_modified = time.time()-os.path.getmtime(f)
2899 if last_modified < int(cnf["Dinstall::SkipTime"]):
2908 def store_changelog(self):
2910 # Skip binary-only upload if it is not a bin-NMU
2911 if not self.pkg.changes['architecture'].has_key('source'):
2912 from daklib.regexes import re_bin_only_nmu
2913 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2916 session = DBConn().session()
2918 # Check if upload already has a changelog entry
2919 query = """SELECT changelog_id FROM changes WHERE source = :source
2920 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2921 if session.execute(query, {'source': self.pkg.changes['source'], \
2922 'version': self.pkg.changes['version'], \
2923 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2927 # Add current changelog text into changelogs_text table, return created ID
2928 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2929 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2931 # Link ID to the upload available in changes table
2932 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2933 AND version = :version AND architecture = :architecture"""
2934 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2935 'version': self.pkg.changes['version'], \
2936 'architecture': " ".join(self.pkg.changes['architecture'].keys())})