5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 # suppress some deprecation warnings in squeeze related to apt_pkg
62 warnings.filterwarnings('ignore', \
63 "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
65 warnings.filterwarnings('ignore', \
66 "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
69 ###############################################################################
71 def get_type(f, session):
73 Get the file type of C{f}
76 @param f: file entry from Changes object
78 @type session: SQLA Session
79 @param session: SQL Alchemy session object
86 if f.has_key("dbtype"):
87 file_type = f["dbtype"]
88 elif re_source_ext.match(f["type"]):
90 elif f['architecture'] == 'source' and f["type"] == 'unreadable':
91 utils.warn('unreadable source file (will continue and hope for the best)')
95 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
97 # Validate the override type
98 type_id = get_override_type(file_type, session)
100 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
104 ################################################################################
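# Illustrative sketch, not part of dak itself: get_type() above is normally fed
# entries straight from the Files dictionary of a parsed .changes (as
# determine_new() below does).  A caller could classify a whole upload like
# this; 'files' and 'session' are assumed to come from an Upload/DBConn.
def _example_classify_files(files, session):
    return dict((name, get_type(entry, session)) for name, entry in files.items())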
106 # Determine what parts in a .changes are NEW
108 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = {}):
110 Determine what parts in a C{changes} file are NEW.
113 @param filename: changes filename
115 @type changes: Upload.Pkg.changes dict
116 @param changes: Changes dictionary
118 @type files: Upload.Pkg.files dict
119 @param files: Files dictionary
122 @param warn: Warn if overrides are added for (old)stable
124 @type dsc: Upload.Pkg.dsc dict
125 @param dsc: (optional); Dsc dictionary
128 @param new: new packages as returned by a previous call to this function, but override information may have changed
131 @return: dictionary of NEW components.
134 # TODO: This should all use the database instead of parsing the changes
138 dbchg = get_dbchange(filename, session)
140 print "Warning: cannot find changes file in database; won't check byhand"
142 # Try to get the Package-Set field from an included .dsc file (if possible).
144 for package, entry in build_package_set(dsc, session).items():
145 if not new.has_key(package):
148 # Build up a list of potentially new things
149 for name, f in files.items():
150 # Keep a record of byhand elements
151 if f["section"] == "byhand":
156 priority = f["priority"]
157 section = f["section"]
158 file_type = get_type(f, session)
159 component = f["component"]
161 if file_type == "dsc":
164 if not new.has_key(pkg):
166 new[pkg]["priority"] = priority
167 new[pkg]["section"] = section
168 new[pkg]["type"] = file_type
169 new[pkg]["component"] = component
170 new[pkg]["files"] = []
172 old_type = new[pkg]["type"]
173 if old_type != file_type:
174 # source gets trumped by deb or udeb
175 if old_type == "dsc":
176 new[pkg]["priority"] = priority
177 new[pkg]["section"] = section
178 new[pkg]["type"] = file_type
179 new[pkg]["component"] = component
181 new[pkg]["files"].append(name)
183 if f.has_key("othercomponents"):
184 new[pkg]["othercomponents"] = f["othercomponents"]
186 # Fix up the list of target suites
188 for suite in changes["suite"].keys():
189 oldsuite = get_suite(suite, session)
191 print "WARNING: Invalid suite %s found" % suite
194 if oldsuite.overridesuite:
195 newsuite = get_suite(oldsuite.overridesuite, session)
198 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
199 oldsuite.overridesuite, suite)
200 del changes["suite"][suite]
201 changes["suite"][oldsuite.overridesuite] = 1
203 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
204 oldsuite.overridesuite, suite)
206 # Check for unprocessed byhand files
207 if dbchg is not None:
208 for b in byhand.keys():
209 # Find the file entry in the database
211 for f in dbchg.files:
214 # If it's processed, we can ignore it
220 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
222 # Check for new stuff
223 for suite in changes["suite"].keys():
224 for pkg in new.keys():
225 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
227 for file_entry in new[pkg]["files"]:
228 if files[file_entry].has_key("new"):
229 del files[file_entry]["new"]
233 for s in ['stable', 'oldstable']:
234 if changes["suite"].has_key(s):
235 print "WARNING: overrides will be added for %s!" % s
236 for pkg in new.keys():
237 if new[pkg].has_key("othercomponents"):
238 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
242 ################################################################################
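# Illustrative sketch, not part of dak: each entry built by determine_new()
# above carries the keys set in the loop ("priority", "section", "type",
# "component", "files" and possibly "othercomponents"), so a caller can
# summarise the NEW candidates like this:
def _example_summarise_new(new):
    for pkg in sorted(new.keys()):
        info = new[pkg]
        print "%s: %s/%s (%s): %s" % (pkg, info["component"], info["section"],
                                      info["type"], ", ".join(info["files"]))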
244 def check_valid(new, session = None):
246 Check if section and priority for NEW packages exist in the database.
247 Additionally performs sanity checks:
248 - debian-installer packages have to be udeb (or source)
249 - non-debian-installer packages cannot be udeb
250 - source priority can only be assigned to dsc file types
253 @param new: Dict of new packages with their section, priority and type.
256 for pkg in new.keys():
257 section_name = new[pkg]["section"]
258 priority_name = new[pkg]["priority"]
259 file_type = new[pkg]["type"]
261 section = get_section(section_name, session)
263 new[pkg]["section id"] = -1
265 new[pkg]["section id"] = section.section_id
267 priority = get_priority(priority_name, session)
269 new[pkg]["priority id"] = -1
271 new[pkg]["priority id"] = priority.priority_id
274 di = section_name.find("debian-installer") != -1
276 # If d-i, we must be udeb and vice-versa
277 if (di and file_type not in ("udeb", "dsc")) or \
278 (not di and file_type == "udeb"):
279 new[pkg]["section id"] = -1
281 # If dsc we need to be source and vice-versa
282 if (priority_name == "source" and file_type != "dsc") or \
283 (priority_name != "source" and file_type == "dsc"):
284 new[pkg]["priority id"] = -1
286 ###############################################################################
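# Illustrative sketch, not used by dak: check_valid() modifies 'new' in place,
# so callers inspect the "section id" / "priority id" keys afterwards; -1 means
# the section or priority was unknown or failed one of the sanity checks above.
def _example_invalid_new_entries(new, session):
    check_valid(new, session=session)
    return [pkg for pkg in new.keys()
            if new[pkg]["section id"] == -1 or new[pkg]["priority id"] == -1]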
288 # Used by Upload.check_timestamps
289 class TarTime(object):
290 def __init__(self, future_cutoff, past_cutoff):
292 self.future_cutoff = future_cutoff
293 self.past_cutoff = past_cutoff
296 self.future_files = {}
297 self.ancient_files = {}
299 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
300 if MTime > self.future_cutoff:
301 self.future_files[Name] = MTime
302 if MTime < self.past_cutoff:
303 self.ancient_files[Name] = MTime
305 ###############################################################################
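# Illustrative sketch of how TarTime is meant to be driven; check_timestamps()
# in the Upload class below does the real work.  Assumes apt_inst and utils are
# imported at the top of this module, as they are in the full file.
def _example_deb_timestamps(deb_filename, future_cutoff, past_cutoff):
    tar = TarTime(future_cutoff, past_cutoff)
    deb_file = utils.open_file(deb_filename)
    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
    deb_file.seek(0)
    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
    deb_file.close()
    return tar.future_files, tar.ancient_files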
307 def prod_maintainer(notes, upload):
310 # Here we prepare an editor and get them ready to prod...
311 (fd, temp_filename) = utils.temp_filename()
312 temp_file = os.fdopen(fd, 'w')
314 temp_file.write(note.comment)
316 editor = os.environ.get("EDITOR","vi")
319 os.system("%s %s" % (editor, temp_filename))
320 temp_fh = utils.open_file(temp_filename)
321 prod_message = "".join(temp_fh.readlines())
323 print "Prod message:"
324 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
325 prompt = "[P]rod, Edit, Abandon, Quit ?"
327 while prompt.find(answer) == -1:
328 answer = utils.our_raw_input(prompt)
329 m = re_default_answer.search(prompt)
332 answer = answer[:1].upper()
333 os.unlink(temp_filename)
339 # Otherwise, do the prodding...
340 user_email_address = utils.whoami() + " <%s>" % (
341 cnf["Dinstall::MyAdminAddress"])
345 Subst["__FROM_ADDRESS__"] = user_email_address
346 Subst["__PROD_MESSAGE__"] = prod_message
347 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
349 prod_mail_message = utils.TemplateSubst(
350 Subst,cnf["Dir::Templates"]+"/process-new.prod")
353 utils.send_mail(prod_mail_message)
355 print "Sent prodding message"
357 ################################################################################
359 def edit_note(note, upload, session, trainee=False):
360 # Write the current data to a temporary file
361 (fd, temp_filename) = utils.temp_filename()
362 editor = os.environ.get("EDITOR","vi")
365 os.system("%s %s" % (editor, temp_filename))
366 temp_file = utils.open_file(temp_filename)
367 newnote = temp_file.read().rstrip()
370 print utils.prefix_multi_line_string(newnote," ")
371 prompt = "[D]one, Edit, Abandon, Quit ?"
373 while prompt.find(answer) == -1:
374 answer = utils.our_raw_input(prompt)
375 m = re_default_answer.search(prompt)
378 answer = answer[:1].upper()
379 os.unlink(temp_filename)
386 comment = NewComment()
387 comment.package = upload.pkg.changes["source"]
388 comment.version = upload.pkg.changes["version"]
389 comment.comment = newnote
390 comment.author = utils.whoami()
391 comment.trainee = trainee
395 ###############################################################################
397 # suite names DMs can upload to
398 dm_suites = ['unstable', 'experimental']
400 def get_newest_source(source, session):
401 'returns the newest DBSource object in dm_suites'
402 ## the most recent version of the package uploaded to unstable or
403 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
404 ## section of its control file
405 q = session.query(DBSource).filter_by(source = source). \
406 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
407 order_by(desc('source.version'))
410 def get_suite_version_by_source(source, session):
411 'returns a list of tuples (suite_name, version) for source package'
412 q = session.query(Suite.suite_name, DBSource.version). \
413 join(Suite.sources).filter_by(source = source)
416 def get_source_by_package_and_suite(package, suite_name, session):
418 returns a DBSource query filtered by DBBinary.package and this package's
421 return session.query(DBSource). \
422 join(DBSource.binaries).filter_by(package = package). \
423 join(DBBinary.suites).filter_by(suite_name = suite_name)
425 def get_suite_version_by_package(package, arch_string, session):
427 returns a list of tuples (suite_name, version) for binary package and
430 return session.query(Suite.suite_name, DBBinary.version). \
431 join(Suite.binaries).filter_by(package = package). \
432 join(DBBinary.architecture). \
433 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
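# Illustrative sketch, not called anywhere in dak: combining the helpers above
# to answer "may a Debian Maintainer upload this source right now?", much as
# check_dm_upload() below does.  Assumes a session from DBConn().session().
def _example_dm_may_upload(source, session):
    src = get_newest_source(source, session)
    return src is not None and src.dm_upload_allowed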
435 class Upload(object):
437 Everything that has to do with processing an upload.
445 ###########################################################################
448 """ Reset a number of internal variables."""
450 # Initialize the substitution template map
453 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
454 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
455 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
456 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
462 self.later_check_files = []
466 def package_info(self):
468 Format various messages from this Upload to send to the maintainer.
472 ('Reject Reasons', self.rejects),
473 ('Warnings', self.warnings),
474 ('Notes', self.notes),
478 for title, messages in msgs:
480 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
485 ###########################################################################
486 def update_subst(self):
487 """ Set up the per-package template substitution mappings """
491 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
492 if not self.pkg.changes.has_key("architecture") or not \
493 isinstance(self.pkg.changes["architecture"], dict):
494 self.pkg.changes["architecture"] = { "Unknown" : "" }
496 # and maintainer2047 may not exist.
497 if not self.pkg.changes.has_key("maintainer2047"):
498 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
500 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
501 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
502 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
504 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
505 if self.pkg.changes["architecture"].has_key("source") and \
506 self.pkg.changes["changedby822"] != "" and \
507 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
509 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
510 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
511 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
513 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
514 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
515 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
517 # Process policy doesn't set the fingerprint field and I don't want to make it
518 # do it for now as I don't want to have to deal with the case where we accepted
519 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
520 # the meantime so the package will be remarked as rejectable. Urgh.
521 # TODO: Fix this properly
522 if self.pkg.changes.has_key('fingerprint'):
523 session = DBConn().session()
524 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
525 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
526 if self.pkg.changes.has_key("sponsoremail"):
527 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
530 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
531 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
533 # Apply any global override of the Maintainer field
534 if cnf.get("Dinstall::OverrideMaintainer"):
535 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
536 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
538 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
539 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
540 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
541 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
543 ###########################################################################
544 def load_changes(self, filename):
546 Load a changes file and set up a dictionary around it. Also checks for mandatory
549 @type filename: string
550 @param filename: Changes filename, full path.
553 @return: whether the changes file was valid or not. We may want to
554 reject even if this is True (see what gets put in self.rejects).
555 This is simply to prevent us from even trying things later which will
556 fail because we couldn't properly parse the file.
559 self.pkg.changes_file = filename
561 # Parse the .changes file into a dictionary
563 self.pkg.changes.update(parse_changes(filename))
564 except CantOpenError:
565 self.rejects.append("%s: can't read file." % (filename))
567 except ParseChangesError, line:
568 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
570 except ChangesUnicodeError:
571 self.rejects.append("%s: changes file not proper utf-8" % (filename))
574 # Parse the Files field from the .changes into another dictionary
576 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
577 except ParseChangesError, line:
578 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
580 except UnknownFormatError, format:
581 self.rejects.append("%s: unknown format '%s'." % (filename, format))
584 # Check for mandatory fields
585 for i in ("distribution", "source", "binary", "architecture",
586 "version", "maintainer", "files", "changes", "description"):
587 if not self.pkg.changes.has_key(i):
588 # Avoid undefined errors later
589 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
592 # Strip a source version in brackets from the source field
593 if re_strip_srcver.search(self.pkg.changes["source"]):
594 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
596 # Ensure the source field is a valid package name.
597 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
598 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
600 # Split multi-value fields into a lower-level dictionary
601 for i in ("architecture", "distribution", "binary", "closes"):
602 o = self.pkg.changes.get(i, "")
604 del self.pkg.changes[i]
606 self.pkg.changes[i] = {}
609 self.pkg.changes[i][j] = 1
611 # Fix the Maintainer: field to be RFC822/2047 compatible
613 (self.pkg.changes["maintainer822"],
614 self.pkg.changes["maintainer2047"],
615 self.pkg.changes["maintainername"],
616 self.pkg.changes["maintaineremail"]) = \
617 fix_maintainer (self.pkg.changes["maintainer"])
618 except ParseMaintError, msg:
619 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
620 % (filename, self.pkg.changes["maintainer"], msg))
622 # ...likewise for the Changed-By: field if it exists.
624 (self.pkg.changes["changedby822"],
625 self.pkg.changes["changedby2047"],
626 self.pkg.changes["changedbyname"],
627 self.pkg.changes["changedbyemail"]) = \
628 fix_maintainer (self.pkg.changes.get("changed-by", ""))
629 except ParseMaintError, msg:
630 self.pkg.changes["changedby822"] = ""
631 self.pkg.changes["changedby2047"] = ""
632 self.pkg.changes["changedbyname"] = ""
633 self.pkg.changes["changedbyemail"] = ""
635 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
636 % (filename, self.pkg.changes["changed-by"], msg))
638 # Ensure all the values in Closes: are numbers
639 if self.pkg.changes.has_key("closes"):
640 for i in self.pkg.changes["closes"].keys():
641 if re_isanum.match(i) is None:
642 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
644 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
645 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
646 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
648 # Check the .changes is non-empty
649 if not self.pkg.files:
650 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
653 # Changes was syntactically valid even if we'll reject
656 ###########################################################################
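    # Illustrative sketch (comment only): the methods of this class are meant
    # to be driven roughly the way dak process-upload does, e.g.
    #
    #   u = Upload()
    #   if u.load_changes("/path/to/foo_1.0-1_amd64.changes"):   # invented path
    #       u.check_distributions()
    #       u.check_files(action=False)
    #   if u.rejects:
    #       print u.package_info()
    #
    # load_changes() returning True only means the file parsed; rejects may
    # still have been collected along the way.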
658 def check_distributions(self):
659 "Check and map the Distribution field"
663 # Handle suite mappings
664 for m in Cnf.ValueList("SuiteMappings"):
667 if mtype == "map" or mtype == "silent-map":
668 (source, dest) = args[1:3]
669 if self.pkg.changes["distribution"].has_key(source):
670 del self.pkg.changes["distribution"][source]
671 self.pkg.changes["distribution"][dest] = 1
672 if mtype != "silent-map":
673 self.notes.append("Mapping %s to %s." % (source, dest))
674 if self.pkg.changes.has_key("distribution-version"):
675 if self.pkg.changes["distribution-version"].has_key(source):
676 self.pkg.changes["distribution-version"][source]=dest
677 elif mtype == "map-unreleased":
678 (source, dest) = args[1:3]
679 if self.pkg.changes["distribution"].has_key(source):
680 for arch in self.pkg.changes["architecture"].keys():
681 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
682 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
683 del self.pkg.changes["distribution"][source]
684 self.pkg.changes["distribution"][dest] = 1
686 elif mtype == "ignore":
688 if self.pkg.changes["distribution"].has_key(suite):
689 del self.pkg.changes["distribution"][suite]
690 self.warnings.append("Ignoring %s as a target suite." % (suite))
691 elif mtype == "reject":
693 if self.pkg.changes["distribution"].has_key(suite):
694 self.rejects.append("Uploads to %s are not accepted." % (suite))
695 elif mtype == "propup-version":
696 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
698 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
699 if self.pkg.changes["distribution"].has_key(args[1]):
700 self.pkg.changes.setdefault("distribution-version", {})
701 for suite in args[2:]:
702 self.pkg.changes["distribution-version"][suite] = suite
704 # Ensure there is (still) a target distribution
705 if len(self.pkg.changes["distribution"].keys()) < 1:
706 self.rejects.append("No valid distribution remaining.")
708 # Ensure target distributions exist
709 for suite in self.pkg.changes["distribution"].keys():
710 if not Cnf.has_key("Suite::%s" % (suite)):
711 self.rejects.append("Unknown distribution `%s'." % (suite))
713 ###########################################################################
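    # For reference, SuiteMappings entries are whitespace-separated strings
    # whose first word selects one of the branches above.  Illustrative values
    # only (the real list lives in the dak configuration):
    #
    #   "map stable proposed-updates"
    #   "silent-map stable-security stable"
    #   "map-unreleased testing unstable"
    #   "reject experimental-security"
    #   "propup-version stable-security testing testing-proposed-updates"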
715 def binary_file_checks(self, f, session):
717 entry = self.pkg.files[f]
719 # Extract package control information
720 deb_file = utils.open_file(f)
722 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
724 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
726 # Can't continue, none of the checks on control would work.
729 # Check for mandatory "Description:"
732 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
734 self.rejects.append("%s: Missing Description in binary package" % (f))
739 # Check for mandatory fields
740 for field in [ "Package", "Architecture", "Version" ]:
741 if control.Find(field) is None:
743 self.rejects.append("%s: No %s field in control." % (f, field))
746 # Ensure the package name matches the one given in the .changes
747 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
748 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
750 # Validate the package field
751 package = control.Find("Package")
752 if not re_valid_pkg_name.match(package):
753 self.rejects.append("%s: invalid package name '%s'." % (f, package))
755 # Validate the version field
756 version = control.Find("Version")
757 if not re_valid_version.match(version):
758 self.rejects.append("%s: invalid version number '%s'." % (f, version))
760 # Ensure the architecture of the .deb is one we know about.
761 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
762 architecture = control.Find("Architecture")
763 upload_suite = self.pkg.changes["distribution"].keys()[0]
765 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
766 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
767 self.rejects.append("Unknown architecture '%s'." % (architecture))
769 # Ensure the architecture of the .deb is one of the ones
770 # listed in the .changes.
771 if not self.pkg.changes["architecture"].has_key(architecture):
772 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
774 # Sanity-check the Depends field
775 depends = control.Find("Depends")
777 self.rejects.append("%s: Depends field is empty." % (f))
779 # Sanity-check the Provides field
780 provides = control.Find("Provides")
782 provide = re_spacestrip.sub('', provides)
784 self.rejects.append("%s: Provides field is empty." % (f))
785 prov_list = provide.split(",")
786 for prov in prov_list:
787 if not re_valid_pkg_name.match(prov):
788 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
790 # If there is a Built-Using field, we need to check we can find the
791 # exact source version
792 built_using = control.Find("Built-Using")
795 entry["built-using"] = []
796 for dep in apt_pkg.parse_depends(built_using):
797 bu_s, bu_v, bu_e = dep[0]
798 # Check that it's an exact match dependency and we have
799 # some form of version
800 if bu_e != "=" or len(bu_v) < 1:
801 self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
803 # Find the source id for this version
804 bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
806 self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
808 entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
810 except ValueError, e:
811 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
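        # For reference (invented field value): a header such as
        #   Built-Using: gcc-4.6 (= 4.6.1-3)
        # parses via apt_pkg.parse_depends() into [[('gcc-4.6', '4.6.1-3', '=')]],
        # matching the (bu_s, bu_v, bu_e) unpacking above.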
814 # Check the section & priority match those given in the .changes (non-fatal)
815 if control.Find("Section") and entry["section"] != "" \
816 and entry["section"] != control.Find("Section"):
817 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
818 (f, control.Find("Section", ""), entry["section"]))
819 if control.Find("Priority") and entry["priority"] != "" \
820 and entry["priority"] != control.Find("Priority"):
821 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
822 (f, control.Find("Priority", ""), entry["priority"]))
824 entry["package"] = package
825 entry["architecture"] = architecture
826 entry["version"] = version
827 entry["maintainer"] = control.Find("Maintainer", "")
829 if f.endswith(".udeb"):
830 self.pkg.files[f]["dbtype"] = "udeb"
831 elif f.endswith(".deb"):
832 self.pkg.files[f]["dbtype"] = "deb"
834 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
836 entry["source"] = control.Find("Source", entry["package"])
838 # Get the source version
839 source = entry["source"]
842 if source.find("(") != -1:
843 m = re_extract_src_version.match(source)
845 source_version = m.group(2)
847 if not source_version:
848 source_version = self.pkg.files[f]["version"]
850 entry["source package"] = source
851 entry["source version"] = source_version
853 # Ensure the filename matches the contents of the .deb
854 m = re_isadeb.match(f)
857 file_package = m.group(1)
858 if entry["package"] != file_package:
859 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
860 (f, file_package, entry["dbtype"], entry["package"]))
861 epochless_version = re_no_epoch.sub('', control.Find("Version"))
864 file_version = m.group(2)
865 if epochless_version != file_version:
866 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
867 (f, file_version, entry["dbtype"], epochless_version))
870 file_architecture = m.group(3)
871 if entry["architecture"] != file_architecture:
872 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
873 (f, file_architecture, entry["dbtype"], entry["architecture"]))
875 # Check for existent source
876 source_version = entry["source version"]
877 source_package = entry["source package"]
878 if self.pkg.changes["architecture"].has_key("source"):
879 if source_version != self.pkg.changes["version"]:
880 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
881 (source_version, f, self.pkg.changes["version"]))
883 # Check in the SQL database
884 if not source_exists(source_package, source_version, suites = \
885 self.pkg.changes["distribution"].keys(), session = session):
886 # Check in one of the other directories
887 source_epochless_version = re_no_epoch.sub('', source_version)
888 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
889 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
891 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
894 dsc_file_exists = False
895 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
896 if cnf.has_key("Dir::Queue::%s" % (myq)):
897 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
898 dsc_file_exists = True
901 if not dsc_file_exists:
902 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
904 # Check the version and for file overwrites
905 self.check_binary_against_db(f, session)
907 def source_file_checks(self, f, session):
908 entry = self.pkg.files[f]
910 m = re_issource.match(f)
914 entry["package"] = m.group(1)
915 entry["version"] = m.group(2)
916 entry["type"] = m.group(3)
918 # Ensure the source package name matches the Source field in the .changes
919 if self.pkg.changes["source"] != entry["package"]:
920 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
922 # Ensure the source version matches the version in the .changes file
923 if re_is_orig_source.match(f):
924 changes_version = self.pkg.changes["chopversion2"]
926 changes_version = self.pkg.changes["chopversion"]
928 if changes_version != entry["version"]:
929 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
931 # Ensure the .changes lists source in the Architecture field
932 if not self.pkg.changes["architecture"].has_key("source"):
933 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
935 # Check the signature of a .dsc file
936 if entry["type"] == "dsc":
937 # check_signature returns either:
938 # (None, [list, of, rejects]) or (signature, [])
939 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
941 self.rejects.append(j)
943 entry["architecture"] = "source"
945 def per_suite_file_checks(self, f, suite, session):
947 entry = self.pkg.files[f]
950 if entry.has_key("byhand"):
953 # Check we have fields we need to do these checks
955 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
956 if not entry.has_key(m):
957 self.rejects.append("file '%s' does not have field %s set" % (f, m))
963 # Handle component mappings
964 for m in cnf.ValueList("ComponentMappings"):
965 (source, dest) = m.split()
966 if entry["component"] == source:
967 entry["original component"] = source
968 entry["component"] = dest
970 # Ensure the component is valid for the target suite
971 if cnf.has_key("Suite:%s::Components" % (suite)) and \
972 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
973 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
976 # Validate the component
977 if not get_component(entry["component"], session):
978 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
981 # See if the package is NEW
982 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
985 # Validate the priority
986 if entry["priority"].find('/') != -1:
987 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
989 # Determine the location
990 location = cnf["Dir::Pool"]
991 l = get_location(location, entry["component"], session=session)
993 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
994 entry["location id"] = -1
996 entry["location id"] = l.location_id
998 # Check the md5sum & size against existing files (if any)
999 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
1001 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
1002 entry["size"], entry["md5sum"], entry["location id"])
1005 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
1006 elif found is False and poolfile is not None:
1007 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1009 if poolfile is None:
1010 entry["files id"] = None
1012 entry["files id"] = poolfile.file_id
1014 # Check for packages that have moved from one component to another
1015 entry['suite'] = suite
1016 arch_list = [entry["architecture"], 'all']
1017 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1018 [suite], arch_list = arch_list, session = session)
1019 if component is not None:
1020 entry["othercomponents"] = component
1022 def check_files(self, action=True):
1023 file_keys = self.pkg.files.keys()
1029 os.chdir(self.pkg.directory)
1031 ret = holding.copy_to_holding(f)
1033 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1037 # check whether we already know the changes file
1038 # [NB: this check must be done post-suite mapping]
1039 base_filename = os.path.basename(self.pkg.changes_file)
1041 session = DBConn().session()
1044 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1045 # if in the pool or in a queue other than unchecked, reject
1046 if (dbc.in_queue is None) \
1047 or (dbc.in_queue is not None
1048 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1049 self.rejects.append("%s file already known to dak" % base_filename)
1050 except NoResultFound, e:
1054 has_binaries = False
1057 for f, entry in self.pkg.files.items():
1058 # Ensure the file does not already exist in one of the accepted directories
1059 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1060 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1061 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1062 self.rejects.append("%s file already exists in the %s directory." % (f, d))
1064 if not re_taint_free.match(f):
1065 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1067 # Check the file is readable
1068 if os.access(f, os.R_OK) == 0:
1069 # When running in -n, copy_to_holding() won't have
1070 # generated the reject_message, so we need to.
1072 if os.path.exists(f):
1073 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1075 # Don't directly reject, mark to check later to deal with orig's
1076 # we can find in the pool
1077 self.later_check_files.append(f)
1078 entry["type"] = "unreadable"
1081 # If it's byhand skip remaining checks
1082 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1084 entry["type"] = "byhand"
1086 # Checks for a binary package...
1087 elif re_isadeb.match(f):
1089 entry["type"] = "deb"
1091 # This routine appends to self.rejects/warnings as appropriate
1092 self.binary_file_checks(f, session)
1094 # Checks for a source package...
1095 elif re_issource.match(f):
1098 # This routine appends to self.rejects/warnings as appropriate
1099 self.source_file_checks(f, session)
1101 # Not a binary or source package? Assume byhand...
1104 entry["type"] = "byhand"
1106 # Per-suite file checks
1107 entry["oldfiles"] = {}
1108 for suite in self.pkg.changes["distribution"].keys():
1109 self.per_suite_file_checks(f, suite, session)
1113 # If the .changes file says it has source, it must have source.
1114 if self.pkg.changes["architecture"].has_key("source"):
1116 self.rejects.append("no source found and Architecture line in changes mention source.")
1118 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1119 self.rejects.append("source only uploads are not supported.")
1121 ###########################################################################
1123 def __dsc_filename(self):
1125 Returns: (Status, Dsc_Filename)
1127 Status: Boolean; True when there was no error, False otherwise
1128 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1133 for name, entry in self.pkg.files.items():
1134 if entry.has_key("type") and entry["type"] == "dsc":
1136 return False, "cannot process a .changes file with multiple .dsc's."
1140 if not dsc_filename:
1141 return False, "source uploads must contain a dsc file"
1143 return True, dsc_filename
1145 def load_dsc(self, action=True, signing_rules=1):
1147 Find and load the dsc from self.pkg.files into self.pkg.dsc
1149 Returns: (Status, Reason)
1151 Status: Boolean; True when there was no error, False otherwise
1152 Reason: String; When Status is False this describes the error
1156 (status, dsc_filename) = self.__dsc_filename()
1158 # If status is false, dsc_filename has the reason
1159 return False, dsc_filename
1162 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1163 except CantOpenError:
1165 return False, "%s: can't read file." % (dsc_filename)
1166 except ParseChangesError, line:
1167 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1168 except InvalidDscError, line:
1169 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1170 except ChangesUnicodeError:
1171 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1175 ###########################################################################
1177 def check_dsc(self, action=True, session=None):
1178 """Returns bool indicating whether or not the source changes are valid"""
1179 # Ensure there is source to check
1180 if not self.pkg.changes["architecture"].has_key("source"):
1183 (status, reason) = self.load_dsc(action=action)
1185 self.rejects.append(reason)
1187 (status, dsc_filename) = self.__dsc_filename()
1189 # If status is false, dsc_filename has the reason
1190 self.rejects.append(dsc_filename)
1193 # Build up the list of files mentioned by the .dsc
1195 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1196 except NoFilesFieldError:
1197 self.rejects.append("%s: no Files: field." % (dsc_filename))
1199 except UnknownFormatError, format:
1200 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1202 except ParseChangesError, line:
1203 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1206 # Enforce mandatory fields
1207 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1208 if not self.pkg.dsc.has_key(i):
1209 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1212 # Validate the source and version fields
1213 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1214 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1215 if not re_valid_version.match(self.pkg.dsc["version"]):
1216 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1218 # Only a limited list of source formats are allowed in each suite
1219 for dist in self.pkg.changes["distribution"].keys():
1220 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1221 if self.pkg.dsc["format"] not in allowed:
1222 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1224 # Validate the Maintainer field
1226 # We ignore the return value
1227 fix_maintainer(self.pkg.dsc["maintainer"])
1228 except ParseMaintError, msg:
1229 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1230 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1232 # Validate the build-depends field(s)
1233 for field_name in [ "build-depends", "build-depends-indep" ]:
1234 field = self.pkg.dsc.get(field_name)
1236 # Have apt try to parse them...
1238 apt_pkg.ParseSrcDepends(field)
1240 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1242 # Ensure the version number in the .dsc matches the version number in the .changes
1243 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1244 changes_version = self.pkg.files[dsc_filename]["version"]
1246 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1247 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1249 # Ensure the Files field contains only what's expected
1250 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1252 # Ensure source is newer than existing source in target suites
1253 session = DBConn().session()
1254 self.check_source_against_db(dsc_filename, session)
1255 self.check_dsc_against_db(dsc_filename, session)
1257 dbchg = get_dbchange(self.pkg.changes_file, session)
1259 # Finally, check if we're missing any files
1260 for f in self.later_check_files:
1262 # If we have a dbchg object, check whether we've already processed this file
1265 for pf in dbchg.files:
1266 if pf.filename == f and pf.processed:
1267 self.notes.append('%s was already processed so we can go ahead' % f)
1269 del self.pkg.files[f]
1271 self.rejects.append("Could not find file %s references in changes" % f)
1277 ###########################################################################
1279 def get_changelog_versions(self, source_dir):
1280 """Extracts a the source package and (optionally) grabs the
1281 version history out of debian/changelog for the BTS."""
1285 # Find the .dsc (again)
1287 for f in self.pkg.files.keys():
1288 if self.pkg.files[f]["type"] == "dsc":
1291 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1292 if not dsc_filename:
1295 # Create a symlink mirror of the source files in our temporary directory
1296 for f in self.pkg.files.keys():
1297 m = re_issource.match(f)
1299 src = os.path.join(source_dir, f)
1300 # If a file is missing for whatever reason, give up.
1301 if not os.path.exists(src):
1304 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1305 self.pkg.orig_files[f].has_key("path"):
1307 dest = os.path.join(os.getcwd(), f)
1308 os.symlink(src, dest)
1310 # If the orig files are not a part of the upload, create symlinks to the
1312 for orig_file in self.pkg.orig_files.keys():
1313 if not self.pkg.orig_files[orig_file].has_key("path"):
1315 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1316 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1318 # Extract the source
1320 unpacked = UnpackedSource(dsc_filename)
1322 self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1325 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1328 # Get the upstream version
1329 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1330 if re_strip_revision.search(upstr_version):
1331 upstr_version = re_strip_revision.sub('', upstr_version)
1333 # Ensure the changelog file exists
1334 changelog_file = unpacked.get_changelog_file()
1335 if changelog_file is None:
1336 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1339 # Parse the changelog
1340 self.pkg.dsc["bts changelog"] = ""
1341 for line in changelog_file.readlines():
1342 m = re_changelog_versions.match(line)
1344 self.pkg.dsc["bts changelog"] += line
1345 changelog_file.close()
1348 # Check we found at least one revision in the changelog
1349 if not self.pkg.dsc["bts changelog"]:
1350 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1352 def check_source(self):
1354 # a) there's no source
1355 if not self.pkg.changes["architecture"].has_key("source"):
1358 tmpdir = utils.temp_dirname()
1360 # Move into the temporary directory
1364 # Get the changelog version history
1365 self.get_changelog_versions(cwd)
1367 # Move back and clean up the temporary tree
1371 shutil.rmtree(tmpdir)
1373 if e.errno != errno.EACCES:
1375 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1377 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1378 # We probably have u-r or u-w directories so chmod everything
1380 cmd = "chmod -R u+rwx %s" % (tmpdir)
1381 result = os.system(cmd)
1383 utils.fubar("'%s' failed with result %s." % (cmd, result))
1384 shutil.rmtree(tmpdir)
1385 except Exception, e:
1386 print "foobar2 (%s)" % e
1387 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1389 ###########################################################################
1390 def ensure_hashes(self):
1391 # Make sure we recognise the format of the Files: field in the .changes
1392 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1393 if len(format) == 2:
1394 format = int(format[0]), int(format[1])
1396 format = int(float(format[0])), 0
1398 # We need to deal with the original changes blob, as the fields we need
1399 # might not be in the changes dict serialised into the .dak anymore.
1400 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1402 # Copy the checksums over to the current changes dict. This will keep
1403 # the existing modifications to it intact.
1404 for field in orig_changes:
1405 if field.startswith('checksums-'):
1406 self.pkg.changes[field] = orig_changes[field]
1408 # Check for unsupported hashes
1409 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1410 self.rejects.append(j)
1412 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1413 self.rejects.append(j)
1415 # We have to calculate the hash if we have an earlier changes version than
1416 # the one the hash appears in, rather than requiring it to exist in the changes file
1417 for hashname, hashfunc, version in utils.known_hashes:
1418 # TODO: Move _ensure_changes_hash into this class
1419 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1420 self.rejects.append(j)
1421 if "source" in self.pkg.changes["architecture"]:
1422 # TODO: Move _ensure_dsc_hash into this class
1423 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1424 self.rejects.append(j)
1426 def check_hashes(self):
1427 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1428 self.rejects.append(m)
1430 for m in utils.check_size(".changes", self.pkg.files):
1431 self.rejects.append(m)
1433 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1434 self.rejects.append(m)
1436 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1437 self.rejects.append(m)
1439 self.ensure_hashes()
1441 ###########################################################################
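    # Worked example (comment only) for the Format handling in ensure_hashes():
    # a Changes "Format: 1.8" splits into (1, 8).  utils.known_hashes yields
    # (hashname, hashfunc, version) tuples, and a hash is recomputed locally
    # whenever the upload's format predates the version that introduced the
    # corresponding Checksums-* field.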
1443 def ensure_orig(self, target_dir='.', session=None):
1445 Ensures that all orig files mentioned in the changes file are present
1446 in target_dir. If they do not exist, they are symlinked into place.
1448 A list containing the symlinks that were created is returned (so they
1455 for filename, entry in self.pkg.dsc_files.iteritems():
1456 if not re_is_orig_source.match(filename):
1457 # File is not an orig; ignore
1460 if os.path.exists(filename):
1461 # File exists, no need to continue
1464 def symlink_if_valid(path):
1465 f = utils.open_file(path)
1466 md5sum = apt_pkg.md5sum(f)
1469 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1470 expected = (int(entry['size']), entry['md5sum'])
1472 if fingerprint != expected:
1475 dest = os.path.join(target_dir, filename)
1477 os.symlink(path, dest)
1478 symlinked.append(dest)
1484 session_ = DBConn().session()
1489 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1490 poolfile_path = os.path.join(
1491 poolfile.location.path, poolfile.filename
1494 if symlink_if_valid(poolfile_path):
1504 # Look in some other queues for the file
1505 queues = ('New', 'Byhand', 'ProposedUpdates',
1506 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1508 for queue in queues:
1509 if not cnf.get('Dir::Queue::%s' % queue):
1512 queuefile_path = os.path.join(
1513 cnf['Dir::Queue::%s' % queue], filename
1516 if not os.path.exists(queuefile_path):
1517 # Does not exist in this queue
1520 if symlink_if_valid(queuefile_path):
1525 ###########################################################################
1527 def check_lintian(self):
1529 Extends self.rejects by checking the output of lintian against tags
1530 specified in Dinstall::LintianTags.
1535 # Don't reject binary uploads
1536 if not self.pkg.changes['architecture'].has_key('source'):
1539 # Only check some distributions
1540 for dist in ('unstable', 'experimental'):
1541 if dist in self.pkg.changes['distribution']:
1546 # If we do not have a tagfile, don't do anything
1547 tagfile = cnf.get("Dinstall::LintianTags")
1551 # Parse the yaml file
1552 sourcefile = file(tagfile, 'r')
1553 sourcecontent = sourcefile.read()
1557 lintiantags = yaml.load(sourcecontent)['lintian']
1558 except yaml.YAMLError, msg:
1559 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1562 # Try to find all origs mentioned in the .dsc
1563 symlinked = self.ensure_orig()
1565 # Set up the input file for lintian
1566 fd, temp_filename = utils.temp_filename()
1567 temptagfile = os.fdopen(fd, 'w')
1568 for tags in lintiantags.values():
1569 temptagfile.writelines(['%s\n' % x for x in tags])
1573 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1574 (temp_filename, self.pkg.changes_file)
1576 result, output = commands.getstatusoutput(cmd)
1578 # Remove our tempfile and any symlinks we created
1579 os.unlink(temp_filename)
1581 for symlink in symlinked:
1585 utils.warn("lintian failed for %s [return code: %s]." % \
1586 (self.pkg.changes_file, result))
1587 utils.warn(utils.prefix_multi_line_string(output, \
1588 " [possible output:] "))
1593 [self.pkg.changes_file, "check_lintian"] + list(txt)
1597 parsed_tags = parse_lintian_output(output)
1598 self.rejects.extend(
1599 generate_reject_messages(parsed_tags, lintiantags, log=log)
1602 ###########################################################################
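    # For reference: the Dinstall::LintianTags file read above is YAML with a
    # top-level "lintian" key mapping reject categories to lists of tag names,
    # roughly (illustrative tag names only):
    #
    #   lintian:
    #     nonfatal:
    #       - some-tag-we-warn-about
    #     fatal:
    #       - some-tag-we-reject-on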
1603 def check_urgency(self):
1605 if self.pkg.changes["architecture"].has_key("source"):
1606 if not self.pkg.changes.has_key("urgency"):
1607 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1608 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1609 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1610 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1611 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1612 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1614 ###########################################################################
1616 # Sanity check the time stamps of files inside debs.
1617 # [Files in the near future cause ugly warnings and extreme time
1618 # travel can cause errors on extraction]
1620 def check_timestamps(self):
1623 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1624 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1625 tar = TarTime(future_cutoff, past_cutoff)
1627 for filename, entry in self.pkg.files.items():
1628 if entry["type"] == "deb":
1631 deb_file = utils.open_file(filename)
1632 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1635 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1636 except SystemError, e:
1637 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1638 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1641 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1645 future_files = tar.future_files.keys()
1647 num_future_files = len(future_files)
1648 future_file = future_files[0]
1649 future_date = tar.future_files[future_file]
1650 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1651 % (filename, num_future_files, future_file, time.ctime(future_date)))
1653 ancient_files = tar.ancient_files.keys()
1655 num_ancient_files = len(ancient_files)
1656 ancient_file = ancient_files[0]
1657 ancient_date = tar.ancient_files[ancient_file]
1658 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1659 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1661 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1663 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1664 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1666 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1672 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1673 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1674 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1675 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1676 self.pkg.changes["sponsoremail"] = uid_email
1681 ###########################################################################
1682 # check_signed_by_key checks
1683 ###########################################################################
1685 def check_signed_by_key(self):
1686 """Ensure the .changes is signed by an authorized uploader."""
1687 session = DBConn().session()
1689 # First of all we check that the person has proper upload permissions
1690 # and that this upload isn't blocked
1691 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1694 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1697 # TODO: Check that import-keyring adds UIDs properly
1699 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1702 # Check that the fingerprint which uploaded has permission to do so
1703 self.check_upload_permissions(fpr, session)
1705 # Check that this package is not in a transition
1706 self.check_transition(session)
1711 def check_upload_permissions(self, fpr, session):
1712 # Check any one-off upload blocks
1713 self.check_upload_blocks(fpr, session)
1715 # If the source_acl is None, source is never allowed
1716 if fpr.source_acl is None:
1717 if self.pkg.changes["architecture"].has_key("source"):
1718 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1719 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1720 self.rejects.append(rej)
1722 # Do DM as a special case
1723 # DM is a special case unfortunately, so we check it first
1724 # (keys with no source access get more access than DMs in one
1725 # way; DMs can only upload for their packages whether source
1726 # or binary, whereas keys with no access might be able to
1727 # upload some binaries)
1728 elif fpr.source_acl.access_level == 'dm':
1729 self.check_dm_upload(fpr, session)
1731 # If not a DM, we allow full upload rights
1732 uid_email = "%s@debian.org" % (fpr.uid.uid)
1733 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1736 # Check binary upload permissions
1737 # By this point we know that DMs can't have got here unless they
1738 # are allowed to deal with the package concerned so just apply
1740 if fpr.binary_acl.access_level == 'full':
1743 # Otherwise we're in the map case
1744 tmparches = self.pkg.changes["architecture"].copy()
1745 tmparches.pop('source', None)
1747 for bam in fpr.binary_acl_map:
1748 tmparches.pop(bam.architecture.arch_string, None)
1750 if len(tmparches.keys()) > 0:
1751 if fpr.binary_reject:
1752 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1753 if len(tmparches.keys()) == 1:
1754 rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1756 rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1757 self.rejects.append(rej)
1759 # TODO: This is where we'll implement reject vs throw away binaries later
1760 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1761 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1762 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1763 self.rejects.append(rej)
1766 def check_upload_blocks(self, fpr, session):
1767 """Check whether any upload blocks apply to this source, source
1768 version, uid / fpr combination"""
1770 def block_rej_template(fb):
1771 rej = 'Manual upload block in place for package %s' % fb.source
1772 if fb.version is not None:
1773 rej += ', version %s' % fb.version
1776 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1777 # version is None if the block applies to all versions
1778 if fb.version is None or fb.version == self.pkg.changes['version']:
1779 # Check both fpr and uid - either is enough to cause a reject
1780 if fb.fpr is not None:
1781 if fb.fpr.fingerprint == fpr.fingerprint:
1782 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1783 if fb.uid is not None:
1784 if fb.uid == fpr.uid:
1785 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1788 def check_dm_upload(self, fpr, session):
1789 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1790 ## none of the uploaded packages are NEW
1792 for f in self.pkg.files.keys():
1793 if self.pkg.files[f].has_key("byhand"):
1794 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1796 if self.pkg.files[f].has_key("new"):
1797 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1803 r = get_newest_source(self.pkg.changes["source"], session)
1806 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1807 self.rejects.append(rej)
1810 if not r.dm_upload_allowed:
1811 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1812 self.rejects.append(rej)
1815 ## the Maintainer: field of the uploaded .changes file corresponds with
1816 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1818 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1819 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1821 ## the most recent version of the package uploaded to unstable or
1822 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1823 ## non-developer maintainers cannot NMU or hijack packages)
1825 # uploader includes the maintainer
1827 for uploader in r.uploaders:
1828 (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1829 # Eww - I hope we never have two people with the same name in Debian
1830 if email == fpr.uid.uid or name == fpr.uid.name:
1835 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1838 ## none of the packages are being taken over from other source packages
1839 for b in self.pkg.changes["binary"].keys():
1840 for suite in self.pkg.changes["distribution"].keys():
1841 for s in get_source_by_package_and_suite(b, suite, session):
1842 if s.source != self.pkg.changes["source"]:
1843 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1847 def check_transition(self, session):
1850 sourcepkg = self.pkg.changes["source"]
1852 # No sourceful upload -> no need to do anything else, direct return
1853 # We also only deal with uploads to unstable, not experimental or those
1854 # going to some proposed-updates queue
1855 if "source" not in self.pkg.changes["architecture"] or \
1856 "unstable" not in self.pkg.changes["distribution"]:
1859 # Also only check if there is a file defined (and existent) with
1861 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1862 if transpath == "" or not os.path.exists(transpath):
1865 # Parse the yaml file
1866 sourcefile = file(transpath, 'r')
1867 sourcecontent = sourcefile.read()
1869 transitions = yaml.load(sourcecontent)
1870 except yaml.YAMLError, msg:
1871 # This shouldn't happen, there is a wrapper to edit the file which
1872 # checks it, but we'd rather be safe than end up rejecting
1874 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1877 # Now look through all defined transitions
1878 for trans in transitions:
1879 t = transitions[trans]
1880 source = t["source"]
1883 # Will be None if nothing is in testing.
1884 current = get_source_in_suite(source, "testing", session)
1885 if current is not None:
1886 compare = apt_pkg.VersionCompare(current.version, expected)
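# VersionCompare() is negative if the version currently in testing is
# older than the version the transition is waiting for, zero if equal,
# positive if newer.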
1888 if current is None or compare < 0:
1889 # This is still valid, the current version in testing is older than
1890 # the new version we wait for, or there is none in testing yet
1892 # Check if the source we look at is affected by this.
1893 if sourcepkg in t['packages']:
1894 # The source is affected, let's reject it.
1896 rejectmsg = "%s: part of the %s transition.\n\n" % (
1899 if current is not None:
1900 currentlymsg = "at version %s" % (current.version)
1902 currentlymsg = "not present in testing"
1904 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1906 rejectmsg += "\n".join(textwrap.wrap("""Your package
1907 is part of a testing transition designed to get %s migrated (it is
1908 currently %s, we need version %s). This transition is managed by the
1909 Release Team, and %s is the Release-Team member responsible for it.
1910 Please mail debian-release@lists.debian.org or contact %s directly if you
1911 need further assistance. You might want to upload to experimental until this
1912 transition is done."""
1913 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1915 self.rejects.append(rejectmsg)
1918 ###########################################################################
1919 # End check_signed_by_key checks
1920 ###########################################################################
1922 def build_summaries(self):
1923 """ Build a summary of changes the upload introduces. """
1925 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1927 short_summary = summary
1929 # This is for direport's benefit...
1930 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1933 summary += "Changes: " + f
1935 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1937 summary += self.announce(short_summary, 0)
1939 return (summary, short_summary)
1941 ###########################################################################
1943 def close_bugs(self, summary, action):
1945 Send mail to close bugs as instructed by the closes field in the changes file.
1946 Also add a line to summary if any work was done.
1948 @type summary: string
1949 @param summary: summary text, as given by L{build_summaries}
1952 @param action: If set to false, no real action will be done.
1955 @return: summary. If action was taken, extended by the list of closed bugs.
1959 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1961 bugs = self.pkg.changes["closes"].keys()
1967 summary += "Closing bugs: "
1969 summary += "%s " % (bug)
1972 self.Subst["__BUG_NUMBER__"] = bug
1973 if self.pkg.changes["distribution"].has_key("stable"):
1974 self.Subst["__STABLE_WARNING__"] = """
1975 Note that this package is not part of the released stable Debian
1976 distribution. It may have dependencies on other unreleased software,
1977 or other instabilities. Please take care if you wish to install it.
1978 The update will eventually make its way into the next released Debian
1981 self.Subst["__STABLE_WARNING__"] = ""
1982 mail_message = utils.TemplateSubst(self.Subst, template)
1983 utils.send_mail(mail_message)
1985 # Clear up after ourselves
1986 del self.Subst["__BUG_NUMBER__"]
1987 del self.Subst["__STABLE_WARNING__"]
1989 if action and self.logger:
1990 self.logger.log(["closing bugs"] + bugs)
1996 ###########################################################################
1998 def announce(self, short_summary, action):
2000 Send an announce mail about a new upload.
2002 @type short_summary: string
2003 @param short_summary: Short summary text to include in the mail
2006 @param action: If set to false, no real action will be done.
2009 @return: Textstring about action taken.
2014 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2016 # Only do announcements for source uploads with a recent dpkg-dev installed
2017 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2018 self.pkg.changes["architecture"].has_key("source"):
2024 self.Subst["__SHORT_SUMMARY__"] = short_summary
2026 for dist in self.pkg.changes["distribution"].keys():
2027 suite = get_suite(dist)
2028 if suite is None: continue
2029 announce_list = suite.announce
2030 if announce_list == "" or lists_done.has_key(announce_list):
2033 lists_done[announce_list] = 1
2034 summary += "Announcing to %s\n" % (announce_list)
2038 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2039 if cnf.get("Dinstall::TrackingServer") and \
2040 self.pkg.changes["architecture"].has_key("source"):
2041 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2042 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
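# e.g. "Bcc: hello@packages.qa.debian.org" for source package "hello"
# if the configured tracking server is packages.qa.debian.org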
2044 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2045 utils.send_mail(mail_message)
2047 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2049 if cnf.FindB("Dinstall::CloseBugs"):
2050 summary = self.close_bugs(summary, action)
2052 del self.Subst["__SHORT_SUMMARY__"]
2056 ###########################################################################
2058 def accept (self, summary, short_summary, session=None):
2062 This moves all files referenced from the .changes into the pool,
2063 sends the accepted mail, announces to lists, closes bugs and
2064 also checks for override disparities. If enabled it will write out
2065 the version history for the BTS Version Tracking and will finally call
2068 @type summary: string
2069 @param summary: Summary text
2071 @type short_summary: string
2072 @param short_summary: Short summary
2076 stats = SummaryStats()
2079 self.logger.log(["installing changes", self.pkg.changes_file])
2084 # Add the .dsc file to the DB first
2085 for newfile, entry in self.pkg.files.items():
2086 if entry["type"] == "dsc":
2087 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2091 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2092 for newfile, entry in self.pkg.files.items():
2093 if entry["type"] == "deb":
2094 b, pf = add_deb_to_db(self, newfile, session)
2096 poolfiles.append(pf)
2098 # If this is a sourceful diff only upload that is moving
2099 # cross-component we need to copy the .orig files into the new
2100 # component too for the same reasons as above.
2101 # XXX: mhy: I think this should be in add_dsc_to_db
2102 if self.pkg.changes["architecture"].has_key("source"):
2103 for orig_file in self.pkg.orig_files.keys():
2104 if not self.pkg.orig_files[orig_file].has_key("id"):
2105 continue # Skip if it's not in the pool
2106 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2107 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2108 continue # Skip if the location didn't change
2111 oldf = get_poolfile_by_id(orig_file_id, session)
2112 old_filename = os.path.join(oldf.location.path, oldf.filename)
2113 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2114 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2116 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
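# utils.poolify() maps a source/component pair to its pool directory,
# presumably something like "pool/main/h/hello/" for source "hello" in
# main, and we keep the original basename within it.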
2118 # TODO: Care about size/md5sum collisions etc
2119 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2121 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2123 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2124 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2128 # Don't reference the old file from this changes
2130 if p.file_id == oldf.file_id:
2133 poolfiles.append(newf)
2135 # Fix up the DSC references
2138 for df in source.srcfiles:
2139 if df.poolfile.file_id == oldf.file_id:
2140 # Add a new DSC entry and mark the old one for deletion
2141 # Don't do it in the loop so we don't change the thing we're iterating over
2143 newdscf.source_id = source.source_id
2144 newdscf.poolfile_id = newf.file_id
2145 session.add(newdscf)
2155 # Make sure that our source object is up-to-date
2156 session.expire(source)
2158 # Add changelog information to the database
2159 self.store_changelog()
2161 # Install the files into the pool
2162 for newfile, entry in self.pkg.files.items():
2163 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2164 utils.move(newfile, destination)
2165 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2166 stats.accept_bytes += float(entry["size"])
2168 # Copy the .changes file across for suites which need it.
2169 copy_changes = dict([(x.copychanges, '')
2170 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2171 if x.copychanges is not None])
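# Keying the dict on the copychanges target means each destination is
# copied to only once, even if several suites share the same setting.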
2173 for dest in copy_changes.keys():
2174 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2176 # We're done - commit the database changes
2178 # Our SQL session will automatically start a new transaction after
2181 # Now ensure that the metadata has been added
2182 # This has to be done after we copy the files into the pool
2183 # For source if we have it:
2184 if self.pkg.changes["architecture"].has_key("source"):
2185 import_metadata_into_db(source, session)
2187 # Now for any of our binaries
2189 import_metadata_into_db(b, session)
2193 # Move the .changes into the 'done' directory
2194 ye, mo, da = time.gmtime()[0:3]
2195 donedir = os.path.join(cnf["Dir::Queue::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
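# e.g. <Dir::Queue::Done>/2010/03/07 for an upload accepted on 2010-03-07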
2196 if not os.path.isdir(donedir):
2197 os.makedirs(donedir)
2199 utils.move(self.pkg.changes_file,
2200 os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2202 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2203 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2206 self.Subst["__SUMMARY__"] = summary
2207 mail_message = utils.TemplateSubst(self.Subst,
2208 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2209 utils.send_mail(mail_message)
2210 self.announce(short_summary, 1)
2212 ## Helper stuff for DebBugs Version Tracking
2213 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2214 if self.pkg.changes["architecture"].has_key("source"):
2215 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2216 version_history = os.fdopen(fd, 'w')
2217 version_history.write(self.pkg.dsc["bts changelog"])
2218 version_history.close()
2219 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2220 self.pkg.changes_file[:-8]+".versions")
2221 os.rename(temp_filename, filename)
2222 os.chmod(filename, 0644)
2224 # Write out the binary -> source mapping.
2225 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2226 debinfo = os.fdopen(fd, 'w')
2227 for name, entry in sorted(self.pkg.files.items()):
2228 if entry["type"] == "deb":
2229 line = " ".join([entry["package"], entry["version"],
2230 entry["architecture"], entry["source package"],
2231 entry["source version"]])
2232 debinfo.write(line+"\n")
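# Each .debinfo line reads:
#   <package> <version> <architecture> <source package> <source version>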
2234 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2235 self.pkg.changes_file[:-8]+".debinfo")
2236 os.rename(temp_filename, filename)
2237 os.chmod(filename, 0644)
2241 # Set up our copy queues (e.g. buildd queues)
2242 for suite_name in self.pkg.changes["distribution"].keys():
2243 suite = get_suite(suite_name, session)
2244 for q in suite.copy_queues:
2246 q.add_file_from_pool(f)
2251 stats.accept_count += 1
2253 def check_override(self):
2255 Checks override entries for validity. Mails "Override disparity" warnings,
2256 if that feature is enabled.
2258 Abandons the check if
2259 - override disparity checks are disabled
2260 - mail sending is disabled
2265 # Abandon the check if override disparity checks have been disabled
2266 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2269 summary = self.pkg.check_override()
2274 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2277 self.Subst["__SUMMARY__"] = summary
2278 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2279 utils.send_mail(mail_message)
2280 del self.Subst["__SUMMARY__"]
2282 ###########################################################################
2284 def remove(self, from_dir=None):
2286 Used (for instance) in p-u to remove the package from unchecked
2288 Also removes the package from the holding area.
2290 if from_dir is None:
2291 from_dir = self.pkg.directory
2294 for f in self.pkg.files.keys():
2295 os.unlink(os.path.join(from_dir, f))
2296 if os.path.exists(os.path.join(h.holding_dir, f)):
2297 os.unlink(os.path.join(h.holding_dir, f))
2299 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2300 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2301 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2303 ###########################################################################
2305 def move_to_queue (self, queue):
2307 Move files to a destination queue using the permissions in the table
2310 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2311 queue.path, perms=int(queue.change_perms, 8))
2312 for f in self.pkg.files.keys():
2313 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
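# queue.perms / queue.change_perms are stored as octal strings
# (e.g. "0644"), hence the int(..., 8) conversions above.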
2315 ###########################################################################
2317 def force_reject(self, reject_files):
2319 Forcefully move files from the current directory to the
2320 reject directory. If any file already exists in the reject
2321 directory it will be moved to the morgue to make way for
2324 @type reject_files: list
2325 @param reject_files: list of files to move to the reject directory
2331 for file_entry in reject_files:
2332 # Skip any files which don't exist or which we don't have permission to copy.
2333 if os.access(file_entry, os.R_OK) == 0:
2336 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2339 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2341 # File exists? Let's find a new name by adding a number
2342 if e.errno == errno.EEXIST:
2344 dest_file = utils.find_next_free(dest_file, 255)
2345 except NoFreeFilenameError:
2346 # Something's either gone badly Pete Tong, or
2347 # someone is trying to exploit us.
2348 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2351 # Make sure we really got it
2353 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2356 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2360 # If we got here, we own the destination file, so we can
2361 # safely overwrite it.
2362 utils.move(file_entry, dest_file, 1, perms=0660)
2365 ###########################################################################
2366 def do_reject (self, manual=0, reject_message="", notes=""):
2368 Reject an upload. If called without a reject message or C{manual} is
2369 true, spawn an editor so the user can write one.
2372 @param manual: manual or automated rejection
2374 @type reject_message: string
2375 @param reject_message: A reject message
2380 # If we weren't given a manual rejection message, spawn an
2381 # editor so the user can add one in...
2382 if manual and not reject_message:
2383 (fd, temp_filename) = utils.temp_filename()
2384 temp_file = os.fdopen(fd, 'w')
2387 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2388 % (note.author, note.version, note.notedate, note.comment))
2390 editor = os.environ.get("EDITOR","vi")
2392 while answer == 'E':
2393 os.system("%s %s" % (editor, temp_filename))
2394 temp_fh = utils.open_file(temp_filename)
2395 reject_message = "".join(temp_fh.readlines())
2397 print "Reject message:"
2398 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2399 prompt = "[R]eject, Edit, Abandon, Quit ?"
2401 while prompt.find(answer) == -1:
2402 answer = utils.our_raw_input(prompt)
2403 m = re_default_answer.search(prompt)
2406 answer = answer[:1].upper()
2407 os.unlink(temp_filename)
2413 print "Rejecting.\n"
2417 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2418 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2420 # Move all the files into the reject directory
2421 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2422 self.force_reject(reject_files)
2424 # If we fail here someone is probably trying to exploit the race
2425 # so let's just raise an exception ...
2426 if os.path.exists(reason_filename):
2427 os.unlink(reason_filename)
2428 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2430 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2434 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2435 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2436 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2437 os.write(reason_fd, reject_message)
2438 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2440 # Build up the rejection email
2441 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2442 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2443 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2444 self.Subst["__REJECT_MESSAGE__"] = ""
2445 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2446 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2447 # Write the rejection email out as the <foo>.reason file
2448 os.write(reason_fd, reject_mail_message)
2450 del self.Subst["__REJECTOR_ADDRESS__"]
2451 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2452 del self.Subst["__CC__"]
2456 # Send the rejection mail
2457 utils.send_mail(reject_mail_message)
2460 self.logger.log(["rejected", self.pkg.changes_file])
2464 ################################################################################
2465 def in_override_p(self, package, component, suite, binary_type, filename, session):
2467 Check if a package already has override entries in the DB
2469 @type package: string
2470 @param package: package name
2472 @type component: string
2473 @param component: database id of the component
2476 @param suite: database id of the suite
2478 @type binary_type: string
2479 @param binary_type: type of the package
2481 @type filename: string
2482 @param filename: filename we check
2484 @return: the database result. But no one cares anyway.
2490 if binary_type == "": # must be source
2493 file_type = binary_type
2495 # Override suite name; used for example with proposed-updates
2496 oldsuite = get_suite(suite, session)
2497 if oldsuite is not None and oldsuite.overridesuite:
2498 suite = oldsuite.overridesuite
2500 result = get_override(package, suite, component, file_type, session)
2502 # If checking for a source package, fall back on the binary override type
2503 if file_type == "dsc" and len(result) < 1:
2504 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2506 # Remember the section and priority so we can check them later if appropriate
2509 self.pkg.files[filename]["override section"] = result.section.section
2510 self.pkg.files[filename]["override priority"] = result.priority.priority
2515 ################################################################################
2516 def get_anyversion(self, sv_list, suite):
2519 @param sv_list: list of (suite, version) tuples to check
2522 @param suite: suite name
2528 anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2529 for (s, v) in sv_list:
2530 if s in [ x.lower() for x in anysuite ]:
2531 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2536 ################################################################################
2538 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2541 @param sv_list: list of (suite, version) tuples to check
2543 @type filename: string
2544 @param filename: XXX
2546 @type new_version: string
2547 @param new_version: XXX
2549 Ensure versions are newer than existing packages in target
2550 suites and that cross-suite version checking rules as
2551 set out in the conf file are satisfied.
2556 # Check versions for each target suite
2557 for target_suite in self.pkg.changes["distribution"].keys():
2558 must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2559 must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2561 # Enforce "must be newer than target suite" even if conffile omits it
2562 if target_suite not in must_be_newer_than:
2563 must_be_newer_than.append(target_suite)
2565 for (suite, existent_version) in sv_list:
2566 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2568 if suite in must_be_newer_than and sourceful and vercmp < 1:
2569 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2571 if suite in must_be_older_than and vercmp > -1:
2574 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2575 # we really use the other suite, ignoring the conflicting one ...
2576 addsuite = self.pkg.changes["distribution-version"][suite]
2578 add_version = self.get_anyversion(sv_list, addsuite)
2579 target_version = self.get_anyversion(sv_list, target_suite)
2582 # not add_version can only happen if we map to a suite
2583 # that doesn't enhance the suite we're propup'ing from.
2584 # so "propup-ver x a b c; map a d" is a problem only if
2585 # d doesn't enhance a.
2587 # i think we could always propagate in this case, rather
2588 # than complaining. either way, this isn't a REJECT issue
2590 # And - we really should complain to the dorks who configured dak
2591 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2592 self.pkg.changes.setdefault("propdistribution", {})
2593 self.pkg.changes["propdistribution"][addsuite] = 1
2595 elif not target_version:
2596 # not target_version is true when the package is NEW
2597 # we could just stick with the "...old version..." REJECT
2598 # for this, I think.
2599 self.rejects.append("Won't propagate NEW packages.")
2600 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2601 # propagation would be redundant. no need to reject though.
2602 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2604 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2605 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2607 self.warnings.append("Propagating upload to %s" % (addsuite))
2608 self.pkg.changes.setdefault("propdistribution", {})
2609 self.pkg.changes["propdistribution"][addsuite] = 1
2613 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2615 ################################################################################
2616 def check_binary_against_db(self, filename, session):
2617 # Ensure version is sane
2618 self.cross_suite_version_check( \
2619 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2620 self.pkg.files[filename]["architecture"], session),
2621 filename, self.pkg.files[filename]["version"], sourceful=False)
2623 # Check for any existing copies of the file
2624 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2625 q = q.filter_by(version=self.pkg.files[filename]["version"])
2626 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
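# Any match here means a binary of the same name, version and
# architecture is already known to the archive database.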
2629 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2631 ################################################################################
2633 def check_source_against_db(self, filename, session):
2634 source = self.pkg.dsc.get("source")
2635 version = self.pkg.dsc.get("version")
2637 # Ensure version is sane
2638 self.cross_suite_version_check( \
2639 get_suite_version_by_source(source, session), filename, version,
2642 ################################################################################
2643 def check_dsc_against_db(self, filename, session):
2646 @warning: NB: this function can remove entries from the 'files' index [if
2647 the orig tarball is a duplicate of the one in the archive]; if
2648 you're iterating over 'files' and call this function as part of
2649 the loop, be sure to add a check to the top of the loop to
2650 ensure you haven't just tried to dereference the deleted entry.
2655 self.pkg.orig_files = {} # XXX: do we need to clear it?
2656 orig_files = self.pkg.orig_files
2658 # Try and find all files mentioned in the .dsc. This has
2659 # to work harder to cope with the multiple possible
2660 # locations of an .orig.tar.gz.
2661 # The ordering on the select is needed to pick the newest orig
2662 # when it exists in multiple places.
2663 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2665 if self.pkg.files.has_key(dsc_name):
2666 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2667 actual_size = int(self.pkg.files[dsc_name]["size"])
2668 found = "%s in incoming" % (dsc_name)
2670 # Check the file does not already exist in the archive
2671 ql = get_poolfile_like_name(dsc_name, session)
2673 # Strip out anything that isn't '%s' or '/%s$'
2675 if not i.filename.endswith(dsc_name):
2678 # "[dak] has not broken them. [dak] has fixed a
2679 # brokenness. Your crappy hack exploited a bug in
2682 # "(Come on! I thought it was always obvious that
2683 # one just doesn't release different files with
2684 # the same name and version.)"
2685 # -- ajk@ on d-devel@l.d.o
2688 # Ignore exact matches for .orig.tar.gz
2690 if re_is_orig_source.match(dsc_name):
2692 if self.pkg.files.has_key(dsc_name) and \
2693 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2694 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2695 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2696 # TODO: Don't delete the entry, just mark it as not needed
2697 # This would fix the stupidity of changing something we often iterate over
2698 # whilst we're doing it
2699 del self.pkg.files[dsc_name]
2700 dsc_entry["files id"] = i.file_id
2701 if not orig_files.has_key(dsc_name):
2702 orig_files[dsc_name] = {}
2703 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2706 # Don't bitch that we couldn't find this file later
2708 self.later_check_files.remove(dsc_name)
2714 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2716 elif re_is_orig_source.match(dsc_name):
2718 ql = get_poolfile_like_name(dsc_name, session)
2720 # Strip out anything that isn't '%s' or '/%s$'
2721 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2723 if not i.filename.endswith(dsc_name):
2727 # Unfortunately, we may get more than one match here if,
2728 # for example, the package was in potato but had an -sa
2729 # upload in woody. So we need to choose the right one.
2731 # default to something sane in case we don't match any or have only one
2736 old_file = os.path.join(i.location.path, i.filename)
2737 old_file_fh = utils.open_file(old_file)
2738 actual_md5 = apt_pkg.md5sum(old_file_fh)
2740 actual_size = os.stat(old_file)[stat.ST_SIZE]
2741 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2744 old_file = os.path.join(i.location.path, i.filename)
2745 old_file_fh = utils.open_file(old_file)
2746 actual_md5 = apt_pkg.md5sum(old_file_fh)
2748 actual_size = os.stat(old_file)[stat.ST_SIZE]
2750 suite_type = x.location.archive_type
2751 # need this for updating dsc_files in install()
2752 dsc_entry["files id"] = x.file_id
2753 # See install() in process-accepted...
2754 if not orig_files.has_key(dsc_name):
2755 orig_files[dsc_name] = {}
2756 orig_files[dsc_name]["id"] = x.file_id
2757 orig_files[dsc_name]["path"] = old_file
2758 orig_files[dsc_name]["location"] = x.location.location_id
2760 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2761 # Not there? Check the queue directories...
2762 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2763 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2765 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2766 if os.path.exists(in_otherdir):
2767 in_otherdir_fh = utils.open_file(in_otherdir)
2768 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2769 in_otherdir_fh.close()
2770 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2772 if not orig_files.has_key(dsc_name):
2773 orig_files[dsc_name] = {}
2774 orig_files[dsc_name]["path"] = in_otherdir
2777 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2780 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2782 if actual_md5 != dsc_entry["md5sum"]:
2783 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2784 if actual_size != int(dsc_entry["size"]):
2785 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2787 ################################################################################
2788 # This is used by process-new and process-holding to recheck a changes file
2789 # at the time we're running. It mainly wraps various other internal functions
2790 # and is similar to accepted_checks - these should probably be tidied up
2792 def recheck(self, session):
2794 for f in self.pkg.files.keys():
2795 # The .orig.tar.gz can disappear out from under us if it's a
2796 # duplicate of one in the archive.
2797 if not self.pkg.files.has_key(f):
2800 entry = self.pkg.files[f]
2802 # Check that the source still exists
2803 if entry["type"] == "deb":
2804 source_version = entry["source version"]
2805 source_package = entry["source package"]
2806 if not self.pkg.changes["architecture"].has_key("source") \
2807 and not source_exists(source_package, source_version, \
2808 suites = self.pkg.changes["distribution"].keys(), session = session):
2809 source_epochless_version = re_no_epoch.sub('', source_version)
2810 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
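# e.g. "hello_2.8-2.dsc"; the epoch (if any) is stripped because pool
# filenames never carry it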
2812 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2813 if cnf.has_key("Dir::Queue::%s" % (q)):
2814 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2817 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2819 # Version and file overwrite checks
2820 if entry["type"] == "deb":
2821 self.check_binary_against_db(f, session)
2822 elif entry["type"] == "dsc":
2823 self.check_source_against_db(f, session)
2824 self.check_dsc_against_db(f, session)
2826 ################################################################################
2827 def accepted_checks(self, overwrite_checks, session):
2828 # Recheck anything that relies on the database, since that's not
2829 # frozen between accept and our run time when called from p-a.
2831 # overwrite_checks is set to False when installing to stable/oldstable
2836 # Find the .dsc (again)
2838 for f in self.pkg.files.keys():
2839 if self.pkg.files[f]["type"] == "dsc":
2842 for checkfile in self.pkg.files.keys():
2843 # The .orig.tar.gz can disappear out from under us if it's a
2844 # duplicate of one in the archive.
2845 if not self.pkg.files.has_key(checkfile):
2848 entry = self.pkg.files[checkfile]
2850 # Check that the source still exists
2851 if entry["type"] == "deb":
2852 source_version = entry["source version"]
2853 source_package = entry["source package"]
2854 if not self.pkg.changes["architecture"].has_key("source") \
2855 and not source_exists(source_package, source_version, \
2856 suites = self.pkg.changes["distribution"].keys(), \
2858 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2860 # Version and file overwrite checks
2861 if overwrite_checks:
2862 if entry["type"] == "deb":
2863 self.check_binary_against_db(checkfile, session)
2864 elif entry["type"] == "dsc":
2865 self.check_source_against_db(checkfile, session)
2866 self.check_dsc_against_db(dsc_filename, session)
2868 # propagate in the case it is in the override tables:
2869 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2870 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2871 propogate[suite] = 1
2873 nopropogate[suite] = 1
2875 for suite in propogate.keys():
2876 if suite in nopropogate:
2878 self.pkg.changes["distribution"][suite] = 1
2880 for checkfile in self.pkg.files.keys():
2881 # Check the package is still in the override tables
2882 for suite in self.pkg.changes["distribution"].keys():
2883 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2884 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2886 ################################################################################
2887 # If any file of an upload has a recent mtime then chances are good
2888 # the file is still being uploaded.
2890 def upload_too_new(self):
2893 # Move back to the original directory to get accurate time stamps
2895 os.chdir(self.pkg.directory)
2896 file_list = self.pkg.files.keys()
2897 file_list.extend(self.pkg.dsc_files.keys())
2898 file_list.append(self.pkg.changes_file)
2901 last_modified = time.time()-os.path.getmtime(f)
2902 if last_modified < int(cnf["Dinstall::SkipTime"]):
2911 def store_changelog(self):
2913 # Skip binary-only upload if it is not a bin-NMU
2914 if not self.pkg.changes['architecture'].has_key('source'):
2915 from daklib.regexes import re_bin_only_nmu
2916 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2919 session = DBConn().session()
2921 # Check if upload already has a changelog entry
2922 query = """SELECT changelog_id FROM changes WHERE source = :source
2923 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2924 if session.execute(query, {'source': self.pkg.changes['source'], \
2925 'version': self.pkg.changes['version'], \
2926 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2930 # Add current changelog text into changelogs_text table, return created ID
2931 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2932 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
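# RETURNING id hands back the primary key of the freshly inserted row
# without a second query (a PostgreSQL extension).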
2934 # Link ID to the upload available in changes table
2935 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2936 AND version = :version AND architecture = :architecture"""
2937 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2938 'version': self.pkg.changes['version'], \
2939 'architecture': " ".join(self.pkg.changes['architecture'].keys())})