Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

from dak_exceptions import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from lintian import parse_lintian_output, generate_reject_messages
from contents import UnpackedSource

# suppress some deprecation warnings in squeeze related to apt_pkg
warnings.filterwarnings('ignore', \
    "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
warnings.filterwarnings('ignore', \
    "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \

###############################################################################
def get_type(f, session):
    Get the file type of C{f}

    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
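
# A minimal usage sketch for get_type() (illustrative only: the file entry
# below is hypothetical and an open SQLAlchemy session is assumed):
#
#   f = {"type": "deb", "dbtype": "deb"}
#   file_type = get_type(f, session)    # -> "deb"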
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session=None):
    Determine what parts in a C{changes} file are NEW.

    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @param warn: Warn if overrides are added for (old)stable

    @return: dictionary of NEW components.
    # TODO: This should all use the database instead of parsing the changes

    dbchg = get_dbchange(filename, session)
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":

        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":

        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
            print "WARNING: Invalid suite %s found" % suite

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            for f in dbchg.files:
                    # If it's processed, we can ignore it
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % b
    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]

        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
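
# An illustrative (hypothetical) call sequence: determine_new() expects the
# parsed changes dictionary and its files dictionary, and assumes the
# changes["suite"] map of target suites has already been filled in:
#
#   changes = utils.parse_changes(filename)
#   files = utils.build_file_list(changes)
#   new = determine_new(filename, changes, files, warn=0, session=session)
#   if new:
#       print "Upload introduces NEW packages: %s" % ", ".join(new.keys())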
################################################################################

def check_valid(new, session=None):
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages cannot be udeb
      - source priority can only be assigned to dsc file types

    @param new: Dict of new packages with their section, priority and type.
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
            new[pkg]["section id"] = -1
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
            new[pkg]["priority id"] = -1
            new[pkg]["priority id"] = priority.priority_id

        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # (compare the priority name, not the Priority object fetched above)
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
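
# For reference, a (hypothetical) entry in the dict handled above might look
# like this after determine_new() and check_valid() have run:
#
#   new["dak"] = {
#       "priority": "optional", "priority id": 3,
#       "section": "misc",      "section id": 42,
#       "type": "deb",
#       "component": "main",
#       "files": ["dak_1.0_all.deb"],
#   }
#
# A "section id" or "priority id" of -1 marks a value that the database (or
# the sanity checks above) rejected.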
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
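
# A minimal sketch of how TarTime gets wired up (mirrors check_timestamps()
# below; the cutoff values and filename here are made up):
#
#   tar = TarTime(time.time() + 24 * 3600,                   # one day ahead
#                 time.mktime(time.strptime("1975", "%Y")))  # distant past
#   apt_inst.debExtract(utils.open_file("foo_1.0_all.deb"),
#                       tar.callback, "data.tar.gz")
#   # tar.future_files / tar.ancient_files now map member names to mtimes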
###############################################################################

def prod_maintainer(notes, upload):
    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
            temp_file.write(note.comment)

    editor = os.environ.get("EDITOR", "vi")
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())

        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message, " ", include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            answer = answer[:1].upper()
    os.unlink(temp_filename)

    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"]+"/process-new.prod")

    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR", "vi")
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()

        print utils.prefix_multi_line_string(newnote, " ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            answer = answer[:1].upper()
    os.unlink(temp_filename)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author = utils.whoami()
    comment.trainee = trainee
###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source=source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))

def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source=source)

def get_source_by_package_and_suite(package, suite_name, session):
    returns a DBSource query filtered by DBBinary.package and this package's

    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package=package). \
        join(DBBinary.suites).filter_by(suite_name=suite_name)

def get_suite_version_by_package(package, arch_string, session):
    returns a list of tuples (suite_name, version) for binary package and

    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package=package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
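
# Illustrative (hypothetical) uses of the helpers above, assuming an open
# SQLAlchemy session; package names and results are made up:
#
#   src = get_newest_source("dak", session)               # DBSource or None
#   get_suite_version_by_source("dak", session)           # [('unstable', '1.0-1'), ...]
#   get_suite_version_by_package("dak", "amd64", session)
#   get_source_by_package_and_suite("dak", "unstable", session).all()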
class Upload(object):
    Everything that has to do with processing an upload.

    ###########################################################################
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.later_check_files = []
    def package_info(self):
        Format various messages from this Upload to send to the maintainer.

            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),

        for title, messages in msgs:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
    ###########################################################################
    def load_changes(self, filename):
        Load a changes file and set up a dictionary around it. Also checks for mandatory

        @type filename: string
        @param filename: Changes filename, full path.

        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))

        # Parse the Files field from the .changes into another dictionary
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}
                self.pkg.changes[i][j] = 1
        # Fix the Maintainer: field to be RFC822/2047 compatible
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))
        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append("%s: `%s' from Closes field isn't a number." % (filename, i))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))

        # Changes was syntactically valid even if we'll reject
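
        # A hedged usage sketch (hypothetical): callers typically bail out on
        # a False return and only then consult self.rejects for the
        # accumulated rejection reasons:
        #
        #   u = Upload()
        #   if not u.load_changes("/srv/queue/foo_1.0-1_amd64.changes"):
        #       utils.warn("\n".join(u.rejects))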
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
            elif mtype == "ignore":
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
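
        # For illustration, SuiteMappings entries are whitespace-separated
        # directives of the forms handled above (the suite names here are
        # made up):
        #
        #   "map stable proposed-updates"
        #   "silent-map stable-security stable"
        #   "map-unreleased unstable experimental"
        #   "ignore oldstable"
        #   "reject stable"
        #   "propup-version testing-security testing"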
    ###########################################################################

    def binary_file_checks(self, f, session):
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            # Can't continue, none of the checks on control would work.

        # Check for mandatory "Description:"
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
            self.rejects.append("%s: Missing Description in binary package" % (f))

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                self.rejects.append("%s: No %s field in control." % (f, field))
        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session=session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session=session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
        # Sanity-check the Depends field
        depends = control.Find("Depends")
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
            provide = re_spacestrip.sub('', provides)
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # If there is a Built-Using field, we need to check we can find the
        # exact source version
        built_using = control.Find("Built-Using")
                entry["built-using"] = []
                for dep in apt_pkg.parse_depends(built_using):
                    bu_s, bu_v, bu_e = dep[0]
                    # Check that it's an exact match dependency and we have
                    # some form of version
                    if bu_e != "=" or len(bu_v) < 1:
                        self.rejects.append("%s: Built-Using contains non-strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
                        # Find the source id for this version
                        bu_so = get_sources_from_name(bu_s, version=bu_v, session=session)
                            self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
                            entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )

            except ValueError, e:
                self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))
        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))

            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session=session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            # (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        entry = self.pkg.files[f]

        if entry.has_key("byhand"):

        # Check we have fields we need to do these checks
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

            entry["files id"] = None
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list=arch_list, session=session)
        if component is not None:
            entry["othercomponents"] = component
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()

            os.chdir(self.pkg.directory)
                ret = holding.copy_to_holding(f)
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:

        has_binaries = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)
        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################

    def __dsc_filename(self):
        Returns: (Status, Dsc_Filename)

        Status: Boolean; True when there was no error, False otherwise
        Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise

        for name, entry in self.pkg.files.items():
            if entry.has_key("type") and entry["type"] == "dsc":
                    return False, "cannot process a .changes file with multiple .dsc's."

        if not dsc_filename:
            return False, "source uploads must contain a dsc file"

        return True, dsc_filename
    def load_dsc(self, action=True, signing_rules=1):
        Find and load the dsc from self.pkg.files into self.dsc

        Returns: (Status, Reason)

        Status: Boolean; True when there was no error, False otherwise
        Reason: String; When Status is False this describes the error

        (status, dsc_filename) = self.__dsc_filename()
            # If status is false, dsc_filename has the reason
            return False, dsc_filename

            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
        except CantOpenError:
                return False, "%s: can't read file." % (dsc_filename)
        except ParseChangesError, line:
            return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
        except InvalidDscError, line:
            return False, "%s: syntax error on line %s." % (dsc_filename, line)
        except ChangesUnicodeError:
            return False, "%s: dsc file not proper utf-8." % (dsc_filename)
    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):

        (status, reason) = self.load_dsc(action=action)
            self.rejects.append(reason)

        (status, dsc_filename) = self.__dsc_filename()
            # If status is false, dsc_filename has the reason
            self.rejects.append(dsc_filename)

        # Build up the file list of files mentioned by the .dsc
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
                # Have apt try to parse them...
                    apt_pkg.ParseSrcDepends(field)
                    self.rejects.append("%s: invalid %s field (cannot be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            # Check if we've already processed this file if we have a dbchg object
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        del self.pkg.files[f]
                self.rejects.append("Could not find file %s referenced in changes" % f)
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""
        # Find the .dsc (again)
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)

            src = os.path.join(source_dir, f)
            # If a file is missing for whatever reason, give up.
            if not os.path.exists(src):

            if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
               self.pkg.orig_files[f].has_key("path"):

            dest = os.path.join(os.getcwd(), f)
            os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):

            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
            unpacked = UnpackedSource(dsc_filename)
            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_file = unpacked.get_changelog_file()
        if changelog_file is None:
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
            shutil.rmtree(tmpdir)
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
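
        # For reference (an assumption about utils.known_hashes, which is not
        # shown in this excerpt): it is a sequence of
        # (hashname, hashfunc, (major, minor)) tuples such as
        # ("sha1", apt_pkg.sha1sum, (1, 8)), meaning the Checksums-Sha1 field
        # is required from changes format 1.8 onwards and is computed locally
        # for older formats.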
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list of the symlinks that were created is returned (so they
        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore

            if os.path.exists(filename):
                # File exists, no need to continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                session_ = DBConn().session()

            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename

                if symlink_if_valid(poolfile_path):

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue

                if symlink_if_valid(queuefile_path):
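
        # A minimal usage sketch (mirrors check_lintian() below): symlink any
        # missing orig tarballs next to the upload and remove them once done:
        #
        #   symlinked = self.ensure_orig()
        #   # ... run tools that need the complete source beside the .dsc ...
        #   for symlink in symlinked:
        #       os.unlink(symlink)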
    ###########################################################################

    def check_lintian(self):
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()

            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
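
        # The tag file is assumed (its contents are not shown in this
        # excerpt) to group tag names by severity under a top-level 'lintian'
        # key, for example:
        #
        #   lintian:
        #     fatal:
        #       - binary-with-bad-dynamic-table
        #     nonfatal:
        #       - debian-changelog-file-missing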
        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])

        cmd = "lintian --show-overrides --tags-from-file %s %s" % \
            (temp_filename, self.pkg.changes_file)

        result, output = commands.getstatusoutput(cmd)

        # Remove our tempfile and any symlinks we created
        os.unlink(temp_filename)

        for symlink in symlinked:

            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

                [self.pkg.changes_file, "check_lintian"] + list(txt)

        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
    ###########################################################################
    def check_urgency(self):
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    future_files = tar.future_files.keys()
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
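
        # For illustration (these configuration values are deployment
        # specific and assumed here): FutureTimeTravelGrace is a number of
        # seconds, e.g. 86400 for one day, and PastCutoffYear a year string
        # such as "1975", so accepted member mtimes fall within
        # [mktime(PastCutoffYear), now + grace].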
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:

            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])

        # TODO: Check that import-keyring adds UIDs properly
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)

            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
               fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
            # If not a DM, we allow full upload rights
            uid_email = "%s@debian.org" % (fpr.uid.uid)
            self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        if fpr.binary_acl.access_level == 'full':

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version

        for fb in session.query(UploadBlock).filter_by(source=self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))

    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return
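
        # The GR also keys DM rights off the most recent version in unstable
        # or experimental, so look that version up for the checks below.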
        r = get_newest_source(self.pkg.changes["source"], session)

        if r is None:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                for s in get_source_by_package_and_suite(b, suite, session):
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))

    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen: there is a wrapper to edit the file which
            # checks it, but we prefer to be safe rather than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)
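
            # Note: compare is only bound when current is not None; the
            # "current is None or" test below short-circuits before reading it.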
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return

    ###########################################################################
    # End check_signed_by_key checks
    ###########################################################################

    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
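        # (re_fdnic appears to match the empty lines between changelog stanzas;
        # replacing them with "\n .\n" keeps them intact, mail-style.)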
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)

    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: If set to false, no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.
        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.update_subst()
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary

    ###########################################################################

    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: If set to false, no real action will be done.

        @rtype: string
        @return: Textstring about action taken.
        """

        cnf = Config()

        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist)
            if suite is None: continue
            announce_list = suite.announce
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
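                # Presumably Dinstall::TrackingServer points at the package
                # tracking system; Bcc it so the PTS sees the announcement too.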
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary

    ###########################################################################

    def accept(self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        self.logger.log(["installing changes", self.pkg.changes_file])

        poolfiles = []

        # Add the .dsc file to the DB first
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))
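
        # Everything collected in poolfiles is pushed into the per-suite copy
        # (e.g. buildd) queues at the end of this method.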

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    session.flush()

                    # Don't reference the old file from this changes
                    for p in pfs:
                        if p.file_id == oldf.file_id:
                            pfs.remove(p)

                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.commit()

                    # Make sure that our source object is up-to-date
                    session.expire(source)

        # Add changelog information to the database
        self.store_changelog()

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suites which need it.
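        # (Keying a dict on copychanges deduplicates destinations shared by
        # several of the target suites.)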
        copy_changes = dict([(x.copychanges, '')
                             for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
                             if x.copychanges is not None])

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            if self.pkg.changes["architecture"].has_key("source"):
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
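                # self.pkg.changes_file[:-8] strips the trailing ".changes"
                # (8 characters) before the new suffix is appended.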
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        # Finally...
        stats.accept_count += 1

    def check_override(self):
        """
        Checks override entries for validity.  Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
          - override disparity checks are disabled
          - mail sending is disabled
        """

        cnf = Config()

        # Abandon the check if override disparity checks have been disabled
        if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
            return

        summary = self.pkg.check_override()

        if summary == "":
            return

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]

    ###########################################################################

    def remove(self, from_dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked.

        Also removes the package from holding area.
        """
        if from_dir is None:
            from_dir = self.pkg.directory
        self.directory = ""

        h = Holding()
        for f in self.pkg.files.keys():
            os.unlink(os.path.join(from_dir, f))
            if os.path.exists(os.path.join(h.holding_dir, f)):
                os.unlink(os.path.join(h.holding_dir, f))

        os.unlink(os.path.join(from_dir, self.pkg.changes_file))
        if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
            os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))

    ###########################################################################

    def move_to_queue(self, queue):
        """
        Move files to a destination queue using the permissions in the table
        """
        h = Holding()
        utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
                   queue.path, perms=int(queue.change_perms, 8))
        for f in self.pkg.files.keys():
            utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))

    ###########################################################################

    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary
        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
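
            # O_CREAT|O_EXCL fails with EEXIST if the name is already taken,
            # so creating the file doubles as an atomic claim on it.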
            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError:
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)

    ###########################################################################
    def do_reject(self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @return: 0
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR", "vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message, "  ", include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0

    ################################################################################
    def in_override_p(self, package, component, suite, binary_type, filename, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: component name

        @type suite: string
        @param suite: suite name

        @type binary_type: string
        @param binary_type: type of the package

        @type filename: string
        @param filename: filename we check

        @return: the database result. But no one cares anyway.
        """

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        oldsuite = get_suite(suite, session)
        if (not oldsuite is None) and oldsuite.overridesuite:
            suite = oldsuite.overridesuite

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[filename]["override section"] = result.section.section
            self.pkg.files[filename]["override priority"] = result.priority.priority
            return result

        return None

    ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type suite: string
        @param suite: suite name

        @return: the highest version found for the package in 'suite' or any
                 of the suites it enhances, or None.
        """
        Cnf = Config()
        anyversion = None
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
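        # The suite itself plus anything listed as enhancing it in the config
        # all count when looking for an existing version.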
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v

        return anyversion

    ################################################################################

    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the file whose version is being checked

        @type new_version: string
        @param new_version: version of the upload being checked

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)
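
                # (apt_pkg.VersionCompare returns <0, 0 or >0 as new_version is
                # older than, equal to or newer than existent_version.)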
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

    ################################################################################
    def check_binary_against_db(self, filename, session):
        # Ensure version is sane
        self.cross_suite_version_check( \
            get_suite_version_by_package(self.pkg.files[filename]["package"], \
                self.pkg.files[filename]["architecture"], session),
            filename, self.pkg.files[filename]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
        q = q.filter_by(version=self.pkg.files[filename]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)

    ################################################################################

    def check_source_against_db(self, filename, session):
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        self.cross_suite_version_check( \
            get_suite_version_by_source(source, session), filename, version,
            sourceful=True)

    ################################################################################
    def check_dsc_against_db(self, filename, session):
        """
        @warning: NB: this function can remove entries from the 'files' index [if
         the orig tarball is a duplicate of the one in the archive]; if
         you're iterating over 'files' and call this function as part of
         the loop, be sure to add a check to the top of the loop to
         ensure you haven't just tried to dereference the deleted entry.
        """

        Cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                for i in ql:
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dak.
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                dsc_entry["files id"] = i.file_id
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                                # Don't bitch that we couldn't find this file later
                                try:
                                    self.later_check_files.remove(dsc_name)
                                except ValueError:
                                    pass

                    if not match:
                        self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                for i in ql:
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
                continue

            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, filename))

    ################################################################################
    # This is used by process-new and process-holding to recheck a changes file
    # at the time we're running.  It mainly wraps various other internal functions
    # and is similar to accepted_checks - these should probably be tidied up
    # and combined
    def recheck(self, session):
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(f):
                continue

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, \
                       suites = self.pkg.changes["distribution"].keys(), session = session):
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    found = False
                    for q in ["Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                                found = True
                    if not found:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)

    ################################################################################
    def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database; since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        propagate = {}
        nopropagate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, \
                       suites = self.pkg.changes["distribution"].keys(), \
                       session = session):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            # Rebind entry for this file; the value left over from the loop
            # above would otherwise be checked by mistake.
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))

    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.

    def upload_too_new(self):
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time()-os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except:
                pass

        os.chdir(cwd)
        return too_new

    def store_changelog(self):

        # Skip binary-only upload if it is not a bin-NMU
        if not self.pkg.changes['architecture'].has_key('source'):
            from daklib.regexes import re_bin_only_nmu
            if not re_bin_only_nmu.search(self.pkg.changes['version']):
                return

        session = DBConn().session()

        # Check if upload already has a changelog entry
        query = """SELECT changelog_id FROM changes WHERE source = :source
                   AND version = :version AND architecture = :architecture AND changelog_id != 0"""
        if session.execute(query, {'source': self.pkg.changes['source'], \
                                   'version': self.pkg.changes['version'], \
                                   'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
            session.commit()
            return

        # Add current changelog text into changelogs_text table, return created ID
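        # (RETURNING is PostgreSQL syntax - dak's database - and hands back the
        # id of the freshly inserted row without a second query.)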
        query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
        ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

        # Link ID to the upload available in changes table
        query = """UPDATE changes SET changelog_id = :id WHERE source = :source
                   AND version = :version AND architecture = :architecture"""
        session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                                'version': self.pkg.changes['version'], \
                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})

        session.commit()