"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
###############################################################################

import errno
import os
import stat
import sys
import time
import re
import commands
import shutil
import textwrap

import apt_inst
import apt_pkg
import yaml

from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import utils
from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
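
# A minimal usage sketch (not part of the original module): 'f' is a single
# entry from Upload.pkg.files and the session comes from DBConn(); the
# values shown are illustrative only.
#
#   session = DBConn().session()
#   f = {"type": "dsc"}                 # hypothetical files entry
#   file_type = get_type(f, session)    # -> "dsc"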
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.
    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    new = {}
    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % b

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
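
# Usage sketch (an assumption about the callers, e.g. 'dak process-new'):
# feed determine_new() the parsed .changes data, then inspect the returned
# dict of NEW components.
#
#   new = determine_new(u.pkg.changes_file, u.pkg.changes, u.pkg.files,
#                       warn=1, session=session)
#   for pkg in new.keys():
#       print "NEW: %s type=%s component=%s" % (pkg, new[pkg]["type"],
#                                               new[pkg]["component"])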
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages cannot be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
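
# A sketch of the structure check_valid() expects and mutates (hypothetical
# data): a plain deb in a debian-installer section fails the sanity check
# above, so its section id is invalidated.
#
#   new = {"foo": {"section": "debian-installer", "priority": "optional",
#                  "type": "deb"}}
#   check_valid(new, session)
#   assert new["foo"]["section id"] == -1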
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
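
# A short sketch of how TarTime plugs into apt_inst (check_timestamps below
# is the real call site); both cutoffs are plain Unix timestamps:
#
#   tar = TarTime(time.time() + 86400, time.mktime(time.strptime("1975", "%Y")))
#   deb_file = utils.open_file("foo_1.0_amd64.deb")  # hypothetical path
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   print tar.future_files, tar.ancient_files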
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()

    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message, "  ", include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)

    if answer == 'A':
        return
    elif answer == 'Q':
        sys.exit(0)

    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"] + "/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR", "vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote, "  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)

    if answer == 'A':
        return
    elif answer == 'Q':
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

def get_suite_version(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    return q.all()

def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)
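
# Usage sketch for the helpers above (the source name is illustrative):
#
#   src = get_newest_source("hello", session)
#   if src is not None and src.dm_upload_allowed:
#       print "DM uploads allowed for %s %s" % (src.source, src.version)
#   for suite_name, version in get_suite_version("hello", session):
#       print "%s: %s" % (suite_name, version)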
class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################
    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []
        self.later_check_files = []
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """
        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """
        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
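
    # A usage sketch (not part of the original class): parse first, then run
    # the cheaper checks; self.rejects accumulates the reasons either way.
    #
    #   u = Upload()
    #   if u.load_changes("/path/to/foo_1.0_amd64.changes"):  # hypothetical path
    #       u.check_distributions()
    #   print u.package_info()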
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
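
    # For illustration (values are hypothetical): SuiteMappings entries in
    # dak.conf are whitespace-separated words, the mapping type first,
    # followed by its arguments:
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "ignore testing";
    #     "propup-version stable-security testing";
    #   };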
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found but Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            ok = False
            # Check if we've already processed this file if we have a dbchg object
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
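
    # For reference, an assumption about the shape of utils.known_hashes
    # (which is defined elsewhere): a list of (hashname, hashfunc,
    # (major, minor)) tuples, the version pair naming the first changes
    # Format that carries the hash, roughly:
    #
    #   known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
    #                   ("sha256", apt_pkg.sha256sum, (1, 8))]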
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """
        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue
            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()
                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])
                if fingerprint != expected:
                    return False
                dest = os.path.join(target_dir, filename)
                os.symlink(path, dest)
                symlinked.append(dest)
                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False
            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )
                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()
            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')
            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue
                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )
                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue
                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
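
    # Callers are expected to remove the symlinks again when done; a sketch
    # (check_lintian below shows the real pattern):
    #
    #   symlinked = upload.ensure_orig()
    #   try:
    #       pass  # work with the now-complete set of source files
    #   finally:
    #       for link in symlinked:
    #           os.unlink(link)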
    ###########################################################################

    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if not tagfile:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)
            for symlink in symlinked:
                os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
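
    # The Dinstall::LintianTags file is YAML with a top-level 'lintian' key
    # mapping tag categories to lists of tags; a hypothetical example:
    #
    #   lintian:
    #     fatal:
    #       - debian-control-file-uses-obsolete-national-encoding
    #     nonfatal:
    #       - wrong-file-owner-uid-or-gid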
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
               fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
            else:
                # If not a DM, we allow full upload rights
                uid_email = "%s@debian.org" % (fpr.uid.uid)
                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1697 def check_dm_upload(self, fpr, session):
1698 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1699 ## none of the uploaded packages are NEW
1701 for f in self.pkg.files.keys():
1702 if self.pkg.files[f].has_key("byhand"):
1703 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1705 if self.pkg.files[f].has_key("new"):
1706 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1712 r = get_newest_source(self.pkg.changes["source"], session)
1715 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1716 self.rejects.append(rej)
1719 if not r.dm_upload_allowed:
1720 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1721 self.rejects.append(rej)
1724 ## the Maintainer: field of the uploaded .changes file corresponds with
1725 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1727 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1728 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1730 ## the most recent version of the package uploaded to unstable or
1731 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1732 ## non-developer maintainers cannot NMU or hijack packages)
1734         # srcuploaders includes the maintainer
1735         accept = False
1736         for sup in r.srcuploaders:
1737             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1738             # Eww - I hope we never have two people with the same name in Debian
1739             if email == fpr.uid.uid or name == fpr.uid.name:
1740                 accept = True
1741                 break
1743         if not accept:
1744             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1745             return
1747 ## none of the packages are being taken over from other source packages
1748 for b in self.pkg.changes["binary"].keys():
1749 for suite in self.pkg.changes["distribution"].keys():
1750 for s in get_source_by_package_and_suite(b, suite, session):
1751 if s.source != self.pkg.changes["source"]:
1752                     self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
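    # Worked example of the DM checks above (illustrative; the uid
    # 'jane@example.org' and package names are hypothetical): a DM upload of
    # hello 1.0-2 passes only if (a) nothing in it is NEW or BYHAND, (b) the
    # newest hello in unstable or experimental has 'DM-Upload-Allowed: yes',
    # (c) the upload is not a sponsorship, (d) 'jane@example.org' appears in
    # the Maintainer: or Uploaders: fields of that newest source, and (e) no
    # binary is being taken over from another source package in any target
    # suite.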
1756     def check_transition(self, session):
1757         cnf = Config()
1759         sourcepkg = self.pkg.changes["source"]
1761 # No sourceful upload -> no need to do anything else, direct return
1762 # We also work with unstable uploads, not experimental or those going to some
1763 # proposed-updates queue
1764         if "source" not in self.pkg.changes["architecture"] or \
1765            "unstable" not in self.pkg.changes["distribution"]:
1766             return
1768         # Also only check if there is a file defined (and existent) with
1769         # transition checks in it.
1770 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1771         if transpath == "" or not os.path.exists(transpath):
1772             return
1774         # Parse the yaml file
1775         sourcefile = file(transpath, 'r')
1776         sourcecontent = sourcefile.read()
1777         try:
1778             transitions = yaml.load(sourcecontent)
1779         except yaml.YAMLError, msg:
1780             # This shouldn't happen, there is a wrapper to edit the file which
1781             # checks it, but we prefer to be safe rather than end up rejecting
1782             # everything.
1783             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1784             return
1786 # Now look through all defined transitions
1787 for trans in transitions:
1788 t = transitions[trans]
1789             source = t["source"]
1790             expected = t["new"]
1792 # Will be None if nothing is in testing.
1793 current = get_source_in_suite(source, "testing", session)
1794 if current is not None:
1795 compare = apt_pkg.VersionCompare(current.version, expected)
1797 if current is None or compare < 0:
1798 # This is still valid, the current version in testing is older than
1799 # the new version we wait for, or there is none in testing yet
1801 # Check if the source we look at is affected by this.
1802 if sourcepkg in t['packages']:
1803 # The source is affected, lets reject it.
1805                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1806                         sourcepkg, trans)
1808                     if current is not None:
1809                         currentlymsg = "at version %s" % (current.version)
1810                     else:
1811                         currentlymsg = "not present in testing"
1813 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1815 rejectmsg += "\n".join(textwrap.wrap("""Your package
1816 is part of a testing transition designed to get %s migrated (it is
1817 currently %s, we need version %s). This transition is managed by the
1818 Release Team, and %s is the Release-Team member responsible for it.
1819 Please mail debian-release@lists.debian.org or contact %s directly if you
1820 need further assistance. You might want to upload to experimental until this
1821 transition is done."""
1822                         % (source, currentlymsg, expected, t["rm"], t["rm"])))
1824                     self.rejects.append(rejectmsg)
1825                     return
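    # For reference, a minimal ReleaseTransitions file that the loop above
    # would act on might look like this (illustrative sketch; the transition
    # name and values are made up, but the keys are exactly those read
    # above - source, new, rm, reason, packages):
    #
    #   lintian_breakage:
    #     source: lintian
    #     new: "2.4.0"
    #     rm: "Some Release Team Member"
    #     reason: "fixes archive-wide breakage"
    #     packages:
    #       - lintian
    #       - lintian-helper
    #
    # With lintian older than 2.4.0 (or absent) in testing, any upload of a
    # listed package to unstable is rejected until 2.4.0 has migrated.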
1827 ###########################################################################
1828 # End check_signed_by_key checks
1829 ###########################################################################
1831 def build_summaries(self):
1832 """ Build a summary of changes the upload introduces. """
1834 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1836 short_summary = summary
1838 # This is for direport's benefit...
1839 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1841         if byhand or new:
1842             summary += "Changes: " + f
1844             summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1846             summary += self.announce(short_summary, 0)
1848 return (summary, short_summary)
1850 ###########################################################################
1852     def close_bugs(self, summary, action):
1853         """
1854         Send mail to close bugs as instructed by the closes field in the changes file.
1855 Also add a line to summary if any work was done.
1857 @type summary: string
1858 @param summary: summary text, as given by L{build_summaries}
1860         @type action: bool
1861         @param action: If set to false, no real action will be done.
1864         @return: summary. If action was taken, extended by the list of closed bugs.
1865         """
1868 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1870         bugs = self.pkg.changes["closes"].keys()
1872         if not bugs:
1873             return summary
1875         bugs.sort()
1876         summary += "Closing bugs: "
1877         for bug in bugs:
1878             summary += "%s " % (bug)
1880             if action:
1881                 self.Subst["__BUG_NUMBER__"] = bug
1882 if self.pkg.changes["distribution"].has_key("stable"):
1883 self.Subst["__STABLE_WARNING__"] = """
1884 Note that this package is not part of the released stable Debian
1885 distribution. It may have dependencies on other unreleased software,
1886 or other instabilities. Please take care if you wish to install it.
1887 The update will eventually make its way into the next released Debian
1888 distribution."""
1889                 else:
1890                     self.Subst["__STABLE_WARNING__"] = ""
1891 mail_message = utils.TemplateSubst(self.Subst, template)
1892 utils.send_mail(mail_message)
1894 # Clear up after ourselves
1895 del self.Subst["__BUG_NUMBER__"]
1896 del self.Subst["__STABLE_WARNING__"]
1898         if action and self.logger:
1899             self.logger.log(["closing bugs"] + bugs)
1901         summary += "\n"
1903         return summary
1905 ###########################################################################
1907     def announce(self, short_summary, action):
1908         """
1909         Send an announce mail about a new upload.
1911 @type short_summary: string
1912 @param short_summary: Short summary text to include in the mail
1914         @type action: bool
1915         @param action: If set to false, no real action will be done.
1918         @return: Text string describing the action taken.
1919         """
1921         cnf = Config()
1923         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1925 # Only do announcements for source uploads with a recent dpkg-dev installed
1926 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1927            self.pkg.changes["architecture"].has_key("source"):
1928             return ""
1930         lists_done = {}
1931         summary = ""
1933 self.Subst["__SHORT_SUMMARY__"] = short_summary
1935 for dist in self.pkg.changes["distribution"].keys():
1936 suite = get_suite(dist)
1937 if suite is None: continue
1938 announce_list = suite.announce
1939             if announce_list == "" or lists_done.has_key(announce_list):
1940                 continue
1942 lists_done[announce_list] = 1
1943             summary += "Announcing to %s\n" % (announce_list)
1945             if action:
1947 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1948 if cnf.get("Dinstall::TrackingServer") and \
1949 self.pkg.changes["architecture"].has_key("source"):
1950 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1951 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1953 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1954 utils.send_mail(mail_message)
1956 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1958 if cnf.FindB("Dinstall::CloseBugs"):
1959 summary = self.close_bugs(summary, action)
1961         del self.Subst["__SHORT_SUMMARY__"]
1963         return summary
1965 ###########################################################################
1967     def accept (self, summary, short_summary, session=None):
1968         """
1969         Accept an upload.
1971         This moves all files referenced from the .changes into the pool,
1972         sends the accepted mail, announces to lists, closes bugs and
1973         also checks for override disparities. If enabled it will write out
1974         the version history for the BTS Version Tracking.
1977 @type summary: string
1978 @param summary: Summary text
1980 @type short_summary: string
1981         @param short_summary: Short summary
1982         """
1984         cnf = Config()
1985         stats = SummaryStats()
1987         print "Installing."
1988         self.logger.log(["installing changes", self.pkg.changes_file])
1990         poolfiles = []
1992 # Add the .dsc file to the DB first
1993 for newfile, entry in self.pkg.files.items():
1994 if entry["type"] == "dsc":
1995 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1999 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2000 for newfile, entry in self.pkg.files.items():
2001 if entry["type"] == "deb":
2002 poolfiles.append(add_deb_to_db(self, newfile, session))
2004 # If this is a sourceful diff only upload that is moving
2005 # cross-component we need to copy the .orig files into the new
2006 # component too for the same reasons as above.
2007 # XXX: mhy: I think this should be in add_dsc_to_db
2008 if self.pkg.changes["architecture"].has_key("source"):
2009 for orig_file in self.pkg.orig_files.keys():
2010 if not self.pkg.orig_files[orig_file].has_key("id"):
2011 continue # Skip if it's not in the pool
2012 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2013 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2014 continue # Skip if the location didn't change
2017 oldf = get_poolfile_by_id(orig_file_id, session)
2018 old_filename = os.path.join(oldf.location.path, oldf.filename)
2019 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2020 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2022 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2024 # TODO: Care about size/md5sum collisions etc
2025 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2027                     # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2028                     if newf is None:
2029                         utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2030                         newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2032                         session.flush()
2034                         # Don't reference the old file from this changes
2035                         for p in poolfiles:
2036                             if p.file_id == oldf.file_id:
2037                                 poolfiles.remove(p)
2039                         poolfiles.append(newf)
2041                         # Fix up the DSC references
2042                         toremove = []
2044                         for df in source.srcfiles:
2045                             if df.poolfile.file_id == oldf.file_id:
2046                                 # Add a new DSC entry and mark the old one for deletion
2047                                 # Don't do it in the loop so we don't change the thing we're iterating over
2048                                 newdscf = DSCFile()
2049                                 newdscf.source_id = source.source_id
2050                                 newdscf.poolfile_id = newf.file_id
2051                                 session.add(newdscf)
2053                                 toremove.append(df)
2055                         for df in toremove:
2056                             session.delete(df)
2058                         # Flush our changes
2059                         session.flush()
2061 # Make sure that our source object is up-to-date
2062 session.expire(source)
2064 # Add changelog information to the database
2065 self.store_changelog()
2067 # Install the files into the pool
2068 for newfile, entry in self.pkg.files.items():
2069 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2070 utils.move(newfile, destination)
2071 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2072 stats.accept_bytes += float(entry["size"])
2074         # Copy the .changes file across for suites which need it.
2075 copy_changes = dict([(x.copychanges, '')
2076 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2077 if x.copychanges is not None])
2079 for dest in copy_changes.keys():
2080 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2082         # We're done - commit the database changes
2083         session.commit()
2084         # Our SQL session will automatically start a new transaction after
2085         # this commit, so no explicit begin is needed.
2087 # Move the .changes into the 'done' directory
2088 utils.move(self.pkg.changes_file,
2089 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2091 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2092 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2095 self.Subst["__SUMMARY__"] = summary
2096 mail_message = utils.TemplateSubst(self.Subst,
2097 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2098 utils.send_mail(mail_message)
2099 self.announce(short_summary, 1)
2101 ## Helper stuff for DebBugs Version Tracking
2102 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2103 if self.pkg.changes["architecture"].has_key("source"):
2104 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2105 version_history = os.fdopen(fd, 'w')
2106 version_history.write(self.pkg.dsc["bts changelog"])
2107 version_history.close()
2108 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2109 self.pkg.changes_file[:-8]+".versions")
2110 os.rename(temp_filename, filename)
2111 os.chmod(filename, 0644)
2113 # Write out the binary -> source mapping.
2114 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2115 debinfo = os.fdopen(fd, 'w')
2116 for name, entry in sorted(self.pkg.files.items()):
2117 if entry["type"] == "deb":
2118 line = " ".join([entry["package"], entry["version"],
2119 entry["architecture"], entry["source package"],
2120 entry["source version"]])
2121 debinfo.write(line+"\n")
2123 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2124 self.pkg.changes_file[:-8]+".debinfo")
2125 os.rename(temp_filename, filename)
2126 os.chmod(filename, 0644)
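        # For reference, each line of the .debinfo file written above has the
        # form (values illustrative, taken from a hypothetical hello upload):
        #
        #   hello 2.4-1 amd64 hello 2.4-1
        #
        # i.e. binary package, binary version, architecture, source package
        # and source version - the binary-to-source mapping the BTS uses for
        # version tracking.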
2130 # Set up our copy queues (e.g. buildd queues)
2131 for suite_name in self.pkg.changes["distribution"].keys():
2132 suite = get_suite(suite_name, session)
2133             for q in suite.copy_queues:
2134                 for f in poolfiles:
2135                     q.add_file_from_pool(f)
2137         session.commit()
2139         # Finally...
2140         stats.accept_count += 1
2142     def check_override(self):
2143         """
2144         Checks override entries for validity. Mails "Override disparity" warnings,
2145 if that feature is enabled.
2147 Abandons the check if
2148 - override disparity checks are disabled
2149 - mail sending is disabled
2150         """
2151         cnf = Config()
2154         # Abandon the check if override disparity checks have been disabled
2155         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2156             return
2158         summary = self.pkg.check_override()
2159         if summary == "":
2160             return
2163 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2166 self.Subst["__SUMMARY__"] = summary
2167 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2168 utils.send_mail(mail_message)
2169 del self.Subst["__SUMMARY__"]
2171 ###########################################################################
2173     def remove(self, from_dir=None):
2174         """
2175         Used (for instance) in p-u to remove the package from unchecked.
2177         Also removes the package from the holding area.
2178         """
2179 if from_dir is None:
2180             from_dir = self.pkg.directory
2182         h = Holding()
2183 for f in self.pkg.files.keys():
2184 os.unlink(os.path.join(from_dir, f))
2185 if os.path.exists(os.path.join(h.holding_dir, f)):
2186 os.unlink(os.path.join(h.holding_dir, f))
2188 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2189 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2190 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2192 ###########################################################################
2194     def move_to_queue (self, queue):
2195         """
2196         Move files to a destination queue using the permissions in the table.
2197         """
2198         h = Holding()
2199         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2200 queue.path, perms=int(queue.change_perms, 8))
2201 for f in self.pkg.files.keys():
2202 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
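        # Note on the perms handling above: queue.perms and queue.change_perms
        # are stored as octal strings, so int(perms, 8) turns e.g. "0660" into
        # the mode bits utils.move expects. Illustrative:
        #
        #   int("0660", 8) == 0660   # True; both are 432 in decimal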
2204 ###########################################################################
2206     def force_reject(self, reject_files):
2207         """
2208         Forcefully move files from the current directory to the
2209         reject directory. If any file already exists in the reject
2210         directory it will be moved to the morgue to make way for
2211         the new file.
2213 @type reject_files: dict
2214         @param reject_files: file dictionary
2215         """
2217         cnf = Config()
2220         for file_entry in reject_files:
2221 # Skip any files which don't exist or which we don't have permission to copy.
2222             if os.access(file_entry, os.R_OK) == 0:
2223                 continue
2225             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2227             try:
2228                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2229             except OSError, e:
2230                 # File exists? Let's find a new name by adding a number
2231                 if e.errno == errno.EEXIST:
2232                     try:
2233                         dest_file = utils.find_next_free(dest_file, 255)
2234 except NoFreeFilenameError:
2235 # Something's either gone badly Pete Tong, or
2236 # someone is trying to exploit us.
2237                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2238                         return
2240                     # Make sure we really got it
2241                     try:
2242                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2243                     except OSError:
2244                         # Likewise
2245                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2246                         return
2247                 else:
2248                     raise
2249 # If we got here, we own the destination file, so we can
2250 # safely overwrite it.
2251             utils.move(file_entry, dest_file, 1, perms=0660)
2252             os.close(dest_fd)
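        # The loop above relies on O_CREAT|O_EXCL to claim a filename
        # atomically. A standalone sketch of the same technique (illustrative;
        # "somefile" is a made-up name and nothing here comes from this class):
        #
        #   import os, errno
        #   try:
        #       fd = os.open("somefile", os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
        #       # We now own "somefile"; no other process can have created it.
        #   except OSError, e:
        #       if e.errno == errno.EEXIST:
        #           pass  # name already taken - pick another and retry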
2254 ###########################################################################
2255     def do_reject (self, manual=0, reject_message="", notes=""):
2256         """
2257         Reject an upload. If called without a reject message or C{manual} is
2258         true, spawn an editor so the user can write one.
2260         @type manual: bool
2261         @param manual: manual or automated rejection
2263 @type reject_message: string
2264         @param reject_message: A reject message
2265         """
2269 # If we weren't given a manual rejection message, spawn an
2270 # editor so the user can add one in...
2271 if manual and not reject_message:
2272 (fd, temp_filename) = utils.temp_filename()
2273             temp_file = os.fdopen(fd, 'w')
2275             for note in notes:
2276                 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2277                     % (note.author, note.version, note.notedate, note.comment))
2278             temp_file.close()
2279             editor = os.environ.get("EDITOR","vi")
2280             answer = 'E'
2281             while answer == 'E':
2282 os.system("%s %s" % (editor, temp_filename))
2283 temp_fh = utils.open_file(temp_filename)
2284                 reject_message = "".join(temp_fh.readlines())
2285                 temp_fh.close()
2286 print "Reject message:"
2287 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2288                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2289                 answer = "XXX"
2290 while prompt.find(answer) == -1:
2291 answer = utils.our_raw_input(prompt)
2292                     m = re_default_answer.search(prompt)
2293                     if answer == "":
2294                         answer = m.group(1)
2295                     answer = answer[:1].upper()
2296             os.unlink(temp_filename)
2297             if answer == 'A':
2298                 return 1
2299             elif answer == 'Q':
2300                 sys.exit(0)
2302         print "Rejecting.\n"
2304         cnf = Config()
2306         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2307 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2309 # Move all the files into the reject directory
2310 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2311 self.force_reject(reject_files)
2313 # If we fail here someone is probably trying to exploit the race
2314 # so let's just raise an exception ...
2315 if os.path.exists(reason_filename):
2316 os.unlink(reason_filename)
2317 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2319         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2321         if not manual:
2323             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2324 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2325 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2326 os.write(reason_fd, reject_message)
2327             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2328         else:
2329             # Build up the rejection email
2330 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2331 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2332 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2333 self.Subst["__REJECT_MESSAGE__"] = ""
2334 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2335 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2336 # Write the rejection email out as the <foo>.reason file
2337 os.write(reason_fd, reject_mail_message)
2339 del self.Subst["__REJECTOR_ADDRESS__"]
2340 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2341         del self.Subst["__CC__"]
2343         os.close(reason_fd)
2345         # Send the rejection mail
2346         utils.send_mail(reject_mail_message)
2348         if self.logger:
2349             self.logger.log(["rejected", self.pkg.changes_file])
2351         return 0
2353 ################################################################################
2354     def in_override_p(self, package, component, suite, binary_type, filename, session):
2355         """
2356         Check if a package already has override entries in the DB
2358 @type package: string
2359 @param package: package name
2361 @type component: string
2362         @param component: component name
2364         @type suite: int
2365         @param suite: database id of the suite
2367 @type binary_type: string
2368 @param binary_type: type of the package
2370 @type filename: string
2371 @param filename: filename we check
2373         @return: the database result. But no one cares anyway.
2374         """
2379         if binary_type == "": # must be source
2380             file_type = "dsc"
2381         else:
2382             file_type = binary_type
2384 # Override suite name; used for example with proposed-updates
2385 oldsuite = get_suite(suite, session)
2386         if oldsuite is not None and oldsuite.overridesuite:
2387 suite = oldsuite.overridesuite
2389 result = get_override(package, suite, component, file_type, session)
2391 # If checking for a source package fall back on the binary override type
2392 if file_type == "dsc" and len(result) < 1:
2393 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2395         # Remember the section and priority so we can check them later if appropriate
2396         if len(result) > 0:
2397             result = result[0]
2398             self.pkg.files[filename]["override section"] = result.section.section
2399             self.pkg.files[filename]["override priority"] = result.priority.priority
2400             return result
2402         return None
2404 ################################################################################
2405     def get_anyversion(self, sv_list, suite):
2406         """
2407         @type sv_list: list
2408         @param sv_list: list of (suite, version) tuples to check
2410         @type suite: string
2411         @param suite: suite name
2413         @return: highest version found for the suite and the suites which
2414             enhance it, or None
2415         """
2416         Cnf = Config()
2417         anyversion = None
2418         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2419         for (s, v) in sv_list:
2420             if s in [ x.lower() for x in anysuite ]:
2421                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2422                     anyversion = v
2424         return anyversion
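    # Worked example (illustrative; the suite configuration is assumed):
    # with
    #
    #   sv_list = [("unstable", "1.0-1"), ("experimental", "2.0-1")]
    #
    # and suite "unstable" whose VersionChecks::Enhances lists
    # "experimental", anysuite is ["unstable", "experimental"], both tuples
    # match, and the comparison leaves anyversion at "2.0-1" - the highest
    # version visible from the given suite.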
2425 ################################################################################
2427     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2428         """
2429         @type sv_list: list
2430         @param sv_list: list of (suite, version) tuples to check
2432         @type filename: string
2433         @param filename: name of the file being checked, used in reject messages
2435         @type new_version: string
2436         @param new_version: version of the uploaded package
2438         Ensure versions are newer than existing packages in target
2439         suites and that cross-suite version checking rules as
2440         set out in the conf file are satisfied.
2441         """
2443         cnf = Config()
2445 # Check versions for each target suite
2446 for target_suite in self.pkg.changes["distribution"].keys():
2447 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2448 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2450 # Enforce "must be newer than target suite" even if conffile omits it
2451 if target_suite not in must_be_newer_than:
2452 must_be_newer_than.append(target_suite)
2454 for (suite, existent_version) in sv_list:
2455 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2457 if suite in must_be_newer_than and sourceful and vercmp < 1:
2458 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2460             if suite in must_be_older_than and vercmp > -1:
2461                 cansave = 0
2463                 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2464 # we really use the other suite, ignoring the conflicting one ...
2465 addsuite = self.pkg.changes["distribution-version"][suite]
2467 add_version = self.get_anyversion(sv_list, addsuite)
2468                     target_version = self.get_anyversion(sv_list, target_suite)
2470                     if not add_version:
2471                         # not add_version can only happen if we map to a suite
2472 # that doesn't enhance the suite we're propup'ing from.
2473 # so "propup-ver x a b c; map a d" is a problem only if
2474 # d doesn't enhance a.
2476 # i think we could always propagate in this case, rather
2477 # than complaining. either way, this isn't a REJECT issue
2479 # And - we really should complain to the dorks who configured dak
2480 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2481 self.pkg.changes.setdefault("propdistribution", {})
2482                         self.pkg.changes["propdistribution"][addsuite] = 1
2483                         cansave = 1
2484                     elif not target_version:
2485                         # not target_version is true when the package is NEW
2486 # we could just stick with the "...old version..." REJECT
2487 # for this, I think.
2488                         self.rejects.append("Won't propagate NEW packages.")
2489 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2490                         # propagation would be redundant. no need to reject though.
2491                         self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2493 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2494 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2495                         # propagate!!
2496                         self.warnings.append("Propagating upload to %s" % (addsuite))
2497 self.pkg.changes.setdefault("propdistribution", {})
2498                         self.pkg.changes["propdistribution"][addsuite] = 1
2499                         cansave = 1
2501                 if not cansave:
2502                     self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
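            # Worked example of the rules above (illustrative; package names
            # and versions are made up): uploading foo 1.0-2 to unstable
            # while unstable already carries foo 1.0-3 gives vercmp < 1 for a
            # sourceful upload, hence the "old version ... >= new version"
            # reject. Conversely, if stable must be older than the upload and
            # stable carries a higher version, the distribution-version
            # mapping can propagate the upload to e.g. proposed-updates
            # instead of rejecting, provided the propagated version ordering
            # holds (cansave).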
2504 ################################################################################
2505 def check_binary_against_db(self, filename, session):
2506 # Ensure version is sane
2507 q = session.query(BinAssociation)
2508 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2509 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2511 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2512 filename, self.pkg.files[filename]["version"], sourceful=False)
2514 # Check for any existing copies of the file
2515 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2516 q = q.filter_by(version=self.pkg.files[filename]["version"])
2517         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2519         if q.count() > 0:
2520             self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2522 ################################################################################
2524 def check_source_against_db(self, filename, session):
2525 source = self.pkg.dsc.get("source")
2526 version = self.pkg.dsc.get("version")
2528 # Ensure version is sane
2529 self.cross_suite_version_check(get_suite_version(source, session),
2530 filename, version, sourceful=True)
2532 ################################################################################
2533     def check_dsc_against_db(self, filename, session):
2534         """
2536         @warning: NB: this function can remove entries from the 'files' index [if
2537 the orig tarball is a duplicate of the one in the archive]; if
2538 you're iterating over 'files' and call this function as part of
2539 the loop, be sure to add a check to the top of the loop to
2540 ensure you haven't just tried to dereference the deleted entry.
2541         """
2543         Cnf = Config()
2545         self.pkg.orig_files = {} # XXX: do we need to clear it?
2546 orig_files = self.pkg.orig_files
2548 # Try and find all files mentioned in the .dsc. This has
2549 # to work harder to cope with the multiple possible
2550 # locations of an .orig.tar.gz.
2551 # The ordering on the select is needed to pick the newest orig
2552 # when it exists in multiple places.
2553         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2554             found = None
2555             if self.pkg.files.has_key(dsc_name):
2556 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2557 actual_size = int(self.pkg.files[dsc_name]["size"])
2558 found = "%s in incoming" % (dsc_name)
2560 # Check the file does not already exist in the archive
2561 ql = get_poolfile_like_name(dsc_name, session)
2563                 # Strip out anything that isn't '%s' or '/%s$'
2564                 for i in ql:
2565                     if not i.filename.endswith(dsc_name):
2566                         ql.remove(i)
2568 # "[dak] has not broken them. [dak] has fixed a
2569                 # brokenness. Your crappy hack exploited a bug in
2570                 # the old dinstall.
2572 # "(Come on! I thought it was always obvious that
2573 # one just doesn't release different files with
2574 # the same name and version.)"
2575 # -- ajk@ on d-devel@l.d.o
2577                 if len(ql) > 0:
2578                     # Ignore exact matches for .orig.tar.gz
2579                     match = 0
2580                     if re_is_orig_source.match(dsc_name):
2581                         for i in ql:
2582 if self.pkg.files.has_key(dsc_name) and \
2583 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2584 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2585 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2586 # TODO: Don't delete the entry, just mark it as not needed
2587 # This would fix the stupidity of changing something we often iterate over
2588 # whilst we're doing it
2589 del self.pkg.files[dsc_name]
2590 dsc_entry["files id"] = i.file_id
2591 if not orig_files.has_key(dsc_name):
2592 orig_files[dsc_name] = {}
2593                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2594                                 match = 1
2596                                 # Don't bitch that we couldn't find this file later
2597                                 try:
2598                                     self.later_check_files.remove(dsc_name)
2599                                 except ValueError:
2600                                     pass
2603                     if not match:
2604                         self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2606 elif re_is_orig_source.match(dsc_name):
2608 ql = get_poolfile_like_name(dsc_name, session)
2610 # Strip out anything that isn't '%s' or '/%s$'
2611 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2612                 for i in ql:
2613                     if not i.filename.endswith(dsc_name):
2614                         ql.remove(i)
2616                 if len(ql) > 0:
2617 # Unfortunately, we may get more than one match here if,
2618 # for example, the package was in potato but had an -sa
2619 # upload in woody. So we need to choose the right one.
2621                     # default to something sane in case we don't match any or have only one
2622                     x = ql[0]
2624                     if len(ql) > 1:
2625                         for i in ql:
2626                             old_file = os.path.join(i.location.path, i.filename)
2627                             old_file_fh = utils.open_file(old_file)
2628                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2629                             old_file_fh.close()
2630                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2631                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2632                                 x = i
2634                     old_file = os.path.join(x.location.path, x.filename)
2635                     old_file_fh = utils.open_file(old_file)
2636                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2637                     old_file_fh.close()
2638                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2639                     found = "%s in the archive" % (old_file)
2640                     suite_type = x.location.archive_type
2641 # need this for updating dsc_files in install()
2642 dsc_entry["files id"] = x.file_id
2643 # See install() in process-accepted...
2644 if not orig_files.has_key(dsc_name):
2645 orig_files[dsc_name] = {}
2646 orig_files[dsc_name]["id"] = x.file_id
2647 orig_files[dsc_name]["path"] = old_file
2648 orig_files[dsc_name]["location"] = x.location.location_id
2649             else:
2650                 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2651 # Not there? Check the queue directories...
2652 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2653                     if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2654                         continue
2655 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2656 if os.path.exists(in_otherdir):
2657 in_otherdir_fh = utils.open_file(in_otherdir)
2658 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2659 in_otherdir_fh.close()
2660                         actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2661                         found = in_otherdir
2662 if not orig_files.has_key(dsc_name):
2663 orig_files[dsc_name] = {}
2664                         orig_files[dsc_name]["path"] = in_otherdir
2666                 if not found:
2667                     self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2668                     continue
2669             else:
2670                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2671                 continue
2672 if actual_md5 != dsc_entry["md5sum"]:
2673 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2674 if actual_size != int(dsc_entry["size"]):
2675 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2677 ################################################################################
2678 # This is used by process-new and process-holding to recheck a changes file
2679 # at the time we're running. It mainly wraps various other internal functions
2680 # and is similar to accepted_checks - these should probably be tidied up
2682     def recheck(self, session):
2683         cnf = Config()
2684         for f in self.pkg.files.keys():
2685             # The .orig.tar.gz can disappear out from under us if it's a
2686             # duplicate of one in the archive.
2687             if not self.pkg.files.has_key(f):
2688                 continue
2690 entry = self.pkg.files[f]
2692 # Check that the source still exists
2693 if entry["type"] == "deb":
2694 source_version = entry["source version"]
2695 source_package = entry["source package"]
2696 if not self.pkg.changes["architecture"].has_key("source") \
2697 and not source_exists(source_package, source_version, \
2698 suites = self.pkg.changes["distribution"].keys(), session = session):
2699 source_epochless_version = re_no_epoch.sub('', source_version)
2700                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2701                     found = False
2702                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2703                         if cnf.has_key("Dir::Queue::%s" % (q)):
2704                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2705                                 found = True
2706                     if not found:
2707                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2709 # Version and file overwrite checks
2710 if entry["type"] == "deb":
2711 self.check_binary_against_db(f, session)
2712 elif entry["type"] == "dsc":
2713 self.check_source_against_db(f, session)
2714 self.check_dsc_against_db(f, session)
2716 ################################################################################
2717 def accepted_checks(self, overwrite_checks, session):
2718 # Recheck anything that relies on the database; since that's not
2719 # frozen between accept and our run time when called from p-a.
2721         # overwrite_checks is set to False when installing to stable/oldstable
2723         propagate = {}
2724         nopropagate = {}
2726         # Find the .dsc (again)
2727         dsc_filename = None
2728         for f in self.pkg.files.keys():
2729             if self.pkg.files[f]["type"] == "dsc":
2730                 dsc_filename = f
2732 for checkfile in self.pkg.files.keys():
2733             # The .orig.tar.gz can disappear out from under us if it's a
2734             # duplicate of one in the archive.
2735             if not self.pkg.files.has_key(checkfile):
2736                 continue
2738 entry = self.pkg.files[checkfile]
2740 # Check that the source still exists
2741 if entry["type"] == "deb":
2742 source_version = entry["source version"]
2743 source_package = entry["source package"]
2744 if not self.pkg.changes["architecture"].has_key("source") \
2745 and not source_exists(source_package, source_version, \
2746                    suites = self.pkg.changes["distribution"].keys(), \
2747                    session = session):
2748 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2750 # Version and file overwrite checks
2751 if overwrite_checks:
2752 if entry["type"] == "deb":
2753 self.check_binary_against_db(checkfile, session)
2754 elif entry["type"] == "dsc":
2755 self.check_source_against_db(checkfile, session)
2756 self.check_dsc_against_db(dsc_filename, session)
2758         # propagate in the case it is in the override tables:
2759         for suite in self.pkg.changes.get("propdistribution", {}).keys():
2760             if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2761                 propagate[suite] = 1
2762             else:
2763                 nopropagate[suite] = 1
2765         for suite in propagate.keys():
2766             if suite in nopropagate:
2767                 continue
2768             self.pkg.changes["distribution"][suite] = 1
2770         for checkfile in self.pkg.files.keys():
2771             entry = self.pkg.files[checkfile]
2772             # Check the package is still in the override tables
2773             for suite in self.pkg.changes["distribution"].keys():
2774                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2775                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2776 ################################################################################
2777 # If any file of an upload has a recent mtime then chances are good
2778 # the file is still being uploaded.
2780     def upload_too_new(self):
2781         cnf = Config()
2782         too_new = False
2783         # Move back to the original directory to get accurate time stamps
2784         cwd = os.getcwd()
2785         os.chdir(self.pkg.directory)
2786         file_list = self.pkg.files.keys()
2787         file_list.extend(self.pkg.dsc_files.keys())
2788         file_list.append(self.pkg.changes_file)
2789         for f in file_list:
2790             try:
2791                 last_modified = time.time()-os.path.getmtime(f)
2792                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2793                     too_new = True
2794                     break
2795             except:
2796                 pass
2798         os.chdir(cwd)
2799         return too_new
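    # Illustrative: with Dinstall::SkipTime set to 300 (a value assumed here
    # for the example), any file whose mtime is less than five minutes old
    # marks the whole upload as too new, and the queue run leaves it alone on
    # the assumption that it is still being uploaded.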
2801 def store_changelog(self):
2803 # Skip binary-only upload if it is not a bin-NMU
2804 if not self.pkg.changes['architecture'].has_key('source'):
2805 from daklib.regexes import re_bin_only_nmu
2806             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2807                 return
2809 session = DBConn().session()
2811 # Check if upload already has a changelog entry
2812 query = """SELECT changelog_id FROM changes WHERE source = :source
2813 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2814 if session.execute(query, {'source': self.pkg.changes['source'], \
2815 'version': self.pkg.changes['version'], \
2816                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2817             session.commit()
2818             return
2820 # Add current changelog text into changelogs_text table, return created ID
2821 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2822 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2824 # Link ID to the upload available in changes table
2825 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2826 AND version = :version AND architecture = :architecture"""
2827 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2828 'version': self.pkg.changes['version'], \
2829                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2830         session.commit()