# daklib/queue.py — from dak.git (gitweb scrape artifacts removed)
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Classify the entry: an explicit dbtype wins, then source extensions
    # map to "dsc"; the unreadable-source special case is tolerated, and
    # anything else is fatal.
    if "dbtype" in f:
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        if f['architecture'] == 'source' and file_type == 'unreadable':
            utils.warn('unreadable source file (will continue and hope for the best)')
            return file_type
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Make sure there is an override type for it in the database.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
93
94 ################################################################################
95
96 # Determine what parts in a .changes are NEW
97
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
99     """
100     Determine what parts in a C{changes} file are NEW.
101
102     @type filename: str
103     @param filename: changes filename
104
105     @type changes: Upload.Pkg.changes dict
106     @param changes: Changes dictionary
107
108     @type files: Upload.Pkg.files dict
109     @param files: Files dictionary
110
111     @type warn: bool
112     @param warn: Warn if overrides are added for (old)stable
113
114     @type dsc: Upload.Pkg.dsc dict
115     @param dsc: (optional); Dsc dictionary
116
117     @type new: dict
118     @param new: new packages as returned by a previous call to this function, but override information may have changed
119
120     @rtype: dict
121     @return: dictionary of NEW components.
122
123     """
124     # TODO: This should all use the database instead of parsing the changes
125     # file again
126     byhand = {}
127     if new is None:
128         new = {}
129
130     dbchg = get_dbchange(filename, session)
131     if dbchg is None:
132         print "Warning: cannot find changes file in database; won't check byhand"
133
134     # Try to get the Package-Set field from an included .dsc file (if possible).
135     if dsc:
136         for package, entry in build_package_set(dsc, session).items():
137             if not new.has_key(package):
138                 new[package] = entry
139
140     # Build up a list of potentially new things
141     for name, f in files.items():
142         # Keep a record of byhand elements
143         if f["section"] == "byhand":
144             byhand[name] = 1
145             continue
146
147         pkg = f["package"]
148         priority = f["priority"]
149         section = f["section"]
150         file_type = get_type(f, session)
151         component = f["component"]
152
153         if file_type == "dsc":
154             priority = "source"
155
156         if not new.has_key(pkg):
157             new[pkg] = {}
158             new[pkg]["priority"] = priority
159             new[pkg]["section"] = section
160             new[pkg]["type"] = file_type
161             new[pkg]["component"] = component
162             new[pkg]["files"] = []
163         else:
164             old_type = new[pkg]["type"]
165             if old_type != file_type:
166                 # source gets trumped by deb or udeb
167                 if old_type == "dsc":
168                     new[pkg]["priority"] = priority
169                     new[pkg]["section"] = section
170                     new[pkg]["type"] = file_type
171                     new[pkg]["component"] = component
172
173         new[pkg]["files"].append(name)
174
175         if f.has_key("othercomponents"):
176             new[pkg]["othercomponents"] = f["othercomponents"]
177
178     # Fix up the list of target suites
179     cnf = Config()
180     for suite in changes["suite"].keys():
181         oldsuite = get_suite(suite, session)
182         if not oldsuite:
183             print "WARNING: Invalid suite %s found" % suite
184             continue
185
186         if oldsuite.overridesuite:
187             newsuite = get_suite(oldsuite.overridesuite, session)
188
189             if newsuite:
190                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191                     oldsuite.overridesuite, suite)
192                 del changes["suite"][suite]
193                 changes["suite"][oldsuite.overridesuite] = 1
194             else:
195                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
196                     oldsuite.overridesuite, suite)
197
198     # Check for unprocessed byhand files
199     if dbchg is not None:
200         for b in byhand.keys():
201             # Find the file entry in the database
202             found = False
203             for f in dbchg.files:
204                 if f.filename == b:
205                     found = True
206                     # If it's processed, we can ignore it
207                     if f.processed:
208                         del byhand[b]
209                     break
210
211             if not found:
212                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
213
214     # Check for new stuff
215     for suite in changes["suite"].keys():
216         for pkg in new.keys():
217             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
218             if len(ql) > 0:
219                 for file_entry in new[pkg]["files"]:
220                     if files[file_entry].has_key("new"):
221                         del files[file_entry]["new"]
222                 del new[pkg]
223
224     if warn:
225         for s in ['stable', 'oldstable']:
226             if changes["suite"].has_key(s):
227                 print "WARNING: overrides will be added for %s!" % s
228         for pkg in new.keys():
229             if new[pkg].has_key("othercomponents"):
230                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
231
232     return new, byhand
233
234 ################################################################################
235
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    Modifies C{new} in place: sets a "section id" and "priority id" entry
    per package, with -1 marking an unknown or invalid combination.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section name to its database id (-1 if unknown).
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Resolve the priority name to its database id (-1 if unknown).
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # NOTE(review): 'priority' is the DB object, not the name string;
        # this presumably relies on the ORM class comparing equal to its
        # name -- confirm, otherwise priority_name was intended here.
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
277
278 ###############################################################################
279
# Used by Upload.check_timestamps
class TarTime(object):
    """
    Collects tar members whose mtime falls outside an allowed window.

    future_files / ancient_files map member name -> mtime for members
    newer than future_cutoff / older than past_cutoff respectively.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        """Forget all recorded out-of-range members."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, member, data):
        """Record C{member} if its mtime is beyond either cutoff.

        Bug fix: the original body assigned to self.future_files[Name] /
        self.ancient_files[Name] with MTime as value -- both undefined
        names, raising NameError whenever a timestamp was out of range.
        """
        if member.mtime > self.future_cutoff:
            self.future_files[member.name] = member.mtime
        if member.mtime < self.past_cutoff:
            self.ancient_files[member.name] = member.mtime
296
297 ###############################################################################
298
def prod_maintainer(notes, upload):
    """
    Interactively compose and send a "prod" mail to the maintainer of
    C{upload}, seeded with the comment text of each entry in C{notes}.
    The operator edits the message in $EDITOR and then chooses to
    [P]rod, Edit again, Abandon, or Quit dak entirely.
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Re-open the editor until the operator picks something other than (E)dit.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                # Empty input selects the bracketed default from the prompt.
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        # NOTE(review): end() is defined elsewhere in this module; it is
        # expected to clean up before exiting -- confirm.
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
348
349 ################################################################################
350
def edit_note(note, upload, session, trainee=False):
    """
    Interactively edit a note for C{upload} in $EDITOR and store it as a
    new NewComment row ([D]one commits, Abandon returns, Quit exits dak).

    NOTE(review): the C{note} parameter is never used in this body -- the
    note text comes solely from the editor session; confirm with callers.
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Re-open the editor until the operator picks something other than (E)dit.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                # Empty input selects the bracketed default from the prompt.
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        # NOTE(review): end() is defined elsewhere in this module; it is
        # expected to clean up before exiting -- confirm.
        end()
        sys.exit(0)

    # Persist the edited note against the upload's source package/version.
    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
386
387 ###############################################################################
388
# Suite names Debian Maintainers (DMs) can upload to; used by
# get_newest_source() below to restrict its query.
dm_suites = ['unstable', 'experimental']
391
def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    in_dm_suites = DBSource.suites.any(Suite.suite_name.in_(dm_suites))
    query = session.query(DBSource).filter_by(source = source)
    query = query.filter(in_dm_suites).order_by(desc('source.version'))
    return query.first()
401
def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    query = session.query(Suite.suite_name, DBSource.version)
    query = query.join(Suite.sources).filter_by(source = source)
    return query.all()
407
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    query = session.query(DBSource)
    query = query.join(DBSource.binaries).filter_by(package = package)
    return query.join(DBBinary.suites).filter_by(suite_name = suite_name)
416
def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    # "all" architecture packages match every concrete architecture.
    wanted_arches = [arch_string, 'all']
    query = session.query(Suite.suite_name, DBBinary.version)
    query = query.join(Suite.binaries).filter_by(package = package)
    query = query.join(DBBinary.architecture)
    return query.filter(Architecture.arch_string.in_(wanted_arches)).all()
426
427 class Upload(object):
428     """
429     Everything that has to do with an upload processed.
430
431     """
    def __init__(self):
        # logger is attached later by callers; pkg holds the parsed
        # .changes state for this upload.  reset() initialises the
        # substitution map and the rejects/warnings/notes lists.
        self.logger = None
        self.pkg = Changes()
        self.reset()
436
437     ###########################################################################
438
    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        if cnf.has_key("Dinstall::BugServer"):
            self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        # Per-upload message accumulators, rendered by package_info().
        self.rejects = []
        self.warnings = []
        self.notes = []

        # Files whose checks are deferred to a later processing stage.
        self.later_check_files = []

        self.pkg.reset()
458
459     def package_info(self):
460         """
461         Format various messages from this Upload to send to the maintainer.
462         """
463
464         msgs = (
465             ('Reject Reasons', self.rejects),
466             ('Warnings', self.warnings),
467             ('Notes', self.notes),
468         )
469
470         msg = ''
471         for title, messages in msgs:
472             if messages:
473                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
474         msg += '\n\n'
475
476         return msg
477
478     ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                # Sponsored upload: also address the sponsor when known.
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package tracking system for source uploads, if configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
535
536     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        # NOTE(review): Cnf is assigned but not referenced in this method.
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        # (e.g. "amd64 i386" -> {"amd64": 1, "i386": 1}).
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # Fall back to empty values so later code can rely on the keys
            # always being present.
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
648
649     ###########################################################################
650
    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings.  Each SuiteMappings entry is a whitespace
        # separated directive: map/silent-map, map-unreleased, ignore,
        # reject, or propup-version.
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Replace suite 'source' with 'dest'; "map" also leaves a
                # note for the operator, "silent-map" does not.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Remap only when the upload contains an architecture the
                # source suite does not carry.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Silently drop the suite from the target list (warn only).
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not get_suite(suite.lower()):
                self.rejects.append("Unknown distribution `%s'." % (suite))
705
706     ###########################################################################
707
708     def binary_file_checks(self, f, session):
709         cnf = Config()
710         entry = self.pkg.files[f]
711
712         # Extract package control information
713         deb_file = utils.open_file(f)
714         try:
715             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
716         except:
717             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
718             deb_file.close()
719             # Can't continue, none of the checks on control would work.
720             return
721
722         # Check for mandantory "Description:"
723         deb_file.seek(0)
724         try:
725             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
726         except:
727             self.rejects.append("%s: Missing Description in binary package" % (f))
728             return
729
730         deb_file.close()
731
732         # Check for mandatory fields
733         for field in [ "Package", "Architecture", "Version" ]:
734             if control.Find(field) == None:
735                 # Can't continue
736                 self.rejects.append("%s: No %s field in control." % (f, field))
737                 return
738
739         # Ensure the package name matches the one give in the .changes
740         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
741             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
742
743         # Validate the package field
744         package = control.Find("Package")
745         if not re_valid_pkg_name.match(package):
746             self.rejects.append("%s: invalid package name '%s'." % (f, package))
747
748         # Validate the version field
749         version = control.Find("Version")
750         if not re_valid_version.match(version):
751             self.rejects.append("%s: invalid version number '%s'." % (f, version))
752
753         # Ensure the architecture of the .deb is one we know about.
754         default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
755         architecture = control.Find("Architecture")
756         upload_suite = self.pkg.changes["distribution"].keys()[0]
757
758         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
759             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
760             self.rejects.append("Unknown architecture '%s'." % (architecture))
761
762         # Ensure the architecture of the .deb is one of the ones
763         # listed in the .changes.
764         if not self.pkg.changes["architecture"].has_key(architecture):
765             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
766
767         # Sanity-check the Depends field
768         depends = control.Find("Depends")
769         if depends == '':
770             self.rejects.append("%s: Depends field is empty." % (f))
771
772         # Sanity-check the Provides field
773         provides = control.Find("Provides")
774         if provides:
775             provide = re_spacestrip.sub('', provides)
776             if provide == '':
777                 self.rejects.append("%s: Provides field is empty." % (f))
778             prov_list = provide.split(",")
779             for prov in prov_list:
780                 if not re_valid_pkg_name.match(prov):
781                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
782
783         # If there is a Built-Using field, we need to check we can find the
784         # exact source version
785         built_using = control.Find("Built-Using")
786         if built_using:
787             try:
788                 entry["built-using"] = []
789                 for dep in apt_pkg.parse_depends(built_using):
790                     bu_s, bu_v, bu_e = dep[0]
791                     # Check that it's an exact match dependency and we have
792                     # some form of version
793                     if bu_e != "=" or len(bu_v) < 1:
794                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
795                     else:
796                         # Find the source id for this version
797                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
798                         if len(bu_so) != 1:
799                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
800                         else:
801                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
802
803             except ValueError, e:
804                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
805
806
807         # Check the section & priority match those given in the .changes (non-fatal)
808         if     control.Find("Section") and entry["section"] != "" \
809            and entry["section"] != control.Find("Section"):
810             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
811                                 (f, control.Find("Section", ""), entry["section"]))
812         if control.Find("Priority") and entry["priority"] != "" \
813            and entry["priority"] != control.Find("Priority"):
814             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
815                                 (f, control.Find("Priority", ""), entry["priority"]))
816
817         entry["package"] = package
818         entry["architecture"] = architecture
819         entry["version"] = version
820         entry["maintainer"] = control.Find("Maintainer", "")
821
822         if f.endswith(".udeb"):
823             self.pkg.files[f]["dbtype"] = "udeb"
824         elif f.endswith(".deb"):
825             self.pkg.files[f]["dbtype"] = "deb"
826         else:
827             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
828
829         entry["source"] = control.Find("Source", entry["package"])
830
831         # Get the source version
832         source = entry["source"]
833         source_version = ""
834
835         if source.find("(") != -1:
836             m = re_extract_src_version.match(source)
837             source = m.group(1)
838             source_version = m.group(2)
839
840         if not source_version:
841             source_version = self.pkg.files[f]["version"]
842
843         entry["source package"] = source
844         entry["source version"] = source_version
845
846         # Ensure the filename matches the contents of the .deb
847         m = re_isadeb.match(f)
848
849         #  package name
850         file_package = m.group(1)
851         if entry["package"] != file_package:
852             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
853                                 (f, file_package, entry["dbtype"], entry["package"]))
854         epochless_version = re_no_epoch.sub('', control.Find("Version"))
855
856         #  version
857         file_version = m.group(2)
858         if epochless_version != file_version:
859             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
860                                 (f, file_version, entry["dbtype"], epochless_version))
861
862         #  architecture
863         file_architecture = m.group(3)
864         if entry["architecture"] != file_architecture:
865             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
866                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
867
868         # Check for existent source
869         source_version = entry["source version"]
870         source_package = entry["source package"]
871         if self.pkg.changes["architecture"].has_key("source"):
872             if source_version != self.pkg.changes["version"]:
873                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
874                                     (source_version, f, self.pkg.changes["version"]))
875         else:
876             # Check in the SQL database
877             if not source_exists(source_package, source_version, suites = \
878                 self.pkg.changes["distribution"].keys(), session = session):
879                 # Check in one of the other directories
880                 source_epochless_version = re_no_epoch.sub('', source_version)
881                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
882                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
883                     entry["byhand"] = 1
884                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
885                     entry["new"] = 1
886                 else:
887                     dsc_file_exists = False
888                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
889                         if cnf.has_key("Dir::Queue::%s" % (myq)):
890                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
891                                 dsc_file_exists = True
892                                 break
893
894                     if not dsc_file_exists:
895                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
896
897         # Check the version and for file overwrites
898         self.check_binary_against_db(f, session)
899
900     def source_file_checks(self, f, session):
901         entry = self.pkg.files[f]
902
903         m = re_issource.match(f)
904         if not m:
905             return
906
907         entry["package"] = m.group(1)
908         entry["version"] = m.group(2)
909         entry["type"] = m.group(3)
910
911         # Ensure the source package name matches the Source filed in the .changes
912         if self.pkg.changes["source"] != entry["package"]:
913             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
914
915         # Ensure the source version matches the version in the .changes file
916         if re_is_orig_source.match(f):
917             changes_version = self.pkg.changes["chopversion2"]
918         else:
919             changes_version = self.pkg.changes["chopversion"]
920
921         if changes_version != entry["version"]:
922             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
923
924         # Ensure the .changes lists source in the Architecture field
925         if not self.pkg.changes["architecture"].has_key("source"):
926             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
927
928         # Check the signature of a .dsc file
929         if entry["type"] == "dsc":
930             # check_signature returns either:
931             #  (None, [list, of, rejects]) or (signature, [])
932             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
933             for j in rejects:
934                 self.rejects.append(j)
935
936         entry["architecture"] = "source"
937
    def per_suite_file_checks(self, f, suite, session):
        """
        Run the per-suite checks for one upload file.

        Applies configured component mappings, validates the (mapped)
        component against the database, flags NEW packages, determines
        the pool location and any existing pool copy of the file, and
        records whether the package already lives in another component.
        Problems are appended to self.rejects; self.pkg.files[f] is
        updated in place.

        @type f: string
        @param f: filename (key into self.pkg.files)

        @type suite: string
        @param suite: name of the target suite being checked

        @param session: SQLAlchemy session used for database lookups
        """
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        # ComponentMappings entries are "source dest" pairs; the original
        # component is preserved for later reference.
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if entry["component"] not in get_component_names(session):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        # NOTE(review): largely overlaps with the get_component_names()
        # check above — presumably kept as a belt-and-braces DB lookup.
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        # check_poolfile returns (found, poolfile); found is None on
        # multiple matches, False on a mismatching existing copy.
        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component
1013
    def check_files(self, action=True):
        """
        Run the per-file checks over everything listed in the .changes.

        Copies the files into the holding area (when action is True),
        rejects .changes files already known to dak, classifies each
        file as binary / source / byhand, dispatches to
        binary_file_checks / source_file_checks, and finally runs
        per_suite_file_checks for every target suite.  Problems are
        appended to self.rejects / self.warnings.

        @type action: boolean
        @param action: when False (dry run / -n) files are not copied to
                       holding, so unreadable files generate their
                       reject messages here instead
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            # Copy everything into the holding area, working from the
            # directory the .changes was uploaded to.
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if (not has_binaries) and (not cnf.FindB("Dinstall::AllowSourceOnlyUploads")):
                self.rejects.append("source only uploads are not supported.")
1112
1113     ###########################################################################
1114
1115     def __dsc_filename(self):
1116         """
1117         Returns: (Status, Dsc_Filename)
1118         where
1119           Status: Boolean; True when there was no error, False otherwise
1120           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1121         """
1122         dsc_filename = None
1123
1124         # find the dsc
1125         for name, entry in self.pkg.files.items():
1126             if entry.has_key("type") and entry["type"] == "dsc":
1127                 if dsc_filename:
1128                     return False, "cannot process a .changes file with multiple .dsc's."
1129                 else:
1130                     dsc_filename = name
1131
1132         if not dsc_filename:
1133             return False, "source uploads must contain a dsc file"
1134
1135         return True, dsc_filename
1136
    def load_dsc(self, action=True, signing_rules=1):
        """
        Find and load the dsc from self.pkg.files into self.dsc

        @type action: boolean
        @param action: when False, an unreadable dsc is reported as an
                       error; see the note on CantOpenError below

        @param signing_rules: passed through to utils.parse_changes

        Returns: (Status, Reason)
        where
          Status: Boolean; True when there was no error, False otherwise
          Reason: String; When Status is False this describes the error
        """

        # find the dsc
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            return False, dsc_filename

        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
        except CantOpenError:
            # NOTE(review): when action is True a CantOpenError is
            # deliberately swallowed and we still return success —
            # presumably the file may turn up in holding/DB later; confirm.
            if not action:
                return False, "%s: can't read file." % (dsc_filename)
        except ParseChangesError, line:
            return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
        except InvalidDscError, line:
            return False, "%s: syntax error on line %s." % (dsc_filename, line)
        except ChangesUnicodeError:
            return False, "%s: dsc file not proper utf-8." % (dsc_filename)

        return True, None
1166
1167     ###########################################################################
1168
    def check_dsc(self, action=True, session=None):
        """
        Returns bool indicating whether or not the source changes are valid.

        Loads and validates the .dsc: mandatory fields, source/version
        syntax, allowed source formats per target suite, Maintainer and
        Build-Depends parseability, version agreement with the .changes,
        the Files field contents, and database cross-checks.  Also
        resolves files deferred by check_files via later_check_files.
        Problems are appended to self.rejects.

        @type action: boolean
        @param action: passed through to load_dsc

        @param session: unused here; database work opens its own session
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        (status, reason) = self.load_dsc(action=action)
        if not status:
            self.rejects.append(reason)
            return False
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            self.rejects.append(dsc_filename)
            return False

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s references in changes" % f)

        session.close()

        return (len(self.rejects) == 0)
1268
1269     ###########################################################################
1270
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Symlinks the upload's source files (and any pool-resident orig
        tarballs) into the current working directory, unpacks the source
        and collects every changelog version line into
        self.pkg.dsc["bts changelog"].  Problems are appended to
        self.rejects.

        @type source_dir: string
        @param source_dir: directory holding the uploaded source files
        """

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                # Orig tarballs already located in the pool are handled below.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        try:
            unpacked = UnpackedSource(dsc_filename)
        except:
            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
            return

        # BTS version tracking is optional; without it we are done here.
        if not cnf.Find("Dir::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_file = unpacked.get_changelog_file()
        if changelog_file is None:
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()
        unpacked.cleanup()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1343
1344     def check_source(self):
1345         # Bail out if:
1346         #    a) there's no source
1347         if not self.pkg.changes["architecture"].has_key("source"):
1348             return
1349
1350         tmpdir = utils.temp_dirname()
1351
1352         # Move into the temporary directory
1353         cwd = os.getcwd()
1354         os.chdir(tmpdir)
1355
1356         # Get the changelog version history
1357         self.get_changelog_versions(cwd)
1358
1359         # Move back and cleanup the temporary tree
1360         os.chdir(cwd)
1361
1362         try:
1363             shutil.rmtree(tmpdir)
1364         except OSError, e:
1365             if e.errno != errno.EACCES:
1366                 print "foobar"
1367                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1368
1369             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1370             # We probably have u-r or u-w directories so chmod everything
1371             # and try again.
1372             cmd = "chmod -R u+rwx %s" % (tmpdir)
1373             result = os.system(cmd)
1374             if result != 0:
1375                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1376             shutil.rmtree(tmpdir)
1377         except Exception, e:
1378             print "foobar2 (%s)" % e
1379             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1380
1381     ###########################################################################
1382     def ensure_hashes(self):
1383         # Make sure we recognise the format of the Files: field in the .changes
1384         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1385         if len(format) == 2:
1386             format = int(format[0]), int(format[1])
1387         else:
1388             format = int(float(format[0])), 0
1389
1390         # We need to deal with the original changes blob, as the fields we need
1391         # might not be in the changes dict serialised into the .dak anymore.
1392         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1393
1394         # Copy the checksums over to the current changes dict.  This will keep
1395         # the existing modifications to it intact.
1396         for field in orig_changes:
1397             if field.startswith('checksums-'):
1398                 self.pkg.changes[field] = orig_changes[field]
1399
1400         # Check for unsupported hashes
1401         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1402             self.rejects.append(j)
1403
1404         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1405             self.rejects.append(j)
1406
1407         # We have to calculate the hash if we have an earlier changes version than
1408         # the hash appears in rather than require it exist in the changes file
1409         for hashname, hashfunc, version in utils.known_hashes:
1410             # TODO: Move _ensure_changes_hash into this class
1411             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1412                 self.rejects.append(j)
1413             if "source" in self.pkg.changes["architecture"]:
1414                 # TODO: Move _ensure_dsc_hash into this class
1415                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1416                     self.rejects.append(j)
1417
1418     def check_hashes(self):
1419         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1420             self.rejects.append(m)
1421
1422         for m in utils.check_size(".changes", self.pkg.files):
1423             self.rejects.append(m)
1424
1425         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1426             self.rejects.append(m)
1427
1428         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1429             self.rejects.append(m)
1430
1431         self.ensure_hashes()
1432
1433     ###########################################################################
1434
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).

        @type target_dir: string
        @param target_dir: directory the orig files should end up in

        @type session: SQLAlchemy session or None
        @param session: optional database session to use for pool lookups;
            when None, a private session is opened and closed per file
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            # NOTE: closes over the current loop's 'filename' and 'entry';
            # it is redefined on each iteration and must only be called
            # within that iteration.
            def symlink_if_valid(path):
                # Only link the candidate if its size and md5sum match the
                # values recorded in the .dsc.
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Use the caller-supplied session if there is one; otherwise open
            # a private session for this file's pool lookup and close it below.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                # Skip queues with no configured directory.
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1516
1517     ###########################################################################
1518
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions (for-else: return when no listed
        # distribution matched the loop).
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if not tagfile:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            # utils.fubar exits the process; the return is never reached.
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc; any symlinks created
        # are removed again in the finally block below.
        symlinked = self.ensure_orig()

        # Setup the input file for lintian: one tag per line, all severities.
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # Exit status 2 means lintian itself failed; warn but still parse
        # whatever output we got.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        # Local logging helper passed into generate_reject_messages.
        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1593
1594     ###########################################################################
1595     def check_urgency(self):
1596         cnf = Config()
1597         if self.pkg.changes["architecture"].has_key("source"):
1598             if not self.pkg.changes.has_key("urgency"):
1599                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1600             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1601             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1602                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1603                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1604                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1605
1606     ###########################################################################
1607
1608     # Sanity check the time stamps of files inside debs.
1609     # [Files in the near future cause ugly warnings and extreme time
1610     #  travel can cause errors on extraction]
1611
1612     def check_timestamps(self):
1613         Cnf = Config()
1614
1615         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1616         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1617         tar = TarTime(future_cutoff, past_cutoff)
1618
1619         for filename, entry in self.pkg.files.items():
1620             if entry["type"] == "deb":
1621                 tar.reset()
1622                 try:
1623                     deb = apt_inst.DebFile(filename)
1624                     deb.control.go(tar.callback)
1625
1626                     future_files = tar.future_files.keys()
1627                     if future_files:
1628                         num_future_files = len(future_files)
1629                         future_file = future_files[0]
1630                         future_date = tar.future_files[future_file]
1631                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1632                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1633
1634                     ancient_files = tar.ancient_files.keys()
1635                     if ancient_files:
1636                         num_ancient_files = len(ancient_files)
1637                         ancient_file = ancient_files[0]
1638                         ancient_date = tar.ancient_files[ancient_file]
1639                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1640                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1641                 except:
1642                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1643
1644     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1645         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1646             sponsored = False
1647         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1648             sponsored = False
1649             if uid_name == "":
1650                 sponsored = True
1651         else:
1652             sponsored = True
1653             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1654                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1655                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1656                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1657                         self.pkg.changes["sponsoremail"] = uid_email
1658
1659         return sponsored
1660
1661
1662     ###########################################################################
1663     # check_signed_by_key checks
1664     ###########################################################################
1665
1666     def check_signed_by_key(self):
1667         """Ensure the .changes is signed by an authorized uploader."""
1668         session = DBConn().session()
1669
1670         # First of all we check that the person has proper upload permissions
1671         # and that this upload isn't blocked
1672         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1673
1674         if fpr is None:
1675             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1676             return
1677
1678         # TODO: Check that import-keyring adds UIDs properly
1679         if not fpr.uid:
1680             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1681             return
1682
1683         # Check that the fingerprint which uploaded has permission to do so
1684         self.check_upload_permissions(fpr, session)
1685
1686         # Check that this package is not in a transition
1687         self.check_transition(session)
1688
1689         session.close()
1690
1691
1692     def check_upload_permissions(self, fpr, session):
1693         # Check any one-off upload blocks
1694         self.check_upload_blocks(fpr, session)
1695
1696         # If the source_acl is None, source is never allowed
1697         if fpr.source_acl is None:
1698             if self.pkg.changes["architecture"].has_key("source"):
1699                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1700                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1701                 self.rejects.append(rej)
1702                 return
1703         # Do DM as a special case
1704         # DM is a special case unfortunately, so we check it first
1705         # (keys with no source access get more access than DMs in one
1706         #  way; DMs can only upload for their packages whether source
1707         #  or binary, whereas keys with no access might be able to
1708         #  upload some binaries)
1709         elif fpr.source_acl.access_level == 'dm':
1710             self.check_dm_upload(fpr, session)
1711         else:
1712             # If not a DM, we allow full upload rights
1713             uid_email = "%s@debian.org" % (fpr.uid.uid)
1714             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1715
1716
1717         # Check binary upload permissions
1718         # By this point we know that DMs can't have got here unless they
1719         # are allowed to deal with the package concerned so just apply
1720         # normal checks
1721         if fpr.binary_acl.access_level == 'full':
1722             return
1723
1724         # Otherwise we're in the map case
1725         tmparches = self.pkg.changes["architecture"].copy()
1726         tmparches.pop('source', None)
1727
1728         for bam in fpr.binary_acl_map:
1729             tmparches.pop(bam.architecture.arch_string, None)
1730
1731         if len(tmparches.keys()) > 0:
1732             if fpr.binary_reject:
1733                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1734                 if len(tmparches.keys()) == 1:
1735                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1736                 else:
1737                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1738                 self.rejects.append(rej)
1739             else:
1740                 # TODO: This is where we'll implement reject vs throw away binaries later
1741                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1742                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1743                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1744                 self.rejects.append(rej)
1745
1746
1747     def check_upload_blocks(self, fpr, session):
1748         """Check whether any upload blocks apply to this source, source
1749            version, uid / fpr combination"""
1750
1751         def block_rej_template(fb):
1752             rej = 'Manual upload block in place for package %s' % fb.source
1753             if fb.version is not None:
1754                 rej += ', version %s' % fb.version
1755             return rej
1756
1757         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1758             # version is None if the block applies to all versions
1759             if fb.version is None or fb.version == self.pkg.changes['version']:
1760                 # Check both fpr and uid - either is enough to cause a reject
1761                 if fb.fpr is not None:
1762                     if fb.fpr.fingerprint == fpr.fingerprint:
1763                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1764                 if fb.uid is not None:
1765                     if fb.uid == fpr.uid:
1766                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1767
1768
1769     def check_dm_upload(self, fpr, session):
1770         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1771         ## none of the uploaded packages are NEW
1772         rej = False
1773         for f in self.pkg.files.keys():
1774             if self.pkg.files[f].has_key("byhand"):
1775                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1776                 rej = True
1777             if self.pkg.files[f].has_key("new"):
1778                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1779                 rej = True
1780
1781         if rej:
1782             return
1783
1784         r = get_newest_source(self.pkg.changes["source"], session)
1785
1786         if r is None:
1787             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1788             self.rejects.append(rej)
1789             return
1790
1791         if not r.dm_upload_allowed:
1792             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1793             self.rejects.append(rej)
1794             return
1795
1796         ## the Maintainer: field of the uploaded .changes file corresponds with
1797         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1798         ## uploads)
1799         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1800             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1801
1802         ## the most recent version of the package uploaded to unstable or
1803         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1804         ## non-developer maintainers cannot NMU or hijack packages)
1805
1806         # uploader includes the maintainer
1807         accept = False
1808         for uploader in r.uploaders:
1809             (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1810             # Eww - I hope we never have two people with the same name in Debian
1811             if email == fpr.uid.uid or name == fpr.uid.name:
1812                 accept = True
1813                 break
1814
1815         if not accept:
1816             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1817             return
1818
1819         ## none of the packages are being taken over from other source packages
1820         for b in self.pkg.changes["binary"].keys():
1821             for suite in self.pkg.changes["distribution"].keys():
1822                 for s in get_source_by_package_and_suite(b, suite, session):
1823                     if s.source != self.pkg.changes["source"]:
1824                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1825
1826
1827
1828     def check_transition(self, session):
1829         cnf = Config()
1830
1831         sourcepkg = self.pkg.changes["source"]
1832
1833         # No sourceful upload -> no need to do anything else, direct return
1834         # We also work with unstable uploads, not experimental or those going to some
1835         # proposed-updates queue
1836         if "source" not in self.pkg.changes["architecture"] or \
1837            "unstable" not in self.pkg.changes["distribution"]:
1838             return
1839
1840         # Also only check if there is a file defined (and existant) with
1841         # checks.
1842         transpath = cnf.get("Dinstall::ReleaseTransitions", "")
1843         if transpath == "" or not os.path.exists(transpath):
1844             return
1845
1846         # Parse the yaml file
1847         sourcefile = file(transpath, 'r')
1848         sourcecontent = sourcefile.read()
1849         try:
1850             transitions = yaml.load(sourcecontent)
1851         except yaml.YAMLError, msg:
1852             # This shouldn't happen, there is a wrapper to edit the file which
1853             # checks it, but we prefer to be safe than ending up rejecting
1854             # everything.
1855             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1856             return
1857
1858         # Now look through all defined transitions
1859         for trans in transitions:
1860             t = transitions[trans]
1861             source = t["source"]
1862             expected = t["new"]
1863
1864             # Will be None if nothing is in testing.
1865             current = get_source_in_suite(source, "testing", session)
1866             if current is not None:
1867                 compare = apt_pkg.VersionCompare(current.version, expected)
1868
1869             if current is None or compare < 0:
1870                 # This is still valid, the current version in testing is older than
1871                 # the new version we wait for, or there is none in testing yet
1872
1873                 # Check if the source we look at is affected by this.
1874                 if sourcepkg in t['packages']:
1875                     # The source is affected, lets reject it.
1876
1877                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1878                         sourcepkg, trans)
1879
1880                     if current is not None:
1881                         currentlymsg = "at version %s" % (current.version)
1882                     else:
1883                         currentlymsg = "not present in testing"
1884
1885                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1886
1887                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1888 is part of a testing transition designed to get %s migrated (it is
1889 currently %s, we need version %s).  This transition is managed by the
1890 Release Team, and %s is the Release-Team member responsible for it.
1891 Please mail debian-release@lists.debian.org or contact %s directly if you
1892 need further assistance.  You might want to upload to experimental until this
1893 transition is done."""
1894                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1895
1896                     self.rejects.append(rejectmsg)
1897                     return
1898
1899     ###########################################################################
1900     # End check_signed_by_key checks
1901     ###########################################################################
1902
1903     def build_summaries(self):
1904         """ Build a summary of changes the upload introduces. """
1905
1906         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1907
1908         short_summary = summary
1909
1910         # This is for direport's benefit...
1911         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1912
1913         if byhand or new:
1914             summary += "Changes: " + f
1915
1916         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1917
1918         summary += self.announce(short_summary, 0)
1919
1920         return (summary, short_summary)
1921
1922     ###########################################################################
1923
1924     def close_bugs(self, summary, action):
1925         """
1926         Send mail to close bugs as instructed by the closes field in the changes file.
1927         Also add a line to summary if any work was done.
1928
1929         @type summary: string
1930         @param summary: summary text, as given by L{build_summaries}
1931
1932         @type action: bool
1933         @param action: Set to false no real action will be done.
1934
1935         @rtype: string
1936         @return: summary. If action was taken, extended by the list of closed bugs.
1937
1938         """
1939
1940         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1941
1942         bugs = self.pkg.changes["closes"].keys()
1943
1944         if not bugs:
1945             return summary
1946
1947         bugs.sort()
1948         summary += "Closing bugs: "
1949         for bug in bugs:
1950             summary += "%s " % (bug)
1951             if action:
1952                 self.update_subst()
1953                 self.Subst["__BUG_NUMBER__"] = bug
1954                 if self.pkg.changes["distribution"].has_key("stable"):
1955                     self.Subst["__STABLE_WARNING__"] = """
1956 Note that this package is not part of the released stable Debian
1957 distribution.  It may have dependencies on other unreleased software,
1958 or other instabilities.  Please take care if you wish to install it.
1959 The update will eventually make its way into the next released Debian
1960 distribution."""
1961                 else:
1962                     self.Subst["__STABLE_WARNING__"] = ""
1963                 mail_message = utils.TemplateSubst(self.Subst, template)
1964                 utils.send_mail(mail_message)
1965
1966                 # Clear up after ourselves
1967                 del self.Subst["__BUG_NUMBER__"]
1968                 del self.Subst["__STABLE_WARNING__"]
1969
1970         if action and self.logger:
1971             self.logger.log(["closing bugs"] + bugs)
1972
1973         summary += "\n"
1974
1975         return summary
1976
1977     ###########################################################################
1978
1979     def announce(self, short_summary, action):
1980         """
1981         Send an announce mail about a new upload.
1982
1983         @type short_summary: string
1984         @param short_summary: Short summary text to include in the mail
1985
1986         @type action: bool
1987         @param action: Set to false no real action will be done.
1988
1989         @rtype: string
1990         @return: Textstring about action taken.
1991
1992         """
1993
1994         cnf = Config()
1995         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1996
1997         # Only do announcements for source uploads with a recent dpkg-dev installed
1998         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1999            self.pkg.changes["architecture"].has_key("source"):
2000             return ""
2001
2002         lists_done = {}
2003         summary = ""
2004
2005         self.Subst["__SHORT_SUMMARY__"] = short_summary
2006
2007         for dist in self.pkg.changes["distribution"].keys():
2008             suite = get_suite(dist)
2009             if suite is None: continue
2010             announce_list = suite.announce
2011             if announce_list == "" or lists_done.has_key(announce_list):
2012                 continue
2013
2014             lists_done[announce_list] = 1
2015             summary += "Announcing to %s\n" % (announce_list)
2016
2017             if action:
2018                 self.update_subst()
2019                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2020                 if cnf.get("Dinstall::TrackingServer") and \
2021                    self.pkg.changes["architecture"].has_key("source"):
2022                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2023                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2024
2025                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2026                 utils.send_mail(mail_message)
2027
2028                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2029
2030         if cnf.FindB("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
2031             summary = self.close_bugs(summary, action)
2032
2033         del self.Subst["__SHORT_SUMMARY__"]
2034
2035         return summary
2036
2037     ###########################################################################
2038     @session_wrapper
2039     def accept (self, summary, short_summary, session=None):
2040         """
2041         Accept an upload.
2042
2043         This moves all files referenced from the .changes into the pool,
2044         sends the accepted mail, announces to lists, closes bugs and
2045         also checks for override disparities. If enabled it will write out
2046         the version history for the BTS Version Tracking and will finally call
2047         L{queue_build}.
2048
2049         @type summary: string
2050         @param summary: Summary text
2051
2052         @type short_summary: string
2053         @param short_summary: Short summary
2054         """
2055
2056         cnf = Config()
2057         stats = SummaryStats()
2058
2059         print "Installing."
2060         self.logger.log(["installing changes", self.pkg.changes_file])
2061
2062         binaries = []
2063         poolfiles = []
2064
2065         # Add the .dsc file to the DB first
2066         for newfile, entry in self.pkg.files.items():
2067             if entry["type"] == "dsc":
2068                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2069                 for j in pfs:
2070                     poolfiles.append(j)
2071
2072         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2073         for newfile, entry in self.pkg.files.items():
2074             if entry["type"] == "deb":
2075                 b, pf = add_deb_to_db(self, newfile, session)
2076                 binaries.append(b)
2077                 poolfiles.append(pf)
2078
2079         # If this is a sourceful diff only upload that is moving
2080         # cross-component we need to copy the .orig files into the new
2081         # component too for the same reasons as above.
2082         # XXX: mhy: I think this should be in add_dsc_to_db
2083         if self.pkg.changes["architecture"].has_key("source"):
2084             for orig_file in self.pkg.orig_files.keys():
2085                 if not self.pkg.orig_files[orig_file].has_key("id"):
2086                     continue # Skip if it's not in the pool
2087                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2088                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2089                     continue # Skip if the location didn't change
2090
2091                 # Do the move
2092                 oldf = get_poolfile_by_id(orig_file_id, session)
2093                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2094                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2095                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2096
2097                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2098
2099                 # TODO: Care about size/md5sum collisions etc
2100                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2101
2102                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2103                 if newf is None:
2104                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2105                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2106
2107                     session.flush()
2108
2109                     # Don't reference the old file from this changes
2110                     for p in poolfiles:
2111                         if p.file_id == oldf.file_id:
2112                             poolfiles.remove(p)
2113
2114                     poolfiles.append(newf)
2115
2116                     # Fix up the DSC references
2117                     toremove = []
2118
2119                     for df in source.srcfiles:
2120                         if df.poolfile.file_id == oldf.file_id:
2121                             # Add a new DSC entry and mark the old one for deletion
2122                             # Don't do it in the loop so we don't change the thing we're iterating over
2123                             newdscf = DSCFile()
2124                             newdscf.source_id = source.source_id
2125                             newdscf.poolfile_id = newf.file_id
2126                             session.add(newdscf)
2127
2128                             toremove.append(df)
2129
2130                     for df in toremove:
2131                         session.delete(df)
2132
2133                     # Flush our changes
2134                     session.flush()
2135
2136                     # Make sure that our source object is up-to-date
2137                     session.expire(source)
2138
2139         # Add changelog information to the database
2140         self.store_changelog()
2141
2142         # Install the files into the pool
2143         for newfile, entry in self.pkg.files.items():
2144             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2145             utils.move(newfile, destination)
2146             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2147             stats.accept_bytes += float(entry["size"])
2148
2149         # Copy the .changes file across for suite which need it.
2150         copy_changes = dict([(x.copychanges, '')
2151                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2152                              if x.copychanges is not None])
2153
2154         for dest in copy_changes.keys():
2155             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2156
2157         # We're done - commit the database changes
2158         session.commit()
2159         # Our SQL session will automatically start a new transaction after
2160         # the last commit
2161
2162         # Now ensure that the metadata has been added
2163         # This has to be done after we copy the files into the pool
2164         # For source if we have it:
2165         if self.pkg.changes["architecture"].has_key("source"):
2166             import_metadata_into_db(source, session)
2167
2168         # Now for any of our binaries
2169         for b in binaries:
2170             import_metadata_into_db(b, session)
2171
2172         session.commit()
2173
2174         # Move the .changes into the 'done' directory
2175         ye, mo, da = time.gmtime()[0:3]
2176         donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2177         if not os.path.isdir(donedir):
2178             os.makedirs(donedir)
2179
2180         utils.move(self.pkg.changes_file,
2181                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2182
2183         if self.pkg.changes["architecture"].has_key("source"):
2184             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2185
2186         self.update_subst()
2187         self.Subst["__SUMMARY__"] = summary
2188         mail_message = utils.TemplateSubst(self.Subst,
2189                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2190         utils.send_mail(mail_message)
2191         self.announce(short_summary, 1)
2192
2193         ## Helper stuff for DebBugs Version Tracking
2194         if cnf.Find("Dir::BTSVersionTrack"):
2195             if self.pkg.changes["architecture"].has_key("source"):
2196                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2197                 version_history = os.fdopen(fd, 'w')
2198                 version_history.write(self.pkg.dsc["bts changelog"])
2199                 version_history.close()
2200                 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2201                                       self.pkg.changes_file[:-8]+".versions")
2202                 os.rename(temp_filename, filename)
2203                 os.chmod(filename, 0644)
2204
2205             # Write out the binary -> source mapping.
2206             (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2207             debinfo = os.fdopen(fd, 'w')
2208             for name, entry in sorted(self.pkg.files.items()):
2209                 if entry["type"] == "deb":
2210                     line = " ".join([entry["package"], entry["version"],
2211                                      entry["architecture"], entry["source package"],
2212                                      entry["source version"]])
2213                     debinfo.write(line+"\n")
2214             debinfo.close()
2215             filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2216                                   self.pkg.changes_file[:-8]+".debinfo")
2217             os.rename(temp_filename, filename)
2218             os.chmod(filename, 0644)
2219
2220         session.commit()
2221
2222         # Set up our copy queues (e.g. buildd queues)
2223         for suite_name in self.pkg.changes["distribution"].keys():
2224             suite = get_suite(suite_name, session)
2225             for q in suite.copy_queues:
2226                 for f in poolfiles:
2227                     q.add_file_from_pool(f)
2228
2229         session.commit()
2230
2231         # Finally...
2232         stats.accept_count += 1
2233
2234     def check_override(self):
2235         """
2236         Checks override entries for validity. Mails "Override disparity" warnings,
2237         if that feature is enabled.
2238
2239         Abandons the check if
2240           - override disparity checks are disabled
2241           - mail sending is disabled
2242         """
2243
2244         cnf = Config()
2245
2246         # Abandon the check if override disparity checks have been disabled
2247         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2248             return
2249
2250         summary = self.pkg.check_override()
2251
2252         if summary == "":
2253             return
2254
2255         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2256
2257         self.update_subst()
2258         self.Subst["__SUMMARY__"] = summary
2259         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2260         utils.send_mail(mail_message)
2261         del self.Subst["__SUMMARY__"]
2262
2263     ###########################################################################
2264
2265     def remove(self, from_dir=None):
2266         """
2267         Used (for instance) in p-u to remove the package from unchecked
2268
2269         Also removes the package from holding area.
2270         """
2271         if from_dir is None:
2272             from_dir = self.pkg.directory
2273         h = Holding()
2274
2275         for f in self.pkg.files.keys():
2276             os.unlink(os.path.join(from_dir, f))
2277             if os.path.exists(os.path.join(h.holding_dir, f)):
2278                 os.unlink(os.path.join(h.holding_dir, f))
2279
2280         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2281         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2282             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2283
2284     ###########################################################################
2285
2286     def move_to_queue (self, queue):
2287         """
2288         Move files to a destination queue using the permissions in the table
2289         """
2290         h = Holding()
2291         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2292                    queue.path, perms=int(queue.change_perms, 8))
2293         for f in self.pkg.files.keys():
2294             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2295
2296     ###########################################################################
2297
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If a file of the same name already exists in
        the reject directory, a new free name is picked by appending a
        number (see utils.find_next_free) so the new file can be claimed.

        @type reject_files: list
        @param reject_files: filenames to move into the reject directory

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL: atomically claim the destination name; fails with
                # EEXIST if someone (or a previous rejection) already owns it.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        # NOTE: remaining reject_files are abandoned here.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    # Any other error (permissions, missing dir, ...) is fatal.
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2345
2346     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        Moves all of the upload's files into the reject directory, writes
        the rejection reason to a <changes-basename>.reason file there and
        mails the rejection.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects (author/version/notedate/comment) whose
            text is pre-filled into the editor buffer for a manual rejection

        @return: 0 normally; 1 if a manual rejection was abandoned

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Keep re-spawning the editor for as long as the user picks "Edit".
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Loop until the answer matches one of the letters in the prompt;
                # an empty answer selects the default marked by re_default_answer.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1   # Abandon: nothing has been rejected yet
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <upload>.changes -> <upload>.reason in the reject directory
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        # O_EXCL ensures we create the .reason file ourselves rather than
        # reuse one planted between the unlink above and this open.
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated rejection: the raw reject message becomes the
            # .reason file; the template builds the mail around it.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        # Clear our substitutions so later mails don't inherit them.
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        stats = SummaryStats()
        stats.reject_count += 1
        return 0
2446
2447     ################################################################################
2448     def in_override_p(self, package, component, suite, binary_type, filename, session):
2449         """
2450         Check if a package already has override entries in the DB
2451
2452         @type package: string
2453         @param package: package name
2454
2455         @type component: string
2456         @param component: database id of the component
2457
2458         @type suite: int
2459         @param suite: database id of the suite
2460
2461         @type binary_type: string
2462         @param binary_type: type of the package
2463
2464         @type filename: string
2465         @param filename: filename we check
2466
2467         @return: the database result. But noone cares anyway.
2468
2469         """
2470
2471         cnf = Config()
2472
2473         if binary_type == "": # must be source
2474             file_type = "dsc"
2475         else:
2476             file_type = binary_type
2477
2478         # Override suite name; used for example with proposed-updates
2479         oldsuite = get_suite(suite, session)
2480         if (not oldsuite is None) and oldsuite.overridesuite:
2481             suite = oldsuite.overridesuite
2482
2483         result = get_override(package, suite, component, file_type, session)
2484
2485         # If checking for a source package fall back on the binary override type
2486         if file_type == "dsc" and len(result) < 1:
2487             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2488
2489         # Remember the section and priority so we can check them later if appropriate
2490         if len(result) > 0:
2491             result = result[0]
2492             self.pkg.files[filename]["override section"] = result.section.section
2493             self.pkg.files[filename]["override priority"] = result.priority.priority
2494             return result
2495
2496         return None
2497
2498     ################################################################################
2499     def get_anyversion(self, sv_list, suite):
2500         """
2501         @type sv_list: list
2502         @param sv_list: list of (suite, version) tuples to check
2503
2504         @type suite: string
2505         @param suite: suite name
2506
2507         Description: TODO
2508         """
2509         Cnf = Config()
2510         anyversion = None
2511         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2512         for (s, v) in sv_list:
2513             if s in [ x.lower() for x in anysuite ]:
2514                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2515                     anyversion = v
2516
2517         return anyversion
2518
2519     ################################################################################
2520
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the file being checked (used only in
            reject/warning message text)

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether the upload carries source; the
            "must be newer than" rule is only enforced for sourceful uploads

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.  Appends to self.rejects
        and self.warnings, and may mark extra suites for propagation in
        self.pkg.changes["propdistribution"].
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # Check we can find the target suite
            # NOTE(review): no session is passed here, unlike other get_suite
            # calls in this file - presumably it opens its own; confirm.
            ts = get_suite(target_suite)
            if ts is None:
                self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
                continue

            must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
            must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                # > 0 means new_version is newer than existent_version,
                # 0 equal, < 0 older (dpkg version comparison semantics).
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    # The upload would violate the "must be older than" rule;
                    # see if a distribution-version mapping lets us save it
                    # by propagating the upload to another suite instead.
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    # No mapping saved us: the version conflict stands.
                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2603
2604     ################################################################################
2605     def check_binary_against_db(self, filename, session):
2606         # Ensure version is sane
2607         self.cross_suite_version_check( \
2608             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2609                 self.pkg.files[filename]["architecture"], session),
2610             filename, self.pkg.files[filename]["version"], sourceful=False)
2611
2612         # Check for any existing copies of the file
2613         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2614         q = q.filter_by(version=self.pkg.files[filename]["version"])
2615         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2616
2617         if q.count() > 0:
2618             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2619
2620     ################################################################################
2621
2622     def check_source_against_db(self, filename, session):
2623         source = self.pkg.dsc.get("source")
2624         version = self.pkg.dsc.get("version")
2625
2626         # Ensure version is sane
2627         self.cross_suite_version_check( \
2628             get_suite_version_by_source(source, session), filename, version,
2629             sourceful=True)
2630
2631     ################################################################################
2632     def check_dsc_against_db(self, filename, session):
2633         """
2634
2635         @warning: NB: this function can remove entries from the 'files' index [if
2636          the orig tarball is a duplicate of the one in the archive]; if
2637          you're iterating over 'files' and call this function as part of
2638          the loop, be sure to add a check to the top of the loop to
2639          ensure you haven't just tried to dereference the deleted entry.
2640
2641         """
2642
2643         Cnf = Config()
2644         self.pkg.orig_files = {} # XXX: do we need to clear it?
2645         orig_files = self.pkg.orig_files
2646
2647         # Try and find all files mentioned in the .dsc.  This has
2648         # to work harder to cope with the multiple possible
2649         # locations of an .orig.tar.gz.
2650         # The ordering on the select is needed to pick the newest orig
2651         # when it exists in multiple places.
2652         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2653             found = None
2654             if self.pkg.files.has_key(dsc_name):
2655                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2656                 actual_size = int(self.pkg.files[dsc_name]["size"])
2657                 found = "%s in incoming" % (dsc_name)
2658
2659                 # Check the file does not already exist in the archive
2660                 ql = get_poolfile_like_name(dsc_name, session)
2661
2662                 # Strip out anything that isn't '%s' or '/%s$'
2663                 for i in ql:
2664                     if not i.filename.endswith(dsc_name):
2665                         ql.remove(i)
2666
2667                 # "[dak] has not broken them.  [dak] has fixed a
2668                 # brokenness.  Your crappy hack exploited a bug in
2669                 # the old dinstall.
2670                 #
2671                 # "(Come on!  I thought it was always obvious that
2672                 # one just doesn't release different files with
2673                 # the same name and version.)"
2674                 #                        -- ajk@ on d-devel@l.d.o
2675
2676                 if len(ql) > 0:
2677                     # Ignore exact matches for .orig.tar.gz
2678                     match = 0
2679                     if re_is_orig_source.match(dsc_name):
2680                         for i in ql:
2681                             if self.pkg.files.has_key(dsc_name) and \
2682                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2683                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2684                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2685                                 # TODO: Don't delete the entry, just mark it as not needed
2686                                 # This would fix the stupidity of changing something we often iterate over
2687                                 # whilst we're doing it
2688                                 del self.pkg.files[dsc_name]
2689                                 dsc_entry["files id"] = i.file_id
2690                                 if not orig_files.has_key(dsc_name):
2691                                     orig_files[dsc_name] = {}
2692                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2693                                 match = 1
2694
2695                                 # Don't bitch that we couldn't find this file later
2696                                 try:
2697                                     self.later_check_files.remove(dsc_name)
2698                                 except ValueError:
2699                                     pass
2700
2701
2702                     if not match:
2703                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2704
2705             elif re_is_orig_source.match(dsc_name):
2706                 # Check in the pool
2707                 ql = get_poolfile_like_name(dsc_name, session)
2708
2709                 # Strip out anything that isn't '%s' or '/%s$'
2710                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2711                 for i in ql:
2712                     if not i.filename.endswith(dsc_name):
2713                         ql.remove(i)
2714
2715                 if len(ql) > 0:
2716                     # Unfortunately, we may get more than one match here if,
2717                     # for example, the package was in potato but had an -sa
2718                     # upload in woody.  So we need to choose the right one.
2719
2720                     # default to something sane in case we don't match any or have only one
2721                     x = ql[0]
2722
2723                     if len(ql) > 1:
2724                         for i in ql:
2725                             old_file = os.path.join(i.location.path, i.filename)
2726                             old_file_fh = utils.open_file(old_file)
2727                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2728                             old_file_fh.close()
2729                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2730                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2731                                 x = i
2732
2733                     old_file = os.path.join(i.location.path, i.filename)
2734                     old_file_fh = utils.open_file(old_file)
2735                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2736                     old_file_fh.close()
2737                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2738                     found = old_file
2739                     suite_type = x.location.archive_type
2740                     # need this for updating dsc_files in install()
2741                     dsc_entry["files id"] = x.file_id
2742                     # See install() in process-accepted...
2743                     if not orig_files.has_key(dsc_name):
2744                         orig_files[dsc_name] = {}
2745                     orig_files[dsc_name]["id"] = x.file_id
2746                     orig_files[dsc_name]["path"] = old_file
2747                     orig_files[dsc_name]["location"] = x.location.location_id
2748                 else:
2749                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2750                     # Not there? Check the queue directories...
2751                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2752                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2753                             continue
2754                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2755                         if os.path.exists(in_otherdir):
2756                             in_otherdir_fh = utils.open_file(in_otherdir)
2757                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2758                             in_otherdir_fh.close()
2759                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2760                             found = in_otherdir
2761                             if not orig_files.has_key(dsc_name):
2762                                 orig_files[dsc_name] = {}
2763                             orig_files[dsc_name]["path"] = in_otherdir
2764
2765                     if not found:
2766                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2767                         continue
2768             else:
2769                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2770                 continue
2771             if actual_md5 != dsc_entry["md5sum"]:
2772                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2773             if actual_size != int(dsc_entry["size"]):
2774                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2775
2776     ################################################################################
2777     # This is used by process-new and process-holding to recheck a changes file
2778     # at the time we're running.  It mainly wraps various other internal functions
2779     # and is similar to accepted_checks - these should probably be tidied up
2780     # and combined
2781     def recheck(self, session):
2782         cnf = Config()
2783         for f in self.pkg.files.keys():
2784             # The .orig.tar.gz can disappear out from under us is it's a
2785             # duplicate of one in the archive.
2786             if not self.pkg.files.has_key(f):
2787                 continue
2788
2789             entry = self.pkg.files[f]
2790
2791             # Check that the source still exists
2792             if entry["type"] == "deb":
2793                 source_version = entry["source version"]
2794                 source_package = entry["source package"]
2795                 if not self.pkg.changes["architecture"].has_key("source") \
2796                    and not source_exists(source_package, source_version, \
2797                     suites = self.pkg.changes["distribution"].keys(), session = session):
2798                     source_epochless_version = re_no_epoch.sub('', source_version)
2799                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2800                     found = False
2801                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2802                         if cnf.has_key("Dir::Queue::%s" % (q)):
2803                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2804                                 found = True
2805                     if not found:
2806                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2807
2808             # Version and file overwrite checks
2809             if entry["type"] == "deb":
2810                 self.check_binary_against_db(f, session)
2811             elif entry["type"] == "dsc":
2812                 self.check_source_against_db(f, session)
2813                 self.check_dsc_against_db(f, session)
2814
2815     ################################################################################
2816     def accepted_checks(self, overwrite_checks, session):
2817         # Recheck anything that relies on the database; since that's not
2818         # frozen between accept and our run time when called from p-a.
2819
2820         # overwrite_checks is set to False when installing to stable/oldstable
2821
2822         propogate={}
2823         nopropogate={}
2824
2825         # Find the .dsc (again)
2826         dsc_filename = None
2827         for f in self.pkg.files.keys():
2828             if self.pkg.files[f]["type"] == "dsc":
2829                 dsc_filename = f
2830
2831         for checkfile in self.pkg.files.keys():
2832             # The .orig.tar.gz can disappear out from under us is it's a
2833             # duplicate of one in the archive.
2834             if not self.pkg.files.has_key(checkfile):
2835                 continue
2836
2837             entry = self.pkg.files[checkfile]
2838
2839             # Check that the source still exists
2840             if entry["type"] == "deb":
2841                 source_version = entry["source version"]
2842                 source_package = entry["source package"]
2843                 if not self.pkg.changes["architecture"].has_key("source") \
2844                    and not source_exists(source_package, source_version, \
2845                     suites = self.pkg.changes["distribution"].keys(), \
2846                     session = session):
2847                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2848
2849             # Version and file overwrite checks
2850             if overwrite_checks:
2851                 if entry["type"] == "deb":
2852                     self.check_binary_against_db(checkfile, session)
2853                 elif entry["type"] == "dsc":
2854                     self.check_source_against_db(checkfile, session)
2855                     self.check_dsc_against_db(dsc_filename, session)
2856
2857             # propogate in the case it is in the override tables:
2858             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2859                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2860                     propogate[suite] = 1
2861                 else:
2862                     nopropogate[suite] = 1
2863
2864         for suite in propogate.keys():
2865             if suite in nopropogate:
2866                 continue
2867             self.pkg.changes["distribution"][suite] = 1
2868
2869         for checkfile in self.pkg.files.keys():
2870             # Check the package is still in the override tables
2871             for suite in self.pkg.changes["distribution"].keys():
2872                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2873                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2874
2875     ################################################################################
2876     # If any file of an upload has a recent mtime then chances are good
2877     # the file is still being uploaded.
2878
2879     def upload_too_new(self):
2880         cnf = Config()
2881         too_new = False
2882         # Move back to the original directory to get accurate time stamps
2883         cwd = os.getcwd()
2884         os.chdir(self.pkg.directory)
2885         file_list = self.pkg.files.keys()
2886         file_list.extend(self.pkg.dsc_files.keys())
2887         file_list.append(self.pkg.changes_file)
2888         for f in file_list:
2889             try:
2890                 last_modified = time.time()-os.path.getmtime(f)
2891                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2892                     too_new = True
2893                     break
2894             except:
2895                 pass
2896
2897         os.chdir(cwd)
2898         return too_new
2899
2900     def store_changelog(self):
2901
2902         # Skip binary-only upload if it is not a bin-NMU
2903         if not self.pkg.changes['architecture'].has_key('source'):
2904             from daklib.regexes import re_bin_only_nmu
2905             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2906                 return
2907
2908         session = DBConn().session()
2909
2910         # Check if upload already has a changelog entry
2911         query = """SELECT changelog_id FROM changes WHERE source = :source
2912                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2913         if session.execute(query, {'source': self.pkg.changes['source'], \
2914                                    'version': self.pkg.changes['version'], \
2915                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2916             session.commit()
2917             return
2918
2919         # Add current changelog text into changelogs_text table, return created ID
2920         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2921         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2922
2923         # Link ID to the upload available in changes table
2924         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2925                    AND version = :version AND architecture = :architecture"""
2926         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2927                                 'version': self.pkg.changes['version'], \
2928                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2929
2930         session.commit()