3 """ Checks Debian packages from Incoming """
4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 2 of the License, or
9 # (at your option) any later version.
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 # Originally based on dinstall by Guy Maor <maor@debian.org>
22 ################################################################################
24 # Computer games don't affect kids. I mean if Pacman affected our generation as
25 # kids, we'd all run around in a darkened room munching pills and listening to
29 ################################################################################
44 from debian_bundle import deb822
45 from daklib.dbconn import DBConn
46 from daklib.binary import Binary
47 from daklib import logging
48 from daklib import queue
49 from daklib import utils
50 from daklib.dak_exceptions import *
51 from daklib.regexes import re_valid_version, re_valid_pkg_name, re_changelog_versions, \
52 re_strip_revision, re_strip_srcver, re_spacestrip, \
53 re_isanum, re_no_epoch, re_no_revision, re_taint_free, \
54 re_isadeb, re_extract_src_version, re_issource, re_default_answer
58 ################################################################################
61 ################################################################################
72 # Aliases to the real vars in the Upload class; hysterical raisins.
80 ###############################################################################
    # NOTE(review): this chunk elides some original lines (including the
    # enclosing "def init():" header); only the visible statements are shown.
    global Cnf, Options, Upload, changes, dsc, dsc_files, files, pkg

    # Load the dak configuration (apt-style ISC configuration file).
    Cnf = apt_pkg.newConfiguration()
    apt_pkg.ReadConfigFileISC(Cnf,utils.which_conf_file())

    # Command-line options, mapped onto Dinstall::Options::* config keys.
    Arguments = [('a',"automatic","Dinstall::Options::Automatic"),
                 ('h',"help","Dinstall::Options::Help"),
                 ('n',"no-action","Dinstall::Options::No-Action"),
                 ('p',"no-lock", "Dinstall::Options::No-Lock"),
                 ('s',"no-mail", "Dinstall::Options::No-Mail"),
                 ('d',"directory", "Dinstall::Options::Directory", "HasArg")]

    # Pre-seed every option key so later lookups never KeyError.
    for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
              "override-distribution", "version", "directory"]:
        Cnf["Dinstall::Options::%s" % (i)] = ""

    changes_files = apt_pkg.ParseCommandLine(Cnf,Arguments,sys.argv)
    Options = Cnf.SubTree("Dinstall::Options")

    # If we have a directory flag, use it to find our files
    if Cnf["Dinstall::Options::Directory"] != "":
        # Note that we clobber the list of files we were given in this case
        # so warn if the user has done both
        if len(changes_files) > 0:
            utils.warn("Directory provided so ignoring files given on command line")

        changes_files = utils.get_changes_files(Cnf["Dinstall::Options::Directory"])

    # The Upload object carries the per-package state; the module-level
    # aliases below exist for historical reasons (see comment above).
    Upload = queue.Upload(Cnf)

    changes = Upload.pkg.changes
    dsc_files = Upload.pkg.dsc_files
    files = Upload.pkg.files
126 ################################################################################
def usage (exit_code=0):
    """Print command-line usage for dinstall.

    NOTE(review): the statement that actually exits with exit_code is not
    shown in this chunk.
    """
    print """Usage: dinstall [OPTION]... [CHANGES]...
  -a, --automatic           automatic run
  -h, --help                show this help and exit.
  -n, --no-action           don't do anything
  -p, --no-lock             don't check lockfile !! for cron.daily only !!
  -s, --no-mail             don't send any mail
  -V, --version             display the version number and exit"""
138 ################################################################################
def reject (str, prefix="Rejected: "):
    """Record a rejection reason.

    Appends *prefix* followed by *str* and a newline to the module-global
    reject_message buffer, which is reported to the uploader later.
    """
    global reject_message
    reject_message = "%s%s%s\n" % (reject_message, prefix, str)
145 ################################################################################
def copy_to_holding(filename):
    # Copy 'filename' into the Dir::Queue::Holding area, refusing to
    # overwrite an existing file, and record it in the in_holding map so
    # clean_holding() can remove it later.
    # NOTE(review): this chunk elides some original lines (the try:/except
    # headers and some branches); only the visible statements are shown.
    base_filename = os.path.basename(filename)

    dest = Cnf["Dir::Queue::Holding"] + '/' + base_filename
        # O_EXCL makes the open fail rather than clobber an existing file.
        fd = os.open(dest, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0640)
        # Shouldn't happen, but will if, for example, someone lists a
        # file twice in the .changes.
        if errno.errorcode[e.errno] == 'EEXIST':
            reject("%s: already exists in holding area; can not overwrite." % (base_filename))
        shutil.copy(filename, dest)
        # In either case (ENOENT or EACCES) we want to remove the
        # O_CREAT | O_EXCLed ghost file, so add the file to the list
        # of 'in holding' even if it's not the real file.
        if errno.errorcode[e.errno] == 'ENOENT':
            reject("%s: can not copy to holding area: file not found." % (base_filename))
        elif errno.errorcode[e.errno] == 'EACCES':
            reject("%s: can not copy to holding area: read permission denied." % (base_filename))
    # Remember the file so the holding area can be cleaned up afterwards.
    in_holding[base_filename] = ""
182 ################################################################################
    # NOTE(review): body fragment of clean_holding(); the def line and some
    # statements are elided from this chunk.  Removes every file previously
    # recorded in in_holding from the holding directory.
    os.chdir(Cnf["Dir::Queue::Holding"])
    for f in in_holding.keys():
        if os.path.exists(f):
            # Paranoia: in_holding should only ever contain bare basenames.
            if f.find('/') != -1:
                utils.fubar("WTF? clean_holding() got a file ('%s') with / in it!" % (f))
198 ################################################################################
    # NOTE(review): body fragment of check_changes(); the def line, try:
    # headers and some branches are elided from this chunk.
    filename = pkg.changes_file

    # Parse the .changes field into a dictionary
        changes.update(utils.parse_changes(filename))
    except CantOpenError:
        reject("%s: can't read file." % (filename))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (filename, line))
    except ChangesUnicodeError:
        reject("%s: changes file not proper utf-8" % (filename))

    # Parse the Files field from the .changes into another dictionary
        files.update(utils.build_file_list(changes))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (filename, line))
    except UnknownFormatError, format:
        reject("%s: unknown format '%s'." % (filename, format))

    # Check for mandatory fields
    for i in ("source", "binary", "architecture", "version", "distribution",
              "maintainer", "files", "changes", "description"):
        if not changes.has_key(i):
            reject("%s: Missing mandatory field `%s'." % (filename, i))
            return 0 # Avoid <undef> errors during later tests

    # Strip a source version in brackets from the source field
    if re_strip_srcver.search(changes["source"]):
        changes["source"] = re_strip_srcver.sub('', changes["source"])

    # Ensure the source field is a valid package name.
    if not re_valid_pkg_name.match(changes["source"]):
        reject("%s: invalid source name '%s'." % (filename, changes["source"]))

    # Split multi-value fields into a lower-level dictionary
    for i in ("architecture", "distribution", "binary", "closes"):
        o = changes.get(i, "")

    # Fix the Maintainer: field to be RFC822/2047 compatible
        (changes["maintainer822"], changes["maintainer2047"],
         changes["maintainername"], changes["maintaineremail"]) = \
         utils.fix_maintainer (changes["maintainer"])
    except ParseMaintError, msg:
        reject("%s: Maintainer field ('%s') failed to parse: %s" \
               % (filename, changes["maintainer"], msg))

    # ...likewise for the Changed-By: field if it exists.
        (changes["changedby822"], changes["changedby2047"],
         changes["changedbyname"], changes["changedbyemail"]) = \
         utils.fix_maintainer (changes.get("changed-by", ""))
    except ParseMaintError, msg:
        # On parse failure the changedby* fields are reset (the fallback
        # value assigned here is on a line elided from this chunk).
        (changes["changedby822"], changes["changedby2047"],
         changes["changedbyname"], changes["changedbyemail"]) = \
        reject("%s: Changed-By field ('%s') failed to parse: %s" \
               % (filename, changes["changed-by"], msg))

    # Ensure all the values in Closes: are numbers
    if changes.has_key("closes"):
        for i in changes["closes"].keys():
            if re_isanum.match (i) == None:
                reject("%s: `%s' from Closes field isn't a number." % (filename, i))

    # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
    changes["chopversion"] = re_no_epoch.sub('', changes["version"])
    changes["chopversion2"] = re_no_revision.sub('', changes["chopversion"])

    # Check there isn't already a changes file of the same name in one
    # of the queue directories.
    base_filename = os.path.basename(filename)
    for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
        if os.path.exists(Cnf["Dir::Queue::%s" % (d) ]+'/'+base_filename):
            reject("%s: a file with this name already exists in the %s directory." % (base_filename, d))

    # Check the .changes is non-empty
        reject("%s: nothing to do (Files field is empty)." % (base_filename))
295 ################################################################################
def check_distributions():
    "Check and map the Distribution field of a .changes file."
    # NOTE(review): some original lines are elided from this chunk —
    # notably the statements that split each mapping 'm' into 'args' and
    # take 'mtype' from args[0], and the line binding 'suite' for the
    # ignore/reject cases.

    # Handle suite mappings
    for m in Cnf.ValueList("SuiteMappings"):
        if mtype == "map" or mtype == "silent-map":
            # Replace the 'source' suite with 'dest' in the target set.
            (source, dest) = args[1:3]
            if changes["distribution"].has_key(source):
                del changes["distribution"][source]
                changes["distribution"][dest] = 1
                if mtype != "silent-map":
                    reject("Mapping %s to %s." % (source, dest),"")
            if changes.has_key("distribution-version"):
                if changes["distribution-version"].has_key(source):
                    changes["distribution-version"][source]=dest
        elif mtype == "map-unreleased":
            (source, dest) = args[1:3]
            if changes["distribution"].has_key(source):
                for arch in changes["architecture"].keys():
                    # Map to 'dest' if any architecture is not built in
                    # the (released) source suite.
                    if arch not in DBConn().get_suite_architectures(source):
                        reject("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch),"")
                        del changes["distribution"][source]
                        changes["distribution"][dest] = 1
        elif mtype == "ignore":
            if changes["distribution"].has_key(suite):
                del changes["distribution"][suite]
                reject("Ignoring %s as a target suite." % (suite), "Warning: ")
        elif mtype == "reject":
            if changes["distribution"].has_key(suite):
                reject("Uploads to %s are not accepted." % (suite))
        elif mtype == "propup-version":
            # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
            # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
            if changes["distribution"].has_key(args[1]):
                changes.setdefault("distribution-version", {})
                for suite in args[2:]: changes["distribution-version"][suite]=suite

    # Ensure there is (still) a target distribution
    if changes["distribution"].keys() == []:
        reject("no valid distribution.")

    # Ensure target distributions exist
    for suite in changes["distribution"].keys():
        if not Cnf.has_key("Suite::%s" % (suite)):
            reject("Unknown distribution `%s'." % (suite))
349 ################################################################################
    # NOTE(review): body fragment of check_files(); the def line, the
    # "for f in file_keys:" loop header, several try:/else:/elif headers
    # and other statements are elided from this chunk.  Only the visible
    # statements are shown, indented per their apparent nesting.
    archive = utils.where_am_i()
    file_keys = files.keys()

    # if reprocess is 2 we've already done this and we're checking
    # things again for the new .orig.tar.gz.
    # [Yes, I'm fully aware of how disgusting this is]
    if not Options["No-Action"] and reprocess < 2:
        os.chdir(pkg.directory)

    # Check there isn't already a .changes or .dak file of the same name in
    # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
    # [NB: this check must be done post-suite mapping]
    base_filename = os.path.basename(pkg.changes_file)
    dot_dak_filename = base_filename[:-8]+".dak"
    for suite in changes["distribution"].keys():
        copychanges = "Suite::%s::CopyChanges" % (suite)
        if Cnf.has_key(copychanges) and \
               os.path.exists(Cnf[copychanges]+"/"+base_filename):
            reject("%s: a file with this name already exists in %s" \
                   % (base_filename, Cnf[copychanges]))

        copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
        if Cnf.has_key(copy_dot_dak) and \
               os.path.exists(Cnf[copy_dot_dak]+"/"+dot_dak_filename):
            reject("%s: a file with this name already exists in %s" \
                   % (dot_dak_filename, Cnf[copy_dot_dak]))

    cursor = DBConn().cursor()
    # Check for packages that have moved from one component to another
    # STU: this should probably be changed to not join on architecture, suite tables but instead to used their cached name->id mappings from DBConn
    # Server-side prepared statement, EXECUTEd once per file below.
    cursor.execute("""PREPARE moved_pkg_q(text,text,text) AS
        SELECT c.name FROM binaries b, bin_associations ba, suite s, location l,
                    component c, architecture a, files f
        WHERE b.package = $1 AND s.suite_name = $2
          AND (a.arch_string = $3 OR a.arch_string = 'all')
          AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
          AND f.location = l.id
          AND l.component = c.id
          AND b.file = f.id""")

        # Ensure the file does not already exist in one of the accepted directories
        for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
            if not Cnf.has_key("Dir::Queue::%s" % (d)): continue
            if os.path.exists(Cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                reject("%s file already exists in the %s directory." % (f, d))
        if not re_taint_free.match(f):
            reject("!!WARNING!! tainted filename: '%s'." % (f))
        # Check the file is readable
        if os.access(f, os.R_OK) == 0:
            # When running in -n, copy_to_holding() won't have
            # generated the reject_message, so we need to.
            if Options["No-Action"]:
                if os.path.exists(f):
                    reject("Can't read `%s'. [permission denied]" % (f))
                reject("Can't read `%s'. [file not found]" % (f))
            files[f]["type"] = "unreadable"
        # If it's byhand skip remaining checks
        if files[f]["section"] == "byhand" or files[f]["section"][:4] == "raw-":
            files[f]["byhand"] = 1
            files[f]["type"] = "byhand"
        # Checks for a binary package...
        elif re_isadeb.match(f):
            files[f]["type"] = "deb"

            # Extract package control information
            deb_file = utils.open_file(f)
                control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
                reject("%s: debExtractControl() raised %s." % (f, sys.exc_type))
                # Can't continue, none of the checks on control would work.

            # Check for mandatory fields
            for field in [ "Package", "Architecture", "Version" ]:
                if control.Find(field) == None:
                    reject("%s: No %s field in control." % (f, field))

            # Ensure the package name matches the one give in the .changes
            if not changes["binary"].has_key(control.Find("Package", "")):
                reject("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

            # Validate the package field
            package = control.Find("Package")
            if not re_valid_pkg_name.match(package):
                reject("%s: invalid package name '%s'." % (f, package))

            # Validate the version field
            version = control.Find("Version")
            if not re_valid_version.match(version):
                reject("%s: invalid version number '%s'." % (f, version))

            # Ensure the architecture of the .deb is one we know about.
            default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
            architecture = control.Find("Architecture")
            upload_suite = changes["distribution"].keys()[0]
            if architecture not in DBConn().get_suite_architectures(default_suite) and architecture not in DBConn().get_suite_architectures(upload_suite):
                reject("Unknown architecture '%s'." % (architecture))

            # Ensure the architecture of the .deb is one of the ones
            # listed in the .changes.
            if not changes["architecture"].has_key(architecture):
                reject("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

            # Sanity-check the Depends field
            depends = control.Find("Depends")
                reject("%s: Depends field is empty." % (f))

            # Sanity-check the Provides field
            provides = control.Find("Provides")
                provide = re_spacestrip.sub('', provides)
                    reject("%s: Provides field is empty." % (f))
                prov_list = provide.split(",")
                for prov in prov_list:
                    if not re_valid_pkg_name.match(prov):
                        reject("%s: Invalid Provides field content %s." % (f, prov))

            # Check the section & priority match those given in the .changes (non-fatal)
            if control.Find("Section") and files[f]["section"] != "" and files[f]["section"] != control.Find("Section"):
                reject("%s control file lists section as `%s', but changes file has `%s'." % (f, control.Find("Section", ""), files[f]["section"]), "Warning: ")
            if control.Find("Priority") and files[f]["priority"] != "" and files[f]["priority"] != control.Find("Priority"):
                reject("%s control file lists priority as `%s', but changes file has `%s'." % (f, control.Find("Priority", ""), files[f]["priority"]),"Warning: ")

            files[f]["package"] = package
            files[f]["architecture"] = architecture
            files[f]["version"] = version
            files[f]["maintainer"] = control.Find("Maintainer", "")
            if f.endswith(".udeb"):
                files[f]["dbtype"] = "udeb"
            elif f.endswith(".deb"):
                files[f]["dbtype"] = "deb"
                reject("%s is neither a .deb or a .udeb." % (f))
            files[f]["source"] = control.Find("Source", files[f]["package"])
            # Get the source version
            source = files[f]["source"]
            # A "(version)" suffix in Source overrides the binary version.
            if source.find("(") != -1:
                m = re_extract_src_version.match(source)
                source_version = m.group(2)
            if not source_version:
                source_version = files[f]["version"]
            files[f]["source package"] = source
            files[f]["source version"] = source_version

            # Ensure the filename matches the contents of the .deb
            m = re_isadeb.match(f)
            file_package = m.group(1)
            if files[f]["package"] != file_package:
                reject("%s: package part of filename (%s) does not match package name in the %s (%s)." % (f, file_package, files[f]["dbtype"], files[f]["package"]))
            epochless_version = re_no_epoch.sub('', control.Find("Version"))
            file_version = m.group(2)
            if epochless_version != file_version:
                reject("%s: version part of filename (%s) does not match package version in the %s (%s)." % (f, file_version, files[f]["dbtype"], epochless_version))
            file_architecture = m.group(3)
            if files[f]["architecture"] != file_architecture:
                reject("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % (f, file_architecture, files[f]["dbtype"], files[f]["architecture"]))

            # Check for existent source
            source_version = files[f]["source version"]
            source_package = files[f]["source package"]
            if changes["architecture"].has_key("source"):
                if source_version != changes["version"]:
                    reject("source version (%s) for %s doesn't match changes version %s." % (source_version, f, changes["version"]))
                # Check in the SQL database
                if not Upload.source_exists(source_package, source_version, changes["distribution"].keys()):
                    # Check in one of the other directories
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    if os.path.exists(Cnf["Dir::Queue::Byhand"] + '/' + dsc_filename):
                        files[f]["byhand"] = 1
                    elif os.path.exists(Cnf["Dir::Queue::New"] + '/' + dsc_filename):
                        for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                            if Cnf.has_key("Dir::Queue::%s" % (myq)):
                                if os.path.exists(Cnf["Dir::Queue::"+myq] + '/' + dsc_filename):
                        if not dsc_file_exists:
                            reject("no source found for %s %s (%s)." % (source_package, source_version, f))
            # Check the version and for file overwrites
            reject(Upload.check_binary_against_db(f),"")

            Binary(f, reject).scan_package( )

        # Checks for a source package...
            m = re_issource.match(f)
            files[f]["package"] = m.group(1)
            files[f]["version"] = m.group(2)
            files[f]["type"] = m.group(3)

            # Ensure the source package name matches the Source filed in the .changes
            if changes["source"] != files[f]["package"]:
                reject("%s: changes file doesn't say %s for Source" % (f, files[f]["package"]))

            # Ensure the source version matches the version in the .changes file
            if files[f]["type"] == "orig.tar.gz":
                changes_version = changes["chopversion2"]
                changes_version = changes["chopversion"]
            if changes_version != files[f]["version"]:
                reject("%s: should be %s according to changes file." % (f, changes_version))

            # Ensure the .changes lists source in the Architecture field
            if not changes["architecture"].has_key("source"):
                reject("%s: changes file doesn't list `source' in Architecture field." % (f))

            # Check the signature of a .dsc file
            if files[f]["type"] == "dsc":
                dsc["fingerprint"] = utils.check_signature(f, reject)

            files[f]["architecture"] = "source"

        # Not a binary or source package?  Assume byhand...
            files[f]["byhand"] = 1
            files[f]["type"] = "byhand"

        # Per-suite file checks
        files[f]["oldfiles"] = {}
        for suite in changes["distribution"].keys():
            # Skip byhand
            if files[f].has_key("byhand"):

            # Handle component mappings
            for m in Cnf.ValueList("ComponentMappings"):
                (source, dest) = m.split()
                if files[f]["component"] == source:
                    files[f]["original component"] = source
                    files[f]["component"] = dest

            # Ensure the component is valid for the target suite
            if Cnf.has_key("Suite:%s::Components" % (suite)) and \
               files[f]["component"] not in Cnf.ValueList("Suite::%s::Components" % (suite)):
                reject("unknown component `%s' for suite `%s'." % (files[f]["component"], suite))

            # Validate the component
            component = files[f]["component"]
            component_id = DBConn().get_component_id(component)
            if component_id == -1:
                reject("file '%s' has unknown component '%s'." % (f, component))

            # See if the package is NEW
            if not Upload.in_override_p(files[f]["package"], files[f]["component"], suite, files[f].get("dbtype",""), f):

            # Validate the priority
            if files[f]["priority"].find('/') != -1:
                reject("file '%s' has invalid priority '%s' [contains '/']." % (f, files[f]["priority"]))

            # Determine the location
            location = Cnf["Dir::Pool"]
            location_id = DBConn().get_location_id(location, component, archive)
            if location_id == -1:
                reject("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
            files[f]["location id"] = location_id

            # Check the md5sum & size against existing files (if any)
            files[f]["pool name"] = utils.poolify (changes["source"], files[f]["component"])
            files_id = DBConn().get_files_id(files[f]["pool name"] + f, files[f]["size"], files[f]["md5sum"], files[f]["location id"])
                reject("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
                reject("md5sum and/or size mismatch on existing copy of %s." % (f))
            files[f]["files id"] = files_id

            # Check for packages that have moved from one component to another
            files[f]['suite'] = suite
            cursor.execute("""EXECUTE moved_pkg_q( %(package)s, %(suite)s, %(architecture)s )""", ( files[f] ) )
            ql = cursor.fetchone()
                files[f]["othercomponents"] = ql[0][0]

    # If the .changes file says it has source, it must have source.
    if changes["architecture"].has_key("source"):
            reject("no source found and Architecture line in changes mention source.")
        if not has_binaries and Cnf.FindB("Dinstall::Reject::NoSourceOnly"):
            reject("source only uploads are not supported.")
668 ###############################################################################
    # NOTE(review): body fragment of check_dsc(); the def line, try:
    # headers and several branches are elided from this chunk.
    # Ensure there is source to check
    if not changes["architecture"].has_key("source"):

    # Find the .dsc among the upload's files.
    for f in files.keys():
        if files[f]["type"] == "dsc":
                reject("can not process a .changes file with multiple .dsc's.")

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        reject("source uploads must contain a dsc file")

    # Parse the .dsc file
        dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
    except CantOpenError:
        # if not -n copy_to_holding() will have done this for us...
        if Options["No-Action"]:
            reject("%s: can't read file." % (dsc_filename))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
    except InvalidDscError, line:
        reject("%s: syntax error on line %s." % (dsc_filename, line))
    except ChangesUnicodeError:
        reject("%s: dsc file not proper utf-8." % (dsc_filename))

    # Build up the file list of files mentioned by the .dsc
        dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
    except NoFilesFieldError:
        reject("%s: no Files: field." % (dsc_filename))
    except UnknownFormatError, format:
        reject("%s: unknown format '%s'." % (dsc_filename, format))
    except ParseChangesError, line:
        reject("%s: parse error, can't grok: %s." % (dsc_filename, line))

    # Enforce mandatory fields
    for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
        if not dsc.has_key(i):
            reject("%s: missing mandatory field `%s'." % (dsc_filename, i))

    # Validate the source and version fields
    if not re_valid_pkg_name.match(dsc["source"]):
        reject("%s: invalid source name '%s'." % (dsc_filename, dsc["source"]))
    if not re_valid_version.match(dsc["version"]):
        reject("%s: invalid version number '%s'." % (dsc_filename, dsc["version"]))

    # Bumping the version number of the .dsc breaks extraction by stable's
    # dpkg-source.  So let's not do that...
    if dsc["format"] != "1.0":
        reject("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

    # Validate the Maintainer field
        utils.fix_maintainer (dsc["maintainer"])
    except ParseMaintError, msg:
        reject("%s: Maintainer field ('%s') failed to parse: %s" \
               % (dsc_filename, dsc["maintainer"], msg))

    # Validate the build-depends field(s)
    for field_name in [ "build-depends", "build-depends-indep" ]:
        field = dsc.get(field_name)
            # Check for broken dpkg-dev lossage...
            if field.startswith("ARRAY"):
                reject("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % (dsc_filename, field_name.title()))

            # Have apt try to parse them...
                apt_pkg.ParseSrcDepends(field)
                reject("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

    # Ensure the version number in the .dsc matches the version number in the .changes
    epochless_dsc_version = re_no_epoch.sub('', dsc["version"])
    changes_version = files[dsc_filename]["version"]
    if epochless_dsc_version != files[dsc_filename]["version"]:
        reject("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

    # Ensure there is a .tar.gz in the .dsc file
    for f in dsc_files.keys():
        m = re_issource.match(f)
            reject("%s: %s in Files field not recognised as source." % (dsc_filename, f))
        if ftype == "orig.tar.gz" or ftype == "tar.gz":
        reject("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))

    # Ensure source is newer than existing source in target suites
    reject(Upload.check_source_against_db(dsc_filename),"")

    # Resolve the .orig.tar.gz against the database / incoming.
    (reject_msg, is_in_incoming) = Upload.check_dsc_against_db(dsc_filename)
    reject(reject_msg, "")
        if not Options["No-Action"]:
            copy_to_holding(is_in_incoming)
        # Propagate the orig.tar.gz's metadata into the files map so later
        # checks treat it like any other upload file.
        orig_tar_gz = os.path.basename(is_in_incoming)
        files[orig_tar_gz] = {}
        files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
        files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
        files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
        files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
        files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
        files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
        files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
        files[orig_tar_gz]["type"] = "orig.tar.gz"
799 ################################################################################
def get_changelog_versions(source_dir):
    """Extract the source package and (optionally) grab the
    version history out of debian/changelog for the BTS.

    NOTE(review): some original lines are elided from this chunk
    (branch bodies, 'dsc_filename' binding, the result check after
    dpkg-source, the 'ftype' binding).
    """

    # Find the .dsc (again)
    for f in files.keys():
        if files[f]["type"] == "dsc":

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)

    # Create a symlink mirror of the source files in our temporary directory
    for f in files.keys():
        m = re_issource.match(f)
            src = os.path.join(source_dir, f)
            # If a file is missing for whatever reason, give up.
            if not os.path.exists(src):
            if ftype == "orig.tar.gz" and pkg.orig_tar_gz:
            dest = os.path.join(os.getcwd(), f)
            os.symlink(src, dest)

    # If the orig.tar.gz is not a part of the upload, create a symlink to the
        dest = os.path.join(os.getcwd(), os.path.basename(pkg.orig_tar_gz))
        os.symlink(pkg.orig_tar_gz, dest)

    # Extract the source (-sn: no copy of the orig, -x: extract).
    cmd = "dpkg-source -sn -x %s" % (dsc_filename)
    (result, output) = commands.getstatusoutput(cmd)
        reject("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
        reject(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")

    # BTS version tracking is optional; nothing more to do without it.
    if not Cnf.Find("Dir::Queue::BTSVersionTrack"):

    # Get the upstream version
    upstr_version = re_no_epoch.sub('', dsc["version"])
    if re_strip_revision.search(upstr_version):
        upstr_version = re_strip_revision.sub('', upstr_version)

    # Ensure the changelog file exists
    changelog_filename = "%s-%s/debian/changelog" % (dsc["source"], upstr_version)
    if not os.path.exists(changelog_filename):
        reject("%s: debian/changelog not found in extracted source." % (dsc_filename))

    # Parse the changelog
    dsc["bts changelog"] = ""
    changelog_file = utils.open_file(changelog_filename)
    for line in changelog_file.readlines():
        m = re_changelog_versions.match(line)
            dsc["bts changelog"] += line
    changelog_file.close()

    # Check we found at least one revision in the changelog
    if not dsc["bts changelog"]:
        reject("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
870 ########################################
    # NOTE(review): body fragment of check_source(); the def line, the
    # "bail out if" sentence opening this comment, try:/except headers and
    # the chdir statements are elided from this chunk.
    # a) there's no source
    # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
    # or c) the orig.tar.gz is MIA
    if not changes["architecture"].has_key("source") or reprocess == 2 \
       or pkg.orig_tar_gz == -1:

    tmpdir = utils.temp_dirname()

    # Move into the temporary directory

    # Get the changelog version history
    get_changelog_versions(cwd)

    # Move back and cleanup the temporary tree
        shutil.rmtree(tmpdir)
        # EACCES is handled below by fixing permissions and retrying;
        # anything else is fatal.
        if errno.errorcode[e.errno] != 'EACCES':
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))

        reject("%s: source tree could not be cleanly removed." % (dsc["source"]))
        # We probably have u-r or u-w directories so chmod everything
        cmd = "chmod -R u+rwx %s" % (tmpdir)
        result = os.system(cmd)
            utils.fubar("'%s' failed with result %s." % (cmd, result))
        shutil.rmtree(tmpdir)
        utils.fubar("%s: couldn't remove tmp dir for source tree." % (dsc["source"]))
909 ################################################################################
911 # FIXME: should be a debian specific check called from a hook
def check_urgency ():
    """Normalise the Urgency field of a source upload.

    A missing urgency is filled in from Urgency::Default; the value is
    lower-cased, and anything outside Urgency::Valid is warned about and
    replaced by the default.  Binary-only uploads are left untouched.
    """
    # Urgency is only meaningful when the upload carries source.
    if not changes["architecture"].has_key("source"):
        return
    if not changes.has_key("urgency"):
        changes["urgency"] = Cnf["Urgency::Default"]
    # Comparison against the valid list is case-insensitive.
    changes["urgency"] = changes["urgency"].lower()
    if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
        reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
        changes["urgency"] = Cnf["Urgency::Default"]
922 ################################################################################
# Body of the hash/size verification (its "def" line falls in a gap of
# this excerpt): verify the md5sums and sizes advertised by the .changes
# and .dsc against the files on disk.
925 utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
926 utils.check_size(".changes", files)
927 utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
928 utils.check_size(".dsc", dsc_files)
930 # This is stupid API, but it'll have to do for now until
931 # we actually have proper abstraction
# ensure_hashes yields human-readable problem strings; presumably each
# is passed to reject() in a line not shown here -- confirm.
932 for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
935 ################################################################################
937 # Sanity check the time stamps of files inside debs.
938 # [Files in the near future cause ugly warnings and extreme time
939 # travel can cause errors on extraction]
941 def check_timestamps():
# Sanity-check mtimes inside each .deb: files from the far future or the
# distant past break extraction or produce ugly warnings.  The helper
# class header ("class Tar:" or similar) falls in a gap of this excerpt.
943 def __init__(self, future_cutoff, past_cutoff):
945 self.future_cutoff = future_cutoff
946 self.past_cutoff = past_cutoff
# Maps of member name -> offending mtime, filled by callback().
949 self.future_files = {}
950 self.ancient_files = {}
# apt_inst extraction callback; only Name and MTime are consulted here.
952 def callback(self, Kind,Name,Link,Mode,UID,GID,Size,MTime,Major,Minor):
953 if MTime > self.future_cutoff:
954 self.future_files[Name] = MTime
955 if MTime < self.past_cutoff:
956 self.ancient_files[Name] = MTime
# "Future" is anything newer than now plus the configured grace period;
# "ancient" is anything before the first second of the cutoff year.
959 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
960 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
961 tar = Tar(future_cutoff, past_cutoff)
962 for filename in files.keys():
963 if files[filename]["type"] == "deb":
966 deb_file = utils.open_file(filename)
967 apt_inst.debExtract(deb_file,tar.callback,"control.tar.gz")
# NOTE(review): deb_file is presumably rewound (seek(0)) between the two
# extractions in a line not shown here -- confirm.
970 apt_inst.debExtract(deb_file,tar.callback,"data.tar.gz")
971 except SystemError, e:
972 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
973 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
976 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
# Report one example per category rather than the whole list.
979 future_files = tar.future_files.keys()
981 num_future_files = len(future_files)
982 future_file = future_files[0]
983 future_date = tar.future_files[future_file]
984 reject("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
985 % (filename, num_future_files, future_file,
986 time.ctime(future_date)))
988 ancient_files = tar.ancient_files.keys()
990 num_ancient_files = len(ancient_files)
991 ancient_file = ancient_files[0]
992 ancient_date = tar.ancient_files[ancient_file]
993 reject("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
994 % (filename, num_ancient_files, ancient_file,
995 time.ctime(ancient_date)))
# Catch-all for other extraction failures; sys.exc_type/exc_value are
# the (long-deprecated) Python 2 global exception accessors.
997 reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
999 ################################################################################
def lookup_uid_from_fingerprint(fpr):
    """
    Return the (uid, name, isdm) tuple for a given GPG fingerprint.

    @type fpr: string
    @param fpr: a 40 byte GPG fingerprint

    @return: the (uid, name, isdm) row from the database, or
             (None, None, None) when the fingerprint is unknown.
    """
    cursor = DBConn().cursor()
    # Bind the fingerprint as a query parameter instead of interpolating
    # it into the SQL text with % -- the original string-formatting
    # approach was open to SQL injection via a crafted fingerprint.
    # Parameter style mirrors the other queries in this file.
    cursor.execute("SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = %(fpr)s",
                   {'fpr': fpr})
    qs = cursor.fetchone()
    if qs:
        return qs
    return (None, None, None)
1018 def check_signed_by_key():
1019 """Ensure the .changes is signed by an authorized uploader."""
1021 (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
1022 if uid_name == None:
1025 # match claimed name with actual name:
# No uid in the db for this key: fall back to using the fingerprint as
# the identity and assume a plain maintainer upload.
1027 uid, uid_email = changes["fingerprint"], uid
1028 may_nmu, may_sponsor = 1, 1
1029 # XXX by default new dds don't have a fingerprint/uid in the db atm,
1030 # and can't get one in there if we don't allow nmu/sponsorship
# Debian Maintainers may neither NMU nor sponsor.
1033 may_nmu, may_sponsor = 0, 0
1035 uid_email = "%s@debian.org" % (uid)
1036 may_nmu, may_sponsor = 1, 1
# Decide whether this upload is sponsored by comparing the key's
# identity against Maintainer:/Changed-By: (assignments to `sponsored`
# fall in lines not shown here).
1038 if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
1040 elif uid_name in [changes["maintainername"], changes["changedbyname"]]:
1042 if uid_name == "": sponsored = 1
# Record the sponsor address when the signer uploads via a mail alias.
1045 if ("source" in changes["architecture"] and
1046 uid_email and utils.is_email_alias(uid_email)):
1047 sponsor_addresses = utils.gpg_get_key_addresses(changes["fingerprint"])
1048 if (changes["maintaineremail"] not in sponsor_addresses and
1049 changes["changedbyemail"] not in sponsor_addresses):
1050 changes["sponsoremail"] = uid_email
1052 if sponsored and not may_sponsor:
1053 reject("%s is not authorised to sponsor uploads" % (uid))
# DM path: the signer may not NMU, so verify DM-Upload-Allowed and
# Maintainer/Uploaders membership.  NOTE(review): `cursor` is created in
# a line not shown here -- confirm.
1055 if not sponsored and not may_nmu:
1057 cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
# Find the highest version of the source with DM-Upload-Allowed set.
1059 highest_sid, highest_version = None, None
1061 should_reject = True
1063 si = cursor.fetchone()
1067 if highest_version == None or apt_pkg.VersionCompare(si[1], highest_version) == 1:
1069 highest_version = si[1]
1071 if highest_sid == None:
1072 reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])
# NOTE(review): highest_sid is interpolated into the SQL with %; it is
# an id fetched from the database (not user input), but parameterising
# it would be safer and more consistent with the other queries.
1075 cursor.execute("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
1078 m = cursor.fetchone()
1082 (rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
1083 if email == uid_email or name == uid_name:
1087 if should_reject == True:
1088 reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))
# A DM may not hijack binary package names owned by another source.
1090 for b in changes["binary"].keys():
1091 for suite in changes["distribution"].keys():
1092 suite_id = DBConn().get_suite_id(suite)
1094 cursor.execute("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = %(package)s AND ba.suite = %(suite)s" , {'package':b, 'suite':suite_id} )
1096 s = cursor.fetchone()
1100 if s[0] != changes["source"]:
1101 reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
# DMs may not upload BYHAND or NEW files at all.
1103 for f in files.keys():
1104 if files[f].has_key("byhand"):
1105 reject("%s may not upload BYHAND file %s" % (uid, f))
1106 if files[f].has_key("new"):
1107 reject("%s may not upload NEW file %s" % (uid, f))
1110 ################################################################################
1111 ################################################################################
1113 # If any file of an upload has a recent mtime then chances are good
1114 # the file is still being uploaded.
1116 def upload_too_new():
# Return "true" when any file of the upload was modified within the
# configured Dinstall::SkipTime window, i.e. the upload may still be in
# progress.  The loop header and return statements fall in lines not
# shown in this excerpt.
1118 # Move back to the original directory to get accurate time stamps
1120 os.chdir(pkg.directory)
1121 file_list = pkg.files.keys()
1122 file_list.extend(pkg.dsc_files.keys())
1123 file_list.append(pkg.changes_file)
# presumably inside "for f in file_list:" -- confirm against the full
# source.
1126 last_modified = time.time()-os.path.getmtime(f)
1127 if last_modified < int(Cnf["Dinstall::SkipTime"]):
1135 ################################################################################
# Body of the per-upload action dispatcher (its "def" line falls in a
# gap of this excerpt): decide between reject, one of the holding
# queues, or plain accept, prompting the operator unless --automatic.
1138 # changes["distribution"] may not exist in corner cases
1139 # (e.g. unreadable changes files)
1140 if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
1141 changes["distribution"] = {}
1143 (summary, short_summary) = Upload.build_summaries()
1145 # q-unapproved hax0ring
# Queue name -> predicate ("is") and handler ("process") table.
1147 "New": { "is": is_new, "process": acknowledge_new },
1148 "Autobyhand" : { "is" : is_autobyhand, "process": do_autobyhand },
1149 "Byhand" : { "is": is_byhand, "process": do_byhand },
1150 "OldStableUpdate" : { "is": is_oldstableupdate,
1151 "process": do_oldstableupdate },
1152 "StableUpdate" : { "is": is_stableupdate, "process": do_stableupdate },
1153 "Unembargo" : { "is": is_unembargo, "process": queue_unembargo },
1154 "Embargo" : { "is": is_embargo, "process": queue_embargo },
# Predicates are tried in this order; the security queues participate
# only when Dinstall::SecurityQueueHandling is enabled.
1156 queues = [ "New", "Autobyhand", "Byhand" ]
1157 if Cnf.FindB("Dinstall::SecurityQueueHandling"):
1158 queues += [ "Unembargo", "Embargo" ]
1160 queues += [ "OldStableUpdate", "StableUpdate" ]
1162 (prompt, answer) = ("", "XXX")
1163 if Options["No-Action"] or Options["Automatic"]:
# Any accumulated "Rejected" text takes precedence over queueing.
1168 if reject_message.find("Rejected") != -1:
1169 if upload_too_new():
1170 print "SKIP (too new)\n" + reject_message,
1171 prompt = "[S]kip, Quit ?"
1173 print "REJECT\n" + reject_message,
1174 prompt = "[R]eject, Skip, Quit ?"
1175 if Options["Automatic"]:
1180 if queue_info[q]["is"]():
1184 print "%s for %s\n%s%s" % (
1185 qu.upper(), ", ".join(changes["distribution"].keys()),
1186 reject_message, summary),
# The queue's first letter keys the prompt unless it collides with one
# of the fixed R/Q/S/A answers, in which case "Divert" is offered.
1187 queuekey = qu[0].upper()
1188 if queuekey in "RQSA":
1190 prompt = "[D]ivert, Skip, Quit ?"
1192 prompt = "[%s]%s, Skip, Quit ?" % (queuekey, qu[1:].lower())
1193 if Options["Automatic"]:
1196 print "ACCEPT\n" + reject_message + summary,
1197 prompt = "[A]ccept, Skip, Quit ?"
1198 if Options["Automatic"]:
# Prompt until a valid answer; an empty reply takes the bracketed
# default extracted from the prompt by re_default_answer.
1201 while prompt.find(answer) == -1:
1202 answer = utils.our_raw_input(prompt)
1203 m = re_default_answer.match(prompt)
1206 answer = answer[:1].upper()
# Dispatch on the chosen answer (some branches fall in lines not shown).
1209 os.chdir (pkg.directory)
1210 Upload.do_reject(0, reject_message)
1212 accept(summary, short_summary)
1213 remove_from_unchecked()
1214 elif answer == queuekey:
1215 queue_info[qu]["process"](summary, short_summary)
1216 remove_from_unchecked()
def remove_from_unchecked():
    """Delete the upload's files and its .changes from the original
    (unchecked) queue directory once they have been moved elsewhere."""
    os.chdir (pkg.directory)
    for f in files.keys():
        os.unlink(f)
    os.unlink(pkg.changes_file)
1226 ################################################################################
def accept (summary, short_summary):
    """Accept the upload via the shared queue logic, then look for
    override disparities."""
    Upload.accept(summary, short_summary)
    Upload.check_override()
1232 ################################################################################
1234 def move_to_dir (dest, perms=0660, changesperms=0664):
1235 utils.move (pkg.changes_file, dest, perms=changesperms)
1236 file_keys = files.keys()
1238 utils.move (f, dest, perms=perms)
1240 ################################################################################
1242 def is_unembargo ():
# Decide whether this upload belongs in the UNEMBARGOED queue: either it
# is already recorded in the disembargo table, or it is a sourceful
# upload sitting in the disembargo queue directory (in which case it is
# recorded now).  The return statements fall in lines not shown here.
1243 cursor = DBConn().cursor()
1244 cursor.execute( "SELECT package FROM disembargo WHERE package = %(source)s AND version = %(version)s", changes )
1245 if cursor.fetchone():
# Resolve the disembargo queue directory to a canonical path for
# comparison with the package's directory.
1248 oldcwd = os.getcwd()
1249 os.chdir(Cnf["Dir::Queue::Disembargo"])
1250 disdir = os.getcwd()
# presumably chdir(oldcwd) restores the cwd in a line not shown here.
1253 if pkg.directory == disdir:
1254 if changes["architecture"].has_key("source"):
1255 if Options["No-Action"]: return 1
# NOTE(review): these placeholders are wrapped in single quotes; with a
# parameterised execute the driver quotes values itself, so the quotes
# are at best redundant -- confirm against the DB driver in use.
1257 cursor.execute( "INSERT INTO disembargo (package, version) VALUES ('%(package)s', '%(version)s')",
1259 cursor.execute( "COMMIT" )
1264 def queue_unembargo (summary, short_summary):
1265 print "Moving to UNEMBARGOED holding area."
1266 Logger.log(["Moving to unembargoed", pkg.changes_file])
1268 Upload.dump_vars(Cnf["Dir::Queue::Unembargoed"])
1269 move_to_dir(Cnf["Dir::Queue::Unembargoed"])
1270 Upload.queue_build("unembargoed", Cnf["Dir::Queue::Unembargoed"])
1272 # Check for override disparities
1273 Upload.Subst["__SUMMARY__"] = summary
1274 Upload.check_override()
1276 # Send accept mail, announce to lists, close bugs and check for
1277 # override disparities
1278 if not Cnf["Dinstall::Options::No-Mail"]:
1279 Upload.Subst["__SUITE__"] = ""
1280 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1281 utils.send_mail(mail_message)
1282 Upload.announce(short_summary, 1)
1284 ################################################################################
1287 # if embargoed queues are enabled always embargo
1290 def queue_embargo (summary, short_summary):
1291 print "Moving to EMBARGOED holding area."
1292 Logger.log(["Moving to embargoed", pkg.changes_file])
1294 Upload.dump_vars(Cnf["Dir::Queue::Embargoed"])
1295 move_to_dir(Cnf["Dir::Queue::Embargoed"])
1296 Upload.queue_build("embargoed", Cnf["Dir::Queue::Embargoed"])
1298 # Check for override disparities
1299 Upload.Subst["__SUMMARY__"] = summary
1300 Upload.check_override()
1302 # Send accept mail, announce to lists, close bugs and check for
1303 # override disparities
1304 if not Cnf["Dinstall::Options::No-Mail"]:
1305 Upload.Subst["__SUITE__"] = ""
1306 mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
1307 utils.send_mail(mail_message)
1308 Upload.announce(short_summary, 1)
1310 ################################################################################
1312 def is_stableupdate ():
# Decide whether the upload must be held in the proposed-updates queue;
# the actual return statements fall in lines not shown in this excerpt.
1313 if not changes["distribution"].has_key("proposed-updates"):
1316 if not changes["architecture"].has_key("source"):
1317 pusuite = DBConn().get_suite_id("proposed-updates")
1318 cursor = DBConn().cursor()
# Is this source/version pair already present in proposed-updates?
1319 cursor.execute( """SELECT 1 FROM source s
1320 JOIN src_associations sa ON (s.id = sa.source)
1321 WHERE s.source = %(source)s
# NOTE(review): the %(version)s placeholder below is wrapped in single
# quotes; with a parameterised execute the driver quotes values itself,
# so this likely produces a doubly-quoted literal that never matches --
# confirm and drop the quotes.
1322 AND s.version = '%(version)s'
1323 AND sa.suite = %(suite)d""",
1324 {'source' : changes['source'],
1325 'version' : changes['version'],
1328 if cursor.fetchone():
1329 # source is already in proposed-updates so no need to hold
1334 def do_stableupdate (summary, short_summary):
1335 print "Moving to PROPOSED-UPDATES holding area."
1336 Logger.log(["Moving to proposed-updates", pkg.changes_file])
1338 Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"])
1339 move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)
1341 # Check for override disparities
1342 Upload.Subst["__SUMMARY__"] = summary
1343 Upload.check_override()
1345 ################################################################################
def is_oldstableupdate ():
    """Return 1 when the upload targets oldstable-proposed-updates and
    must be held there, 0 otherwise.

    Sourceful uploads whose source/version pair is already present in
    the oldstable-proposed-updates suite do not need to be held again.
    """
    if not changes["distribution"].has_key("oldstable-proposed-updates"):
        return 0

    if not changes["architecture"].has_key("source"):
        pusuite = DBConn().get_suite_id("oldstable-proposed-updates")
        cursor = DBConn().cursor()
        # The original statement had a stray fourth quote (""""SELECT...),
        # which made the SQL text start with a literal '"', and used a
        # positional %d placeholder although the parameters are passed as
        # a mapping; both are fixed here, mirroring is_stableupdate().
        cursor.execute( """SELECT 1 FROM source s
                           JOIN src_associations sa ON (s.id = sa.source)
                           WHERE s.source = %(source)s
                           AND s.version = %(version)s
                           AND sa.suite = %(suite)d""",
                        {'source' : changes['source'],
                         'version' : changes['version'],
                         'suite' : pusuite})
        if cursor.fetchone():
            # Source is already in o-p-u, so no need to hold.
            return 0

    return 1
1367 def do_oldstableupdate (summary, short_summary):
1368 print "Moving to OLDSTABLE-PROPOSED-UPDATES holding area."
1369 Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file])
1371 Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"])
1372 move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664)
1374 # Check for override disparities
1375 Upload.Subst["__SUMMARY__"] = summary
1376 Upload.check_override()
1378 ################################################################################
1380 def is_autobyhand ():
# Return whether the upload qualifies for fully automatic byhand
# processing: at least one byhand file exists and every byhand file
# matches a configured AutomaticByHandPackages entry.  The any_auto /
# all_auto bookkeeping lines fall partly outside this excerpt.
1383 for f in files.keys():
1384 if files[f].has_key("byhand"):
1387 # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
1388 # don't contain underscores, and ARCH doesn't contain dots.
1389 # further VER matches the .changes Version:, and ARCH should be in
1390 # the .changes Architecture: list.
1391 if f.count("_") < 2:
1395 (pckg, ver, archext) = f.split("_", 2)
1396 if archext.count(".") < 1 or changes["version"] != ver:
# The package must be configured for auto-byhand and belong to the
# uploading source package.
1400 ABH = Cnf.SubTree("AutomaticByHandPackages")
1401 if not ABH.has_key(pckg) or \
1402 ABH["%s::Source" % (pckg)] != changes["source"]:
1403 print "not match %s %s" % (pckg, changes["source"])
1407 (arch, ext) = archext.split(".", 1)
1408 if arch not in changes["architecture"]:
# Remember how do_autobyhand() should process this file later.
1412 files[f]["byhand-arch"] = arch
1413 files[f]["byhand-script"] = ABH["%s::Script" % (pckg)]
1415 return any_auto and all_auto
1417 def do_autobyhand (summary, short_summary):
# Run the configured byhand script for each auto-byhand file; fall back
# to manual do_byhand() handling when any file fails or has no script.
1418 print "Attempting AUTOBYHAND."
1420 for f in files.keys():
1422 if not files[f].has_key("byhand"):
1424 if not files[f].has_key("byhand-script"):
1428 os.system("ls -l %s" % byhandfile)
# The script is invoked as: script file version arch changes-file.
# NOTE(review): arguments are passed through a shell via os.system();
# filenames with shell metacharacters would be unsafe here -- confirm
# they are validated upstream.
1429 result = os.system("%s %s %s %s %s" % (
1430 files[f]["byhand-script"], byhandfile,
1431 changes["version"], files[f]["byhand-arch"],
1432 os.path.abspath(pkg.changes_file)))
1434 os.unlink(byhandfile)
1437 print "Error processing %s, left as byhand." % (f)
# When everything was consumed by scripts, accept the rest of the
# upload; otherwise leave it for manual byhand processing.
1441 do_byhand(summary, short_summary)
1443 accept(summary, short_summary)
1445 ################################################################################
# Fragment of the byhand predicate (its "def" and return lines fall in
# gaps of this excerpt): an upload is byhand when any file is flagged.
1448 for f in files.keys():
1449 if files[f].has_key("byhand"):
1453 def do_byhand (summary, short_summary):
1454 print "Moving to BYHAND holding area."
1455 Logger.log(["Moving to byhand", pkg.changes_file])
1457 Upload.dump_vars(Cnf["Dir::Queue::Byhand"])
1458 move_to_dir(Cnf["Dir::Queue::Byhand"])
1460 # Check for override disparities
1461 Upload.Subst["__SUMMARY__"] = summary
1462 Upload.check_override()
1464 ################################################################################
# Fragment of the NEW predicate (its "def" and return lines fall in
# gaps of this excerpt): an upload is NEW when any file is flagged.
1467 for f in files.keys():
1468 if files[f].has_key("new"):
1472 def acknowledge_new (summary, short_summary):
1473 Subst = Upload.Subst
1475 print "Moving to NEW holding area."
1476 Logger.log(["Moving to new", pkg.changes_file])
1478 Upload.dump_vars(Cnf["Dir::Queue::New"])
1479 move_to_dir(Cnf["Dir::Queue::New"])
1481 if not Options["No-Mail"]:
1482 print "Sending new ack."
1483 Subst["__SUMMARY__"] = summary
1484 new_ack_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.new")
1485 utils.send_mail(new_ack_message)
1487 ################################################################################
1489 # reprocess is necessary for the case of foo_1.2-1 and foo_1.2-2 in
1490 # Incoming. -1 will reference the .orig.tar.gz, but -2 will not.
1491 # Upload.check_dsc_against_db() can find the .orig.tar.gz but it will
1492 # not have processed it during its checks of -2. If -1 has been
1493 # deleted or otherwise not checked by 'dak process-unchecked', the
1494 # .orig.tar.gz will not have been checked at all. To get round this,
1495 # we force the .orig.tar.gz into the .changes structure and reprocess
1496 # the .changes file.
1498 def process_it (changes_file):
# Fully check one .changes file: copy it into the holding area, verify
# the signature, then run the chain of check_* helpers.  Several lines
# of this function (globals reset, try/except scaffolding, the calls
# between the checks) fall in gaps of this excerpt.
1499 global reprocess, reject_message
1501 # Reset some globals
1504 # Some defaults in case we can't fully process the .changes file
1505 changes["maintainer2047"] = Cnf["Dinstall::MyEmailAddress"]
1506 changes["changedby2047"] = Cnf["Dinstall::MyEmailAddress"]
1509 # Absolutize the filename to avoid the requirement of being in the
1510 # same directory as the .changes file.
1511 pkg.changes_file = os.path.abspath(changes_file)
1513 # Remember where we are so we can come back after cd-ing into the
1514 # holding directory.
1515 pkg.directory = os.getcwd()
1518 # If this is the Real Thing(tm), copy things into a private
1519 # holding directory first to avoid replaceable file races.
1520 if not Options["No-Action"]:
1521 os.chdir(Cnf["Dir::Queue::Holding"])
1522 copy_to_holding(pkg.changes_file)
1523 # Relativize the filename so we use the copy in holding
1524 # rather than the original...
1525 pkg.changes_file = os.path.basename(pkg.changes_file)
# reject() accumulates problems into reject_message as a side effect of
# check_signature and the check_* calls below.
1526 changes["fingerprint"] = utils.check_signature(pkg.changes_file, reject)
1527 if changes["fingerprint"]:
1528 valid_changes_p = check_changes()
1533 check_distributions()
1535 valid_dsc_p = check_dsc()
1541 check_signed_by_key()
1542 Upload.update_subst(reject_message)
# Unexpected exceptions are reported but must not abort the whole run.
1548 traceback.print_exc(file=sys.stderr)
1551 # Restore previous WD
1552 os.chdir(pkg.directory)
1554 ###############################################################################
# Body of main() (the "def main():" line falls in a gap of this
# excerpt): option handling, lock acquisition, and the per-.changes-file
# processing loop.
1557 global Cnf, Options, Logger
1559 changes_files = init()
1561 # -n/--dry-run invalidates some other options which would involve things happening
1562 if Options["No-Action"]:
1563 Options["Automatic"] = ""
1565 # Ensure all the arguments we were given are .changes files
# NOTE(review): removing elements from a list while iterating it skips
# the element following each removal; iterating over a copy (or building
# a filtered list) would be correct.
1566 for f in changes_files:
1567 if not f.endswith(".changes"):
1568 utils.warn("Ignoring '%s' because it's not a .changes file." % (f))
1569 changes_files.remove(f)
1571 if changes_files == []:
1572 utils.fubar("Need at least one .changes file as an argument.")
1574 # Check that we aren't going to clash with the daily cron job
1576 if not Options["No-Action"] and os.path.exists("%s/daily.lock" % (Cnf["Dir::Lock"])) and not Options["No-Lock"]:
1577 utils.fubar("Archive maintenance in progress. Try again later.")
1579 # Obtain lock if not in no-action mode and initialize the log
1581 if not Options["No-Action"]:
1582 lock_fd = os.open(Cnf["Dinstall::LockFile"], os.O_RDWR | os.O_CREAT)
1584 fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
# EACCES/EAGAIN mean another process-unchecked already holds the lock.
1586 if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
1587 utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")
1590 Logger = Upload.Logger = logging.Logger(Cnf, "process-unchecked")
1592 # debian-{devel-,}-changes@lists.debian.org toggles write access based on this header
1593 bcc = "X-DAK: dak process-unchecked\nX-Katie: $Revision: 1.65 $"
1594 if Cnf.has_key("Dinstall::Bcc"):
1595 Upload.Subst["__BCC__"] = bcc + "\nBcc: %s" % (Cnf["Dinstall::Bcc"])
1597 Upload.Subst["__BCC__"] = bcc
1600 # Sort the .changes files so that we process sourceful ones first
1601 changes_files.sort(utils.changes_compare)
1603 # Process the changes files
1604 for changes_file in changes_files:
1605 print "\n" + changes_file
1607 process_it (changes_file)
1609 if not Options["No-Action"]:
# Final accounting of what was accepted during this run.
1612 accept_count = Upload.accept_count
1613 accept_bytes = Upload.accept_bytes
# NOTE(review): 'sets' is presumably assigned ("package"/"packages"
# wording) in a line not shown here -- confirm.
1616 if accept_count > 1:
1618 print "Accepted %d package %s, %s." % (accept_count, sets, utils.size_type(int(accept_bytes)))
1619 Logger.log(["total",accept_count,accept_bytes])
1621 if not Options["No-Action"]:
1626 if __name__ == '__main__':