git.donarmstrong.com Git - dak.git/commitdiff
Merge remote-tracking branch 'ansgar/pu/multiarchive-1'
author    Joerg Jaspert <joerg@debian.org>
          Tue, 14 Aug 2012 06:09:42 +0000 (08:09 +0200)
committer Joerg Jaspert <joerg@debian.org>
          Tue, 14 Aug 2012 06:09:42 +0000 (08:09 +0200)
* ansgar/pu/multiarchive-1:
  daklib/archive.py: don't check hashes when copying upload to temporary location
  dak/clean_suites.py: use '>' instead of '>='
  daklib/utils.py (gpg_get_key_addresses): prefer @debian.org addresses
  dak/process_upload.py: remove old inactive code; update some comments
  dak/dakdb/update79.py: fix typo: workd → world

Signed-off-by: Joerg Jaspert <joerg@debian.org>
dak/clean_suites.py
dak/dakdb/update79.py
dak/process_upload.py
daklib/archive.py
daklib/utils.py

index e9360012550da7fd686a5d1db17951eda60d0873..e571ae0e8c585aee24e2b3f9dc8ee2721d2705a6 100755 (executable)
@@ -214,7 +214,7 @@ def clean_binaries(now_date, session):
          AND NOT EXISTS (SELECT 1 FROM files_archive_map af
                                   JOIN archive_delete_date ad ON af.archive_id = ad.archive_id
                                  WHERE af.file_id = b.file
-                                   AND (af.last_used IS NULL OR af.last_used >= ad.delete_date))
+                                   AND (af.last_used IS NULL OR af.last_used > ad.delete_date))
       RETURNING f.filename
     """)
     for b in q:
@@ -254,7 +254,7 @@ def clean(now_date, archives, max_delete, session):
            AND NOT EXISTS (SELECT 1 FROM files_archive_map af
                                     JOIN archive_delete_date ad ON af.archive_id = ad.archive_id
                                    WHERE af.file_id = source.file
-                                     AND (af.last_used IS NULL OR af.last_used >= ad.delete_date))
+                                     AND (af.last_used IS NULL OR af.last_used > ad.delete_date))
         RETURNING source.id AS id, f.filename AS filename
       ),
       deleted_dsc_files AS (
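
The '>=' to '>' change in both hunks shifts a boundary case: a pool file whose last_used stamp equals the archive's delete_date previously still counted as referenced, but is now eligible for cleanup. A minimal sketch of the predicate with invented dates (the real check runs inside the SQL above):

    from datetime import date

    def still_referenced(last_used, delete_date):
        # Mirrors the NOT EXISTS condition above: a NULL last_used always
        # blocks deletion; otherwise only a strictly newer stamp does.
        if last_used is None:
            return True
        return last_used > delete_date  # was: last_used >= delete_date

    # A file last used exactly on the delete date is now eligible for cleanup.
    assert not still_referenced(date(2012, 8, 14), date(2012, 8, 14))
    assert still_referenced(date(2012, 8, 15), date(2012, 8, 14))
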
index d03fc0660605761200f6cb5d31f99e0bb15c64a7..81a7b239dcdbf8e230d69eb618a357b4fb0dbf48 100755 (executable)
@@ -38,7 +38,7 @@ def do_update(self):
         c = self.db.cursor()
 
         c.execute("CREATE SCHEMA world");
-        c.execute("GRANT USAGE ON SCHEMA workd TO PUBLIC")
+        c.execute("GRANT USAGE ON SCHEMA world TO PUBLIC")
         c.execute("ALTER DEFAULT PRIVILEGES IN SCHEMA world GRANT SELECT ON TABLES TO PUBLIC")
         c.execute("ALTER DEFAULT PRIVILEGES IN SCHEMA world GRANT ALL ON TABLES TO ftpmaster")
         c.execute("ALTER DEFAULT PRIVILEGES IN SCHEMA world GRANT SELECT ON SEQUENCES TO PUBLIC")
index 93d30f85d672944842f4a8e54111e2bd76868fe6..f070566775a720d8a76cf008c1032af84a373349 100755 (executable)
@@ -245,6 +245,7 @@ def subst_for_upload(upload):
     else:
         addresses = utils.mail_addresses_for_upload(maintainer_field, maintainer_field, changes.primary_fingerprint)
 
+    # debian-{devel-,}-changes@lists.debian.org toggles write access based on this header
     bcc = 'X-DAK: dak process-upload'
     if 'Dinstall::Bcc' in cnf:
         bcc = '{0}\nBcc: {1}'.format(bcc, cnf['Dinstall::Bcc'])
@@ -357,7 +358,7 @@ def accept_to_new(directory, upload):
     Logger.log(['ACCEPT-TO-NEW', upload.changes.filename])
 
     upload.install_to_new()
-    # TODO: tag bugs pending, send announcement
+    # TODO: tag bugs pending
 
     subst = subst_for_upload(upload)
     message = utils.TemplateSubst(subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.new'))
@@ -402,7 +403,6 @@ def real_reject(directory, upload, reason=None, notify=True):
     fh.write(reason)
     fh.close()
 
-    # TODO: fix
     if notify:
         subst = subst_for_upload(upload)
         subst['__REJECTOR_ADDRESS__'] = cnf['Dinstall::MyEmailAddress']
@@ -498,7 +498,6 @@ def action(directory, upload):
     elif answer == 'S':
         processed = False
 
-    #raise Exception("FAIL")
     if not Options['No-Action']:
         upload.commit()
 
@@ -519,19 +518,6 @@ def process_it(directory, changes, keyrings, session):
     print "\n{0}\n".format(changes.filename)
     Logger.log(["Processing changes file", changes.filename])
 
-    cnf = Config()
-
-    # Some defaults in case we can't fully process the .changes file
-    #u.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
-    #u.pkg.changes["changedby2047"] = cnf["Dinstall::MyEmailAddress"]
-
-    # debian-{devel-,}-changes@lists.debian.org toggles writes access based on this header
-    bcc = "X-DAK: dak process-upload"
-    #if cnf.has_key("Dinstall::Bcc"):
-    #    u.Subst["__BCC__"] = bcc + "\nBcc: %s" % (cnf["Dinstall::Bcc"])
-    #else:
-    #    u.Subst["__BCC__"] = bcc
-
     with daklib.archive.ArchiveUpload(directory, changes, keyrings) as upload:
         processed = action(directory, upload)
         if processed and not Options['No-Action']:
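
The block removed from process_it was dead code duplicating what subst_for_upload (first hunk) now does live: emit an X-DAK marker header and append a Bcc line when Dinstall::Bcc is configured. The surviving logic, run standalone with an invented config value:

    cnf = {'Dinstall::Bcc': 'archive@example.org'}  # hypothetical config

    bcc = 'X-DAK: dak process-upload'
    if 'Dinstall::Bcc' in cnf:
        bcc = '{0}\nBcc: {1}'.format(bcc, cnf['Dinstall::Bcc'])

    assert bcc == 'X-DAK: dak process-upload\nBcc: archive@example.org'
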
index 54f925a33f07d22e629a5c6edf50d83f925ce23d..2badae7252693c6cc57ec025c695ba28e36ea51d 100644 (file)
@@ -49,12 +49,18 @@ class ArchiveTransaction(object):
         self.fs = FilesystemTransaction()
         self.session = DBConn().session()
 
-    def get_file(self, hashed_file, source_name):
+    def get_file(self, hashed_file, source_name, check_hashes=True):
         """Look for file C{hashed_file} in database
 
         @type  hashed_file: L{daklib.upload.HashedFile}
         @param hashed_file: file to look for in the database
 
+        @type  source_name: str
+        @param source_name: source package name
+
+        @type  check_hashes: bool
+        @param check_hashes: check size and hashes match
+
         @raise KeyError: file was not found in the database
         @raise HashMismatchException: hash mismatch
 
@@ -64,7 +70,10 @@ class ArchiveTransaction(object):
         poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
         try:
             poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
-            if poolfile.filesize != hashed_file.size or poolfile.md5sum != hashed_file.md5sum or poolfile.sha1sum != hashed_file.sha1sum or poolfile.sha256sum != hashed_file.sha256sum:
+            if check_hashes and (poolfile.filesize != hashed_file.size
+                                 or poolfile.md5sum != hashed_file.md5sum
+                                 or poolfile.sha1sum != hashed_file.sha1sum
+                                 or poolfile.sha256sum != hashed_file.sha256sum):
                 raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
             return poolfile
         except NoResultFound:
@@ -635,7 +644,7 @@ class ArchiveUpload(object):
                     dst = os.path.join(self.directory, f.filename)
                     if not os.path.exists(dst):
                         try:
-                            db_file = self.transaction.get_file(f, source.dsc['Source'])
+                            db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
                             db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
                             fs.copy(db_archive_file.path, dst, symlink=True)
                         except KeyError:
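
Skipping the hash check here is safe because the copy only seeds the temporary unpack directory; the upload as a whole is still verified later. A standalone sketch of the predicate with stub objects (names invented), showing how check_hashes=False lets a mismatching pool file through:

    class PoolFileStub(object):
        filesize, md5sum, sha1sum, sha256sum = 100, 'a', 'b', 'c'

    class HashedFileStub(object):
        filename = 'foo_1.0.orig.tar.gz'
        size, md5sum, sha1sum, sha256sum = 100, 'a', 'b', 'MISMATCH'

    def pool_file_matches(poolfile, hashed_file, check_hashes=True):
        # Same predicate as get_file: with check_hashes=False any size or
        # checksum difference is ignored.
        return not (check_hashes and (poolfile.filesize != hashed_file.size
                                      or poolfile.md5sum != hashed_file.md5sum
                                      or poolfile.sha1sum != hashed_file.sha1sum
                                      or poolfile.sha256sum != hashed_file.sha256sum))

    assert not pool_file_matches(PoolFileStub(), HashedFileStub())
    assert pool_file_matches(PoolFileStub(), HashedFileStub(), check_hashes=False)
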
index 31c22cbb63ca222b32ef709fd0dcee0481ca4165..be3c65161e8fee8c203f6954715ec50fa9445d3f 100644 (file)
@@ -1396,7 +1396,14 @@ def gpg_get_key_addresses(fingerprint):
     if result == 0:
         for l in output.split('\n'):
             m = re_gpg_uid.match(l)
-            if m:
+            if not m:
+                continue
+            address = m.group(1)
+            if address.endswith('@debian.org'):
+                # prefer @debian.org addresses
+                # TODO: maybe not hardcode the domain
+                addresses.insert(0, address)
+            else:
                 addresses.append(m.group(1))
     key_uid_email_cache[fingerprint] = addresses
     return addresses
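
The insert/append split means the first element of the returned list is a @debian.org address whenever the key carries one, which matters to callers that take only the first entry. The same ordering logic on invented UIDs:

    uids = ['joe@example.org', 'joerg@debian.org', 'second@debian.org']

    addresses = []
    for address in uids:
        if address.endswith('@debian.org'):
            # prefer @debian.org addresses
            addresses.insert(0, address)
        else:
            addresses.append(address)

    assert addresses == ['second@debian.org', 'joerg@debian.org', 'joe@example.org']

Note that insert(0, ...) reverses the relative order of multiple @debian.org UIDs; with a single one, it simply floats to the front.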