5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
38 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from subprocess import Popen, PIPE
56 from tarfile import TarFile
58 from inspect import getargspec
61 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
63 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
64 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
65 from sqlalchemy import types as sqltypes
66 from sqlalchemy.orm.collections import attribute_mapped_collection
67 from sqlalchemy.ext.associationproxy import association_proxy
69 # Don't remove this, we re-export the exceptions to scripts which import us
70 from sqlalchemy.exc import *
71 from sqlalchemy.orm.exc import NoResultFound
73 # Only import Config until Queue stuff is changed to store its config
75 from config import Config
76 from textutils import fix_maintainer
77 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
79 # suppress some deprecation warnings in squeeze related to sqlalchemy
81 warnings.filterwarnings('ignore', \
82 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
86 ################################################################################
88 # Patch in support for the debversion field type so that it works during
92 # that is for sqlalchemy 0.6
93 UserDefinedType = sqltypes.UserDefinedType
95 # this one for sqlalchemy 0.5
96 UserDefinedType = sqltypes.TypeEngine
98 class DebVersion(UserDefinedType):
99 def get_col_spec(self):
102 def bind_processor(self, dialect):
105 # ' = None' is needed for sqlalchemy 0.5:
106 def result_processor(self, dialect, coltype = None):
109 sa_major_version = sqlalchemy.__version__[0:3]
110 if sa_major_version in ["0.5", "0.6"]:
111 from sqlalchemy.databases import postgres
112 postgres.ischema_names['debversion'] = DebVersion
114 raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
116 ################################################################################
118 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
120 ################################################################################
122 def session_wrapper(fn):
124 Wrapper around common ".., session=None):" handling. If the wrapped
125 function is called without passing 'session', we create a local one
126 and destroy it when the function ends.
128 Also attaches a commit_or_flush method to the session; if we created a
129 local session, this is a synonym for session.commit(), otherwise it is a
130 synonym for session.flush().
133 def wrapped(*args, **kwargs):
134 private_transaction = False
136 # Find the session object
137 session = kwargs.get('session')
140 if len(args) <= len(getargspec(fn)[0]) - 1:
141 # No session specified as last argument or in kwargs
142 private_transaction = True
143 session = kwargs['session'] = DBConn().session()
145 # Session is last argument in args
149 session = args[-1] = DBConn().session()
150 private_transaction = True
152 if private_transaction:
153 session.commit_or_flush = session.commit
155 session.commit_or_flush = session.flush
158 return fn(*args, **kwargs)
160 if private_transaction:
161 # We created a session; close it.
164 wrapped.__doc__ = fn.__doc__
165 wrapped.func_name = fn.func_name
169 __all__.append('session_wrapper')
171 ################################################################################
173 class ORMObject(object):
175 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
176 derived classes must implement the properties() method.
179 def properties(self):
181 This method should be implemented by all derived classes and returns a
182 list of the important properties. The properties 'created' and
183 'modified' will be added automatically. A suffix '_count' should be
184 added to properties that are lists or query objects. The most important
185 property name should be returned as the first element in the list
186 because it is used by repr().
192 Returns a JSON representation of the object based on the properties
193 returned from the properties() method.
196 # add created and modified
197 all_properties = self.properties() + ['created', 'modified']
198 for property in all_properties:
199 # check for list or query
200 if property[-6:] == '_count':
201 real_property = property[:-6]
202 if not hasattr(self, real_property):
204 value = getattr(self, real_property)
205 if hasattr(value, '__len__'):
208 elif hasattr(value, 'count'):
209 # query (but not during validation)
210 if self.in_validation:
212 value = value.count()
214 raise KeyError('Do not understand property %s.' % property)
216 if not hasattr(self, property):
219 value = getattr(self, property)
223 elif isinstance(value, ORMObject):
224 # use repr() for ORMObject types
227 # we want a string for all other types because json cannot
230 data[property] = value
231 return json.dumps(data)
235 Returns the name of the class.
237 return type(self).__name__
241 Returns a short string representation of the object using the first
242 element from the properties() method.
244 primary_property = self.properties()[0]
245 value = getattr(self, primary_property)
246 return '<%s %s>' % (self.classname(), str(value))
250 Returns a human readable form of the object using the properties()
253 return '<%s %s>' % (self.classname(), self.json())
255 def not_null_constraints(self):
257 Returns a list of properties that must be not NULL. Derived classes
258 should override this method if needed.
262 validation_message = \
263 "Validation failed because property '%s' must not be empty in object\n%s"
265 in_validation = False
269 This function validates the not NULL constraints as returned by
270 not_null_constraints(). It raises the DBUpdateError exception if
273 for property in self.not_null_constraints():
274 # TODO: It is a bit awkward that the mapper configuration allow
275 # directly setting the numeric _id columns. We should get rid of it
277 if hasattr(self, property + '_id') and \
278 getattr(self, property + '_id') is not None:
280 if not hasattr(self, property) or getattr(self, property) is None:
281 # str() might lead to races due to a 2nd flush
282 self.in_validation = True
283 message = self.validation_message % (property, str(self))
284 self.in_validation = False
285 raise DBUpdateError(message)
289 def get(cls, primary_key, session = None):
291 This is a support function that allows getting an object by its primary
294 Architecture.get(3[, session])
296 instead of the more verbose
298 session.query(Architecture).get(3)
300 return session.query(cls).get(primary_key)
302 def session(self, replace = False):
304 Returns the current session that is associated with the object. May
305 return None is object is in detached state.
308 return object_session(self)
310 def clone(self, session = None):
312 Clones the current object in a new session and returns the new clone. A
313 fresh session is created if the optional session parameter is not
314 provided. The function will fail if a session is provided and has
317 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
318 an existing object to allow several threads to work with their own
319 instances of an ORMObject.
321 WARNING: Only persistent (committed) objects can be cloned. Changes
322 made to the original object that are not committed yet will get lost.
323 The session of the new object will always be rolled back to avoid
327 if self.session() is None:
328 raise RuntimeError( \
329 'Method clone() failed for detached object:\n%s' % self)
330 self.session().flush()
331 mapper = object_mapper(self)
332 primary_key = mapper.primary_key_from_instance(self)
333 object_class = self.__class__
335 session = DBConn().session()
336 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
337 raise RuntimeError( \
338 'Method clone() failed due to unflushed changes in session.')
339 new_object = session.query(object_class).get(primary_key)
341 if new_object is None:
342 raise RuntimeError( \
343 'Method clone() failed for non-persistent object:\n%s' % self)
346 __all__.append('ORMObject')
348 ################################################################################
350 class Validator(MapperExtension):
352 This class calls the validate() method for each instance for the
353 'before_update' and 'before_insert' events. A global object validator is
354 used for configuring the individual mappers.
357 def before_update(self, mapper, connection, instance):
361 def before_insert(self, mapper, connection, instance):
365 validator = Validator()
367 ################################################################################
class Architecture(ORMObject):
    """ORM class for the architecture table.

    Instances compare equal (and unequal) to plain architecture name
    strings, so callers can write e.g. ``arch == 'amd64'`` directly.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow direct comparison against an architecture name string;
        # NotImplemented defers to the normal comparison machinery.
        return self.arch_string == val if isinstance(val, str) else NotImplemented

    def __ne__(self, val):
        # Mirror __eq__ so string inequality tests behave consistently.
        return self.arch_string != val if isinstance(val, str) else NotImplemented

    def properties(self):
        # 'arch_string' comes first because ORMObject.__repr__() uses the
        # first property as the object's short description.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
392 __all__.append('Architecture')
395 def get_architecture(architecture, session=None):
397 Returns database id for given C{architecture}.
399 @type architecture: string
400 @param architecture: The name of the architecture
402 @type session: Session
403 @param session: Optional SQLA session object (a temporary one will be
404 generated if not supplied)
407 @return: Architecture object for the given arch (None if not present)
410 q = session.query(Architecture).filter_by(arch_string=architecture)
414 except NoResultFound:
417 __all__.append('get_architecture')
419 # TODO: should be removed because the implementation is too trivial
421 def get_architecture_suites(architecture, session=None):
423 Returns list of Suite objects for given C{architecture} name
425 @type architecture: str
426 @param architecture: Architecture name to search for
428 @type session: Session
429 @param session: Optional SQL session object (a temporary one will be
430 generated if not supplied)
433 @return: list of Suite objects for the given name (may be empty)
436 return get_architecture(architecture, session).suites
438 __all__.append('get_architecture_suites')
440 ################################################################################
442 class Archive(object):
443 def __init__(self, *args, **kwargs):
447 return '<Archive %s>' % self.archive_name
449 __all__.append('Archive')
452 def get_archive(archive, session=None):
454 returns database id for given C{archive}.
456 @type archive: string
457 @param archive: the name of the arhive
459 @type session: Session
460 @param session: Optional SQLA session object (a temporary one will be
461 generated if not supplied)
464 @return: Archive object for the given name (None if not present)
467 archive = archive.lower()
469 q = session.query(Archive).filter_by(archive_name=archive)
473 except NoResultFound:
476 __all__.append('get_archive')
478 ################################################################################
480 class BinContents(ORMObject):
481 def __init__(self, file = None, binary = None):
485 def properties(self):
486 return ['file', 'binary']
488 __all__.append('BinContents')
490 ################################################################################
def subprocess_setup():
    """Child-process hook: restore SIGPIPE to its default action.

    Python installs a SIGPIPE ignore handler by default, which is usually
    not what non-Python subprocesses expect, so reset it before exec
    (intended for use as Popen's preexec_fn).
    """
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
497 class DBBinary(ORMObject):
498 def __init__(self, package = None, source = None, version = None, \
499 maintainer = None, architecture = None, poolfile = None, \
501 self.package = package
503 self.version = version
504 self.maintainer = maintainer
505 self.architecture = architecture
506 self.poolfile = poolfile
507 self.binarytype = binarytype
511 return self.binary_id
513 def properties(self):
514 return ['package', 'version', 'maintainer', 'source', 'architecture', \
515 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
516 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
518 def not_null_constraints(self):
519 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
522 metadata = association_proxy('key', 'value')
524 def get_component_name(self):
525 return self.poolfile.location.component.component_name
527 def scan_contents(self):
529 Yields the contents of the package. Only regular files are yielded and
530 the path names are normalized after converting them from either utf-8
531 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
532 package does not contain any regular file.
534 fullpath = self.poolfile.fullpath
535 dpkg = Popen(['dpkg-deb', '--fsys-tarfile', fullpath], stdout = PIPE,
536 preexec_fn = subprocess_setup)
537 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
538 for member in tar.getmembers():
539 if not member.isdir():
540 name = normpath(member.name)
541 # enforce proper utf-8 encoding
544 except UnicodeDecodeError:
545 name = name.decode('iso8859-1').encode('utf-8')
551 def read_control(self):
553 Reads the control information from a binary.
556 @return: stanza text of the control section.
559 fullpath = self.poolfile.fullpath
560 deb_file = open(fullpath, 'r')
561 stanza = apt_inst.debExtractControl(deb_file)
566 def read_control_fields(self):
568 Reads the control information from a binary and return
572 @return: fields of the control section as a dictionary.
575 stanza = self.read_control()
576 return apt_pkg.TagSection(stanza)
578 __all__.append('DBBinary')
581 def get_suites_binary_in(package, session=None):
583 Returns list of Suite objects which given C{package} name is in
586 @param package: DBBinary package name to search for
589 @return: list of Suite objects for the given package
592 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
594 __all__.append('get_suites_binary_in')
597 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
599 Returns the component name of the newest binary package in suite_list or
600 None if no package is found. The result can be optionally filtered by a list
601 of architecture names.
604 @param package: DBBinary package name to search for
606 @type suite_list: list of str
607 @param suite_list: list of suite_name items
609 @type arch_list: list of str
610 @param arch_list: optional list of arch_string items that defaults to []
612 @rtype: str or NoneType
613 @return: name of component or None
616 q = session.query(DBBinary).filter_by(package = package). \
617 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
618 if len(arch_list) > 0:
619 q = q.join(DBBinary.architecture). \
620 filter(Architecture.arch_string.in_(arch_list))
621 binary = q.order_by(desc(DBBinary.version)).first()
625 return binary.get_component_name()
627 __all__.append('get_component_by_package_suite')
629 ################################################################################
631 class BinaryACL(object):
632 def __init__(self, *args, **kwargs):
636 return '<BinaryACL %s>' % self.binary_acl_id
638 __all__.append('BinaryACL')
640 ################################################################################
642 class BinaryACLMap(object):
643 def __init__(self, *args, **kwargs):
647 return '<BinaryACLMap %s>' % self.binary_acl_map_id
649 __all__.append('BinaryACLMap')
651 ################################################################################
656 ArchiveDir "%(archivepath)s";
657 OverrideDir "%(overridedir)s";
658 CacheDir "%(cachedir)s";
663 Packages::Compress ". bzip2 gzip";
664 Sources::Compress ". bzip2 gzip";
669 bindirectory "incoming"
674 BinOverride "override.sid.all3";
675 BinCacheDB "packages-accepted.db";
677 FileList "%(filelist)s";
680 Packages::Extensions ".deb .udeb";
683 bindirectory "incoming/"
686 BinOverride "override.sid.all3";
687 SrcOverride "override.sid.all3.src";
688 FileList "%(filelist)s";
692 class BuildQueue(object):
693 def __init__(self, *args, **kwargs):
697 return '<BuildQueue %s>' % self.queue_name
699 def write_metadata(self, starttime, force=False):
700 # Do we write out metafiles?
701 if not (force or self.generate_metadata):
704 session = DBConn().session().object_session(self)
706 fl_fd = fl_name = ac_fd = ac_name = None
708 arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
709 startdir = os.getcwd()
712 # Grab files we want to include
713 newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
714 newer += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
715 # Write file list with newer files
716 (fl_fd, fl_name) = mkstemp()
718 os.write(fl_fd, '%s\n' % n.fullpath)
723 # Write minimal apt.conf
724 # TODO: Remove hardcoding from template
725 (ac_fd, ac_name) = mkstemp()
726 os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
728 'cachedir': cnf["Dir::Cache"],
729 'overridedir': cnf["Dir::Override"],
733 # Run apt-ftparchive generate
734 os.chdir(os.path.dirname(ac_name))
735 os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))
737 # Run apt-ftparchive release
738 # TODO: Eww - fix this
739 bname = os.path.basename(self.path)
743 # We have to remove the Release file otherwise it'll be included in the
746 os.unlink(os.path.join(bname, 'Release'))
750 os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))
752 # Crude hack with open and append, but this whole section is and should be redone.
753 if self.notautomatic:
754 release=open("Release", "a")
755 release.write("NotAutomatic: yes\n")
760 keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
761 if cnf.has_key("Dinstall::SigningPubKeyring"):
762 keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]
764 os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))
766 # Move the files if we got this far
767 os.rename('Release', os.path.join(bname, 'Release'))
769 os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))
771 # Clean up any left behind files
798 def clean_and_update(self, starttime, Logger, dryrun=False):
799 """WARNING: This routine commits for you"""
800 session = DBConn().session().object_session(self)
802 if self.generate_metadata and not dryrun:
803 self.write_metadata(starttime)
805 # Grab files older than our execution time
806 older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
807 older += session.query(BuildQueuePolicyFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueuePolicyFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()
813 Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
815 Logger.log(["I: Removing %s from the queue" % o.fullpath])
816 os.unlink(o.fullpath)
819 # If it wasn't there, don't worry
820 if e.errno == ENOENT:
823 # TODO: Replace with proper logging call
824 Logger.log(["E: Could not remove %s" % o.fullpath])
831 for f in os.listdir(self.path):
832 if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
835 if not self.contains_filename(f):
836 fp = os.path.join(self.path, f)
838 Logger.log(["I: Would remove unused link %s" % fp])
840 Logger.log(["I: Removing unused link %s" % fp])
844 Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
846 def contains_filename(self, filename):
849 @returns True if filename is supposed to be in the queue; False otherwise
851 session = DBConn().session().object_session(self)
852 if session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id, filename = filename).count() > 0:
854 elif session.query(BuildQueuePolicyFile).filter_by(build_queue = self, filename = filename).count() > 0:
858 def add_file_from_pool(self, poolfile):
859 """Copies a file into the pool. Assumes that the PoolFile object is
860 attached to the same SQLAlchemy session as the Queue object is.
862 The caller is responsible for committing after calling this function."""
863 poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]
865 # Check if we have a file of this name or this ID already
866 for f in self.queuefiles:
867 if (f.fileid is not None and f.fileid == poolfile.file_id) or \
868 (f.poolfile is not None and f.poolfile.filename == poolfile_basename):
869 # In this case, update the BuildQueueFile entry so we
870 # don't remove it too early
871 f.lastused = datetime.now()
872 DBConn().session().object_session(poolfile).add(f)
875 # Prepare BuildQueueFile object
876 qf = BuildQueueFile()
877 qf.build_queue_id = self.queue_id
878 qf.lastused = datetime.now()
879 qf.filename = poolfile_basename
881 targetpath = poolfile.fullpath
882 queuepath = os.path.join(self.path, poolfile_basename)
886 # We need to copy instead of symlink
888 utils.copy(targetpath, queuepath)
889 # NULL in the fileid field implies a copy
892 os.symlink(targetpath, queuepath)
893 qf.fileid = poolfile.file_id
894 except FileExistsError:
895 if not poolfile.identical_to(queuepath):
900 # Get the same session as the PoolFile is using and add the qf to it
901 DBConn().session().object_session(poolfile).add(qf)
905 def add_changes_from_policy_queue(self, policyqueue, changes):
907 Copies a changes from a policy queue together with its poolfiles.
909 @type policyqueue: PolicyQueue
910 @param policyqueue: policy queue to copy the changes from
912 @type changes: DBChange
913 @param changes: changes to copy to this build queue
915 for policyqueuefile in changes.files:
916 self.add_file_from_policy_queue(policyqueue, policyqueuefile)
917 for poolfile in changes.poolfiles:
918 self.add_file_from_pool(poolfile)
920 def add_file_from_policy_queue(self, policyqueue, policyqueuefile):
922 Copies a file from a policy queue.
923 Assumes that the policyqueuefile is attached to the same SQLAlchemy
924 session as the Queue object is. The caller is responsible for
925 committing after calling this function.
927 @type policyqueue: PolicyQueue
928 @param policyqueue: policy queue to copy the file from
930 @type policyqueuefile: ChangePendingFile
931 @param policyqueuefile: file to be added to the build queue
933 session = DBConn().session().object_session(policyqueuefile)
935 # Is the file already there?
937 f = session.query(BuildQueuePolicyFile).filter_by(build_queue=self, file=policyqueuefile).one()
938 f.lastused = datetime.now()
940 except NoResultFound:
941 pass # continue below
943 # We have to add the file.
944 f = BuildQueuePolicyFile()
946 f.file = policyqueuefile
947 f.filename = policyqueuefile.filename
949 source = os.path.join(policyqueue.path, policyqueuefile.filename)
952 # Always copy files from policy queues as they might move around.
954 utils.copy(source, target)
955 except FileExistsError:
956 if not policyqueuefile.identical_to(target):
964 __all__.append('BuildQueue')
967 def get_build_queue(queuename, session=None):
969 Returns BuildQueue object for given C{queue name}, creating it if it does not
972 @type queuename: string
973 @param queuename: The name of the queue
975 @type session: Session
976 @param session: Optional SQLA session object (a temporary one will be
977 generated if not supplied)
980 @return: BuildQueue object for the given queue
983 q = session.query(BuildQueue).filter_by(queue_name=queuename)
987 except NoResultFound:
990 __all__.append('get_build_queue')
992 ################################################################################
994 class BuildQueueFile(object):
996 BuildQueueFile represents a file in a build queue coming from a pool.
999 def __init__(self, *args, **kwargs):
1003 return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)
1007 return os.path.join(self.buildqueue.path, self.filename)
1010 __all__.append('BuildQueueFile')
1012 ################################################################################
1014 class BuildQueuePolicyFile(object):
1016 BuildQueuePolicyFile represents a file in a build queue that comes from a
1017 policy queue (and not a pool).
1020 def __init__(self, *args, **kwargs):
1024 #def filename(self):
1025 # return self.file.filename
1029 return os.path.join(self.build_queue.path, self.filename)
1031 __all__.append('BuildQueuePolicyFile')
1033 ################################################################################
1035 class ChangePendingBinary(object):
1036 def __init__(self, *args, **kwargs):
1040 return '<ChangePendingBinary %s>' % self.change_pending_binary_id
1042 __all__.append('ChangePendingBinary')
1044 ################################################################################
1046 class ChangePendingFile(object):
1047 def __init__(self, *args, **kwargs):
1051 return '<ChangePendingFile %s>' % self.change_pending_file_id
1053 def identical_to(self, filename):
1055 compare size and hash with the given file
1058 @return: true if the given file has the same size and hash as this object; false otherwise
1060 st = os.stat(filename)
1061 if self.size != st.st_size:
1064 f = open(filename, "r")
1065 sha256sum = apt_pkg.sha256sum(f)
1066 if sha256sum != self.sha256sum:
1071 __all__.append('ChangePendingFile')
1073 ################################################################################
1075 class ChangePendingSource(object):
1076 def __init__(self, *args, **kwargs):
1080 return '<ChangePendingSource %s>' % self.change_pending_source_id
1082 __all__.append('ChangePendingSource')
1084 ################################################################################
class Component(ORMObject):
    """ORM class for the component table.

    Instances compare equal (and unequal) to plain component name
    strings, e.g. ``component == 'main'``.
    """

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Permit comparing a Component directly with a component name string.
        if not isinstance(val, str):
            # Defer to the default comparison machinery.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror __eq__ so string inequality tests behave consistently.
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' comes first because ORMObject.__repr__() uses
        # the first property as the object's short description.
        return ['component_name', 'component_id', 'description',
                'location_count', 'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
1110 __all__.append('Component')
1113 def get_component(component, session=None):
1115 Returns database id for given C{component}.
1117 @type component: string
1118 @param component: The name of the override type
1121 @return: the database id for the given component
1124 component = component.lower()
1126 q = session.query(Component).filter_by(component_name=component)
1130 except NoResultFound:
1133 __all__.append('get_component')
1135 ################################################################################
1137 class DBConfig(object):
1138 def __init__(self, *args, **kwargs):
1142 return '<DBConfig %s>' % self.name
1144 __all__.append('DBConfig')
1146 ################################################################################
1149 def get_or_set_contents_file_id(filename, session=None):
1151 Returns database id for given filename.
1153 If no matching file is found, a row is inserted.
1155 @type filename: string
1156 @param filename: The filename
1157 @type session: SQLAlchemy
1158 @param session: Optional SQL session object (a temporary one will be
1159 generated if not supplied). If not passed, a commit will be performed at
1160 the end of the function, otherwise the caller is responsible for commiting.
1163 @return: the database id for the given component
1166 q = session.query(ContentFilename).filter_by(filename=filename)
1169 ret = q.one().cafilename_id
1170 except NoResultFound:
1171 cf = ContentFilename()
1172 cf.filename = filename
1174 session.commit_or_flush()
1175 ret = cf.cafilename_id
1179 __all__.append('get_or_set_contents_file_id')
1182 def get_contents(suite, overridetype, section=None, session=None):
1184 Returns contents for a suite / overridetype combination, limiting
1185 to a section if not None.
1188 @param suite: Suite object
1190 @type overridetype: OverrideType
1191 @param overridetype: OverrideType object
1193 @type section: Section
1194 @param section: Optional section object to limit results to
1196 @type session: SQLAlchemy
1197 @param session: Optional SQL session object (a temporary one will be
1198 generated if not supplied)
1200 @rtype: ResultsProxy
1201 @return: ResultsProxy object set up to return tuples of (filename, section,
1205 # find me all of the contents for a given suite
1206 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
1210 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
1211 JOIN content_file_names n ON (c.filename=n.id)
1212 JOIN binaries b ON (b.id=c.binary_pkg)
1213 JOIN override o ON (o.package=b.package)
1214 JOIN section s ON (s.id=o.section)
1215 WHERE o.suite = :suiteid AND o.type = :overridetypeid
1216 AND b.type=:overridetypename"""
1218 vals = {'suiteid': suite.suite_id,
1219 'overridetypeid': overridetype.overridetype_id,
1220 'overridetypename': overridetype.overridetype}
1222 if section is not None:
1223 contents_q += " AND s.id = :sectionid"
1224 vals['sectionid'] = section.section_id
1226 contents_q += " ORDER BY fn"
1228 return session.execute(contents_q, vals)
1230 __all__.append('get_contents')
1232 ################################################################################
class ContentFilepath(object):
    """ORM class for a row of the content_file_paths table: the directory
    component of a path recorded by the Contents machinery."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1241 __all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: int
    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    try:
        # EAFP: look the row up first; insert only on NoResultFound.
        ret = q.one().cafilepath_id
    except NoResultFound:
        cf = ContentFilepath()
        cf.filepath = filepath
        session.add(cf)
        session.commit_or_flush()
        ret = cf.cafilepath_id

    return ret
1275 __all__.append('get_or_set_contents_path_id')
1277 ################################################################################
class ContentAssociation(object):
    """ORM class associating a binary package with one path/filename pair
    (a row of the content_associations table)."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1286 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session.  If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code.  If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """

    privatetrans = False
    if session is None:
        # No session supplied: open our own and manage the transaction.
        session = DBConn().session()
        privatetrans = True

    try:
        # Insert paths
        def generate_path_dicts():
            for fullpath in fullpaths:
                # Normalise away a leading './' so stored paths are relative.
                if fullpath.startswith( './' ):
                    fullpath = fullpath[2:]

                yield {'filename':fullpath, 'id': binary_id }

        for d in generate_path_dicts():
            session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
                             d )

        session.commit()
        if privatetrans:
            session.close()
        return True

    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt are
        # no longer swallowed; still best-effort, returning False on failure.
        traceback.print_exc()

        # Only rollback if we set up the session ourself
        if privatetrans:
            session.rollback()
            session.close()

        return False
1339 __all__.append('insert_content_paths')
1341 ################################################################################
class DSCFile(object):
    """ORM class tying a source package (.dsc) to one of its listed files."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1350 __all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    # The reviewed copy dropped the query result; return it.
    return q.all()
1383 __all__.append('get_dscfiles')
1385 ################################################################################
class ExternalOverride(ORMObject):
    """ORM class for an external override entry: an arbitrary key/value
    attached to a package name."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
1394 __all__.append('ExternalOverride')
1396 ################################################################################
class PoolFile(ORMObject):
    """ORM class for a file stored in the archive pool (the files table)."""

    def __init__(self, filename = None, location = None, filesize = -1, \
        md5sum = None):
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    @property
    def fullpath(self):
        # Absolute path of this file inside its Location.
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # NB: 'long' is the Python 2 integer type this file targets.
        return self.filesize == long(filesize) and self.md5sum == md5sum

    def properties(self):
        # Attributes exposed via ORMObject's generic machinery.
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'binary', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']

    def identical_to(self, filename):
        """
        compare size and hash with the given file

        @rtype: bool
        @return: true if the given file has the same size and hash as this object; false otherwise
        """
        st = os.stat(filename)
        if self.filesize != st.st_size:
            return False

        f = open(filename, "r")
        try:
            sha256sum = apt_pkg.sha256sum(f)
        finally:
            # Fix: the file handle was previously leaked; always close it.
            f.close()
        if sha256sum != self.sha256sum:
            return False

        return True
1438 __all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Returns a tuple:
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    # Fix: 'valid' must be initialised, otherwise the not-found and
    # mismatch paths raise NameError instead of returning (False, ...).
    valid = False
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
        valid = True

    return (valid, poolfile)
1474 __all__.append('check_poolfile')
1476 # TODO: the implementation can trivially be inlined at the place where the
1477 # function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    # Primary-key lookup; Query.get returns None when the id is unknown.
    return session.query(PoolFile).get(file_id)
1492 __all__.append('get_poolfile_by_id')
def get_poolfile_like_name(filename, session=None):
    """
    Returns an array of PoolFile objects which are like the given name

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @rtype: array
    @return: array of PoolFile objects
    """

    # TODO: There must be a way of properly using bind parameters with %FOO%
    q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))

    # The reviewed copy dropped the query result; return it.
    return q.all()
1511 __all__.append('get_poolfile_like_name')
def add_poolfile(filename, datadict, location_id, session=None):
    """
    Add a new file to the pool

    @type filename: string
    @param filename: filename

    @type datadict: dict
    @param datadict: dict with needed data (size, md5sum, sha1sum, sha256sum)

    @type location_id: int
    @param location_id: database id of the location

    @rtype: PoolFile
    @return: the PoolFile object created
    """
    poolfile = PoolFile()
    poolfile.filename = filename
    poolfile.filesize = datadict["size"]
    poolfile.md5sum = datadict["md5sum"]
    poolfile.sha1sum = datadict["sha1sum"]
    poolfile.sha256sum = datadict["sha256sum"]
    poolfile.location_id = location_id

    session.add(poolfile)
    # Flush to get a file id (NB: This is not a commit)
    session.flush()

    return poolfile
1544 __all__.append('add_poolfile')
1546 ################################################################################
class Fingerprint(ORMObject):
    """ORM class for an OpenPGP key fingerprint and its keyring/uid links."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): the reviewed copy's list was truncated by a dangling
        # line continuation; ACL attributes restored — verify against VCS.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
            'binary_acl', 'binary_reject', 'source_acl']

    def not_null_constraints(self):
        return ['fingerprint']
1559 __all__.append('Fingerprint')
def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # No match: this variant does not insert, unlike get_or_set_fingerprint.
        ret = None

    return ret
1586 __all__.append('get_fingerprint')
def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @type fpr: string
    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Fingerprint
    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert and flush/commit so the row gets an id.
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()
        ret = fingerprint

    return ret
1621 __all__.append('get_or_set_fingerprint')
1623 ################################################################################
1625 # Helper routine for Keyring class
def get_ldap_name(entry):
    """
    Build a display name from an LDAP entry by joining its cn, mn and sn
    attributes, skipping empty values and placeholder dashes.

    @type entry: dict
    @param entry: LDAP entry mapping attribute names to value lists

    @rtype: string
    @return: space-joined name components (may be empty)
    """
    name = []
    for k in ["cn", "mn", "sn"]:
        # Each attribute maps to a list of values; use the first if usable.
        ret = entry.get(k)
        if ret and ret[0] != "" and ret[0] != "-":
            name.append(ret[0])
    return " ".join(name)
1634 ################################################################################
1636 class Keyring(object):
1637 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1638 " --with-colons --fingerprint --fingerprint"
1643 def __init__(self, *args, **kwargs):
1647 return '<Keyring %s>' % self.keyring_name
1649 def de_escape_gpg_str(self, txt):
1650 esclist = re.split(r'(\\x..)', txt)
1651 for x in range(1,len(esclist),2):
1652 esclist[x] = "%c" % (int(esclist[x][2:],16))
1653 return "".join(esclist)
1655 def parse_address(self, uid):
1656 """parses uid and returns a tuple of real name and email address"""
1658 (name, address) = email.Utils.parseaddr(uid)
1659 name = re.sub(r"\s*[(].*[)]", "", name)
1660 name = self.de_escape_gpg_str(name)
1663 return (name, address)
1665 def load_keys(self, keyring):
1666 if not self.keyring_id:
1667 raise Exception('Must be initialized with database information')
1669 k = os.popen(self.gpg_invocation % keyring, "r")
1673 for line in k.xreadlines():
1674 field = line.split(":")
1675 if field[0] == "pub":
1678 (name, addr) = self.parse_address(field[9])
1680 self.keys[key]["email"] = addr
1681 self.keys[key]["name"] = name
1682 self.keys[key]["fingerprints"] = []
1684 elif key and field[0] == "sub" and len(field) >= 12:
1685 signingkey = ("s" in field[11])
1686 elif key and field[0] == "uid":
1687 (name, addr) = self.parse_address(field[9])
1688 if "email" not in self.keys[key] and "@" in addr:
1689 self.keys[key]["email"] = addr
1690 self.keys[key]["name"] = name
1691 elif signingkey and field[0] == "fpr":
1692 self.keys[key]["fingerprints"].append(field[9])
1693 self.fpr_lookup[field[9]] = key
1695 def import_users_from_ldap(self, session):
1699 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1700 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
1702 l = ldap.open(LDAPServer)
1703 l.simple_bind_s("","")
1704 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1705 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1706 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1708 ldap_fin_uid_id = {}
1715 uid = entry["uid"][0]
1716 name = get_ldap_name(entry)
1717 fingerprints = entry["keyFingerPrint"]
1719 for f in fingerprints:
1720 key = self.fpr_lookup.get(f, None)
1721 if key not in self.keys:
1723 self.keys[key]["uid"] = uid
1727 keyid = get_or_set_uid(uid, session).uid_id
1728 byuid[keyid] = (uid, name)
1729 byname[uid] = (keyid, name)
1731 return (byname, byuid)
1733 def generate_users_from_keyring(self, format, session):
1737 for x in self.keys.keys():
1738 if "email" not in self.keys[x]:
1740 self.keys[x]["uid"] = format % "invalid-uid"
1742 uid = format % self.keys[x]["email"]
1743 keyid = get_or_set_uid(uid, session).uid_id
1744 byuid[keyid] = (uid, self.keys[x]["name"])
1745 byname[uid] = (keyid, self.keys[x]["name"])
1746 self.keys[x]["uid"] = uid
1749 uid = format % "invalid-uid"
1750 keyid = get_or_set_uid(uid, session).uid_id
1751 byuid[keyid] = (uid, "ungeneratable user id")
1752 byname[uid] = (keyid, "ungeneratable user id")
1754 return (byname, byuid)
1756 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    try:
        return q.one()
    except NoResultFound:
        return None
1778 __all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    Return the paths of all active keyrings, highest priority first.

    @rtype: list
    @return: list of active keyring paths
    """
    # Highest Keyring.priority sorts first because of the desc() ordering.
    return [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).order_by(desc(Keyring.priority)).all() ]
1788 __all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
    keyring is configured
    """
    keyrings = get_active_keyring_paths()

    # get_active_keyring_paths() is ordered by priority descending, so the
    # first entry (if any) is the primary keyring.
    if len(keyrings) > 0:
        return keyrings[0]
    else:
        return None
1806 __all__.append('get_primary_keyring_path')
1808 ################################################################################
class KeyringACLMap(object):
    """ORM class mapping a keyring to an upload ACL."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1817 __all__.append('KeyringACLMap')
1819 ################################################################################
class DBChange(object):
    """ORM class for an uploaded .changes file tracked in the database."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries
        self.poolfiles = []

        # Remove changes_pending_files references
        self.files = []

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None
1841 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    try:
        return q.one()
    except NoResultFound:
        return None
1866 __all__.append('get_dbchange')
1868 ################################################################################
class Location(ORMObject):
    """ORM class for an on-disk pool location (the location table)."""

    def __init__(self, path = None, component = None):
        # Fix: the 'path' argument was silently dropped in the reviewed copy.
        self.path = path
        self.component = component
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        # NOTE(review): trailing item restored after a truncated
        # continuation -- verify against revision control.
        return ['path', 'location_id', 'archive_type', 'component', \
            'files_count']

    def not_null_constraints(self):
        return ['path', 'archive_type']
1884 __all__.append('Location')
def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    try:
        return q.one()
    except NoResultFound:
        return None
1918 __all__.append('get_location')
1920 ################################################################################
class Maintainer(ORMObject):
    """ORM class for a maintainer name/email string (the maintainer table)."""

    def __init__(self, name = None):
        # Fix: the 'name' argument was silently dropped in the reviewed copy.
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        # Return an empty 4-tuple when no name is set, matching the shape
        # of fix_maintainer()'s result.
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())
1938 __all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert and flush/commit so the row gets an id.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        ret = maintainer

    return ret
1972 __all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    # Primary-key lookup; Query.get returns None for an unknown id.
    return session.query(Maintainer).get(maintainer_id)
1989 __all__.append('get_maintainer')
1991 ################################################################################
class NewComment(object):
    """ORM class for an ftpmaster comment on a package in the NEW queue."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
2000 __all__.append('NewComment')
def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """

    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # Existence check only; the comment rows themselves are not returned.
    return bool(q.count() > 0)
2027 __all__.append('has_new_comment')
def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)

    # The reviewed copy dropped the query result; return it.
    return q.all()
2059 __all__.append('get_new_comments')
2061 ################################################################################
class Override(ORMObject):
    """ORM class for an override entry: per-suite section/priority data
    for a package."""

    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        # Fix: the 'suite' argument was silently dropped in the reviewed copy.
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        # NOTE(review): final item restored after a truncated continuation.
        return ['package', 'suite', 'component', 'overridetype', 'section', \
            'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']
2080 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each optional filter accepts a scalar or a list; normalise to a list.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # The reviewed copy dropped the query result; return it.
    return q.all()
2127 __all__.append('get_override')
2130 ################################################################################
class OverrideType(ORMObject):
    """ORM class for a row of the override_type table (e.g. 'deb', 'udeb'
    or 'dsc')."""

    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        # Attribute names exposed through ORMObject's generic machinery.
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        # Columns which must be set before this object is flushed.
        return ['overridetype']
2142 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type, or None
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)

    try:
        return q.one()
    except NoResultFound:
        return None
2167 __all__.append('get_override_type')
2169 ################################################################################
class PolicyQueue(object):
    """ORM class for a policy queue (e.g. NEW) uploads pass through."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
2178 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)

    try:
        return q.one()
    except NoResultFound:
        return None
2203 __all__.append('get_policy_queue')
def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)

    try:
        return q.one()
    except NoResultFound:
        return None
2228 __all__.append('get_policy_queue_from_path')
2230 ################################################################################
class Priority(ORMObject):
    """ORM class for a package priority (required, important, ..., extra)."""

    def __init__(self, priority = None, level = None):
        self.priority = priority
        # Fix: the 'level' argument was silently dropped in the reviewed copy;
        # it is listed in not_null_constraints so it must be assigned.
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented
2255 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)

    try:
        return q.one()
    except NoResultFound:
        return None
2280 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    ret = {}
    q = session.query(Priority)
    for x in q.all():
        ret[x.priority] = x.priority_id

    return ret
2302 __all__.append('get_priorities')
2304 ################################################################################
class Section(ORMObject):
    """ORM class for an archive section (admin, libs, utils, ...)."""

    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        # Fix: the return value was missing in the reviewed copy.
        return ['section']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented
2328 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)

    try:
        return q.one()
    except NoResultFound:
        return None
2353 __all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    ret = {}
    q = session.query(Section)
    for x in q.all():
        ret[x.section] = x.section_id

    return ret
2375 __all__.append('get_sections')
2377 ################################################################################
class SrcContents(ORMObject):
    """ORM class associating a source package with one of its file names."""

    def __init__(self, file = None, source = None):
        # Fix: the 'file' argument was silently dropped in the reviewed copy.
        self.file = file
        self.source = source

    def properties(self):
        return ['file', 'source']
2387 __all__.append('SrcContents')
2389 ################################################################################
2391 from debian.debfile import Deb822
2393 # Temporary Deb822 subclass to fix bugs with : handling; see #597249
2394 class Dak822(Deb822):
2395 def _internal_parser(self, sequence, fields=None):
2396 # The key is non-whitespace, non-colon characters before any colon.
2397 key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
2398 single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
2399 multi = re.compile(key_part + r"$")
2400 multidata = re.compile(r"^\s(?P<data>.+?)\s*$")
2402 wanted_field = lambda f: fields is None or f in fields
2404 if isinstance(sequence, basestring):
2405 sequence = sequence.splitlines()
2409 for line in self.gpg_stripped_paragraph(sequence):
2410 m = single.match(line)
2413 self[curkey] = content
2415 if not wanted_field(m.group('key')):
2419 curkey = m.group('key')
2420 content = m.group('data')
2423 m = multi.match(line)
2426 self[curkey] = content
2428 if not wanted_field(m.group('key')):
2432 curkey = m.group('key')
2436 m = multidata.match(line)
2438 content += '\n' + line # XXX not m.group('data')?
2442 self[curkey] = content
2445 class DBSource(ORMObject):
2446 def __init__(self, source = None, version = None, maintainer = None, \
2447 changedby = None, poolfile = None, install_date = None):
2448 self.source = source
2449 self.version = version
2450 self.maintainer = maintainer
2451 self.changedby = changedby
2452 self.poolfile = poolfile
2453 self.install_date = install_date
2457 return self.source_id
2459 def properties(self):
2460 return ['source', 'source_id', 'maintainer', 'changedby', \
2461 'fingerprint', 'poolfile', 'version', 'suites_count', \
2462 'install_date', 'binaries_count', 'uploaders_count']
2464 def not_null_constraints(self):
2465 return ['source', 'version', 'install_date', 'maintainer', \
2466 'changedby', 'poolfile', 'install_date']
2468 def read_control_fields(self):
2470 Reads the control information from a dsc
2473 @return: fields is the dsc information in a dictionary form
2475 fullpath = self.poolfile.fullpath
2476 fields = Dak822(open(self.poolfile.fullpath, 'r'))
2479 metadata = association_proxy('key', 'value')
2481 def scan_contents(self):
2483 Returns a set of names for non directories. The path names are
2484 normalized after converting them from either utf-8 or iso8859-1
2487 fullpath = self.poolfile.fullpath
2488 from daklib.contents import UnpackedSource
2489 unpacked = UnpackedSource(fullpath)
2491 for name in unpacked.get_all_filenames():
2492 # enforce proper utf-8 encoding
2494 name.decode('utf-8')
2495 except UnicodeDecodeError:
2496 name = name.decode('iso8859-1').encode('utf-8')
2500 __all__.append('DBSource')
2503 def source_exists(source, source_version, suites = ["any"], session=None):
2505 Ensure that source exists somewhere in the archive for the binary
2506 upload being processed.
2507 1. exact match => 1.0-3
2508 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2510 @type source: string
2511 @param source: source name
2513 @type source_version: string
2514 @param source_version: expected source version
2517 @param suites: list of suites to check in, default I{any}
2519 @type session: Session
2520 @param session: Optional SQLA session object (a temporary one will be
2521 generated if not supplied)
2524 @return: returns 1 if a source with expected version is found, otherwise 0
2531 from daklib.regexes import re_bin_only_nmu
2532 orig_source_version = re_bin_only_nmu.sub('', source_version)
2534 for suite in suites:
2535 q = session.query(DBSource).filter_by(source=source). \
2536 filter(DBSource.version.in_([source_version, orig_source_version]))
2538 # source must exist in 'suite' or a suite that is enhanced by 'suite'
2539 s = get_suite(suite, session)
2540 enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
2541 considered_suites = [ vc.reference for vc in enhances_vcs ]
2542 considered_suites.append(s)
2544 q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))
2549 # No source found so return not ok
2554 __all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """

    # Suite.sources.any(...) emits an EXISTS subquery per suite.
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2570 __all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """

    q = session.query(DBSource).filter_by(source=source)

    # Both filters below are optional refinements; None means "don't care".
    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    return q.all()
2605 __all__.append('get_sources_from_name')
2607 # FIXME: This function fails badly if it finds more than 1 source package and
2608 # its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite: string
    @param suite: the suite name

    @rtype: DBSource or None
    @return: the DBSource object for I{source} in I{suite}, or None if no
    unique match exists
    """

    q = get_suite(suite, session).get_sources(source)
    try:
        return q.one()
    except NoResultFound:
        return None
2634 __all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database

    @param obj: a DBBinary or DBSource object; must provide
    read_control_fields() and a 'metadata' mapping keyed by MetadataKey
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        try:
            # Try a plain string conversion first (pure ASCII values)
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            try:
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()
2661 __all__.append('import_metadata_into_db')
2664 ################################################################################
def split_uploaders(uploaders_list):
    """
    Split the Uploaders field into the individual uploaders and yield each of
    them. Beware: email addresses might contain commas.
    """
    # Commas separate uploaders, but can also appear inside a display name
    # ("Doe, John <jd@example.org>").  A *separating* comma always follows
    # the closing '>' of an address, so rewrite exactly those commas to tabs
    # and split on the tab instead.
    normalised = re.sub(">[ ]*,", ">\t", uploaders_list)
    for part in normalised.split("\t"):
        yield part.strip()
def add_dsc_to_db(u, filename, session=None):
    """
    Add a source package (.dsc) upload to the database: the DBSource row,
    its pool files and dsc_files entries, suite associations and uploaders.

    @param u: the Upload object being processed
    @param filename: the .dsc filename key into u.pkg.files
    @return: (source, dsc_component, dsc_location_id, pfs) where pfs is the
    list of PoolFile objects touched by this upload
    """
    entry = u.pkg.files[filename]
    source = DBSource()
    pfs = []

    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    # If Changed-By isn't available, fall back to maintainer
    if u.pkg.changes.has_key("changed-by"):
        source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    else:
        source.changedby_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        session.flush()
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]
    session.add(source)
    session.flush()

    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile = DSCFile()
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df = DSCFile()
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        dfentry = None
        for f, e in u.pkg.files.items():
            if f == dsc_file:
                dfentry = e
                break

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id
                pfs.append(obj)

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id
        else:
            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id
        session.add(df)

    # Add the src_uploaders to the DB
    session.flush()
    session.refresh(source)
    source.uploaders = [source.maintainer]
    if u.pkg.dsc.has_key("uploaders"):
        for up in split_uploaders(u.pkg.dsc["uploaders"]):
            source.uploaders.append(get_or_set_maintainer(up, session))

    session.flush()

    return source, dsc_component, dsc_location_id, pfs
2772 __all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs. That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    @param u: the Upload object being processed
    @param filename: the binary package filename key into u.pkg.files
    @return: (bin, poolfile) - the new DBBinary and its PoolFile
    @raise NoSourceFieldError: if no unique source package can be matched
    """
    cnf = Config()
    entry = u.pkg.files[filename]

    bin = DBBinary()
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Find poolfile id
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
    else:
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Find source id; a binary must map to exactly one source package.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        # raise-as-call form works on both Python 2 and 3
        raise NoSourceFieldError("Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
                                  (bin.package, bin.version, entry["architecture"],
                                   filename, bin.binarytype, u.pkg.changes["fingerprint"]))

    bin.source_id = bin_sources[0].source_id

    if entry.has_key("built-using"):
        for srcname, version in entry["built-using"]:
            exsources = get_sources_from_name(srcname, version, session=session)
            if len(exsources) != 1:
                raise NoSourceFieldError("Unable to find source package (%s = %s) in Built-Using for %s (%s), %s, file %s, type %s, signed by %s" % \
                                          (srcname, version, bin.package, bin.version, entry["architecture"],
                                           filename, bin.binarytype, u.pkg.changes["fingerprint"]))

            bin.extra_sources.append(exsources[0])

    # Add and flush object so it has an ID
    session.add(bin)
    session.flush()

    suite_names = u.pkg.changes["distribution"].keys()
    bin.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #if not contents:
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

    return bin, poolfile
2842 __all__.append('add_deb_to_db')
2844 ################################################################################
class SourceACL(object):
    """Row object for the source_acl table (mapped by DBConn)."""
    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<SourceACL %s>' % self.source_acl_id
2853 __all__.append('SourceACL')
2855 ################################################################################
class SrcFormat(object):
    """Row object for the src_format table (mapped by DBConn)."""
    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
2864 __all__.append('SrcFormat')
2866 ################################################################################
# (display name, Suite attribute) pairs used by Suite.details().
# NOTE(review): one entry between 'Origin' and 'Description' was lost in
# extraction; restored as ('Label', 'label') -- confirm against upstream.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Label', 'label'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2884 # Why the heck don't we have any UNIQUE constraints in table suite?
2885 # TODO: Add UNIQUE constraints for appropriate columns.
# Why the heck don't we have any UNIQUE constraints in table suite?
# TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count', \
            'overrides_count']

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        # Render one "Name: value" line per populated SUITE_FIELDS entry.
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        (Default False)

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        (Default False)

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """

        q = object_session(self).query(Architecture).with_parent(self)
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """

        session = object_session(self)
        return session.query(DBSource).filter_by(source = source). \
            with_parent(self)

    def get_overridesuite(self):
        # A suite may delegate its overrides to another suite; fall back to
        # ourselves when no overridesuite is configured.
        if self.overridesuite is None:
            return self
        else:
            return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()
2966 __all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """

    q = session.query(Suite).filter_by(suite_name=suite)

    try:
        return q.one()
    except NoResultFound:
        return None
2991 __all__.append('get_suite')
2993 ################################################################################
2995 # TODO: should be removed because the implementation is too trivial
# TODO: should be removed because the implementation is too trivial
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """

    # Thin delegation: look the suite up, then ask it for its architectures.
    the_suite = get_suite(suite, session)
    return the_suite.get_architectures(skipsrc, skipall)
3022 __all__.append('get_suite_architectures')
3024 ################################################################################
class SuiteSrcFormat(object):
    """Row object for the suite_src_formats association table."""
    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
3033 __all__.append('SuiteSrcFormat')
def get_suite_src_formats(suite, session=None):
    """
    Returns list of allowed SrcFormat for C{suite}.

    @type suite: str
    @param suite: Suite name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: the list of allowed source formats for I{suite}
    """

    q = session.query(SrcFormat)
    q = q.join(SuiteSrcFormat)
    q = q.join(Suite).filter_by(suite_name=suite)
    q = q.order_by('format_name')

    return q.all()
3058 __all__.append('get_suite_src_formats')
3060 ################################################################################
class Uid(ORMObject):
    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        return ['uid']
3085 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert and make it visible to the caller.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        ret = uid

    return ret
3119 __all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """
    Returns the Uid owning the given fingerprint, or None if no unique
    match exists.

    @type fpr: string
    @param fpr: the fingerprint to look up
    """
    q = session.query(Uid)
    q = q.join(Fingerprint).filter_by(fingerprint=fpr)

    try:
        return q.one()
    except NoResultFound:
        return None
3131 __all__.append('get_uid_from_fingerprint')
3133 ################################################################################
class UploadBlock(object):
    """Row object for the upload_blocks table (mapped by DBConn)."""
    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper.
        pass

    def __repr__(self):
        return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
3142 __all__.append('UploadBlock')
3144 ################################################################################
class MetadataKey(ORMObject):
    def __init__(self, key = None):
        self.key = key

    def properties(self):
        return ['key']

    def not_null_constraints(self):
        return ['key']
3156 __all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """

    q = session.query(MetadataKey).filter_by(key=keyname)

    try:
        ret = q.one()
    except NoResultFound:
        # Not present yet: insert and hand the new row back.
        ret = MetadataKey(keyname)
        session.add(ret)
        session.commit_or_flush()

    return ret
3188 __all__.append('get_or_set_metadatakey')
3190 ################################################################################
class BinaryMetadata(ORMObject):
    def __init__(self, key = None, value = None, binary = None):
        self.key = key
        self.value = value
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        return ['value']
3204 __all__.append('BinaryMetadata')
3206 ################################################################################
class SourceMetadata(ORMObject):
    def __init__(self, key = None, value = None, source = None):
        self.key = key
        self.value = value
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        return ['value']
3220 __all__.append('SourceMetadata')
3222 ################################################################################
class VersionCheck(ORMObject):
    def __init__(self, *args, **kwargs):
        # Attributes are populated by the SQLAlchemy mapper.
        pass

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        return ['check']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']
3235 __all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """
    Returns the VersionCheck rows for C{suite_name}, optionally restricted
    to one C{check} kind (e.g. 'Enhances', 'MustBeNewerThan').

    @rtype: list
    @return: list of VersionCheck objects (empty if the suite is unknown)
    """
    suite = get_suite(suite_name, session)
    if suite is None:
        # Make sure that what we return is iterable so that list comprehensions
        # involving this don't cause a traceback
        return []

    q = session.query(VersionCheck).filter_by(suite=suite)
    if check:
        q = q.filter_by(check=check)
    return q.all()
3249 __all__.append('get_version_checks')
3251 ################################################################################
3253 class DBConn(object):
3255 database module init.
3259 def __init__(self, *args, **kwargs):
3260 self.__dict__ = self.__shared_state
3262 if not getattr(self, 'initialised', False):
3263 self.initialised = True
3264 self.debug = kwargs.has_key('debug')
3267 def __setuptables(self):
3274 'binaries_metadata',
3278 'build_queue_files',
3279 'build_queue_policy_files',
3284 'changes_pending_binaries',
3285 'changes_pending_files',
3286 'changes_pending_source',
3287 'changes_pending_files_map',
3288 'changes_pending_source_files',
3289 'changes_pool_files',
3291 'external_overrides',
3292 'extra_src_references',
3301 # TODO: the maintainer column in table override should be removed.
3315 'suite_architectures',
3316 'suite_build_queue_copy',
3317 'suite_src_formats',
3324 'almost_obsolete_all_associations',
3325 'almost_obsolete_src_associations',
3326 'any_associations_source',
3327 'bin_associations_binaries',
3328 'binaries_suite_arch',
3329 'binfiles_suite_component_arch',
3332 'newest_all_associations',
3333 'newest_any_associations',
3335 'newest_src_association',
3336 'obsolete_all_associations',
3337 'obsolete_any_associations',
3338 'obsolete_any_by_all_associations',
3339 'obsolete_src_associations',
3341 'src_associations_bin',
3342 'src_associations_src',
3343 'suite_arch_by_name',
3346 for table_name in tables:
3347 table = Table(table_name, self.db_meta, \
3348 autoload=True, useexisting=True)
3349 setattr(self, 'tbl_%s' % table_name, table)
3351 for view_name in views:
3352 view = Table(view_name, self.db_meta, autoload=True)
3353 setattr(self, 'view_%s' % view_name, view)
3355 def __setupmappers(self):
3356 mapper(Architecture, self.tbl_architecture,
3357 properties = dict(arch_id = self.tbl_architecture.c.id,
3358 suites = relation(Suite, secondary=self.tbl_suite_architectures,
3359 order_by='suite_name',
3360 backref=backref('architectures', order_by='arch_string'))),
3361 extension = validator)
3363 mapper(Archive, self.tbl_archive,
3364 properties = dict(archive_id = self.tbl_archive.c.id,
3365 archive_name = self.tbl_archive.c.name))
3367 mapper(BuildQueue, self.tbl_build_queue,
3368 properties = dict(queue_id = self.tbl_build_queue.c.id))
3370 mapper(BuildQueueFile, self.tbl_build_queue_files,
3371 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3372 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3374 mapper(BuildQueuePolicyFile, self.tbl_build_queue_policy_files,
3376 build_queue = relation(BuildQueue, backref='policy_queue_files'),
3377 file = relation(ChangePendingFile, lazy='joined')))
3379 mapper(DBBinary, self.tbl_binaries,
3380 properties = dict(binary_id = self.tbl_binaries.c.id,
3381 package = self.tbl_binaries.c.package,
3382 version = self.tbl_binaries.c.version,
3383 maintainer_id = self.tbl_binaries.c.maintainer,
3384 maintainer = relation(Maintainer),
3385 source_id = self.tbl_binaries.c.source,
3386 source = relation(DBSource, backref='binaries'),
3387 arch_id = self.tbl_binaries.c.architecture,
3388 architecture = relation(Architecture),
3389 poolfile_id = self.tbl_binaries.c.file,
3390 poolfile = relation(PoolFile, backref=backref('binary', uselist = False)),
3391 binarytype = self.tbl_binaries.c.type,
3392 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3393 fingerprint = relation(Fingerprint),
3394 install_date = self.tbl_binaries.c.install_date,
3395 suites = relation(Suite, secondary=self.tbl_bin_associations,
3396 backref=backref('binaries', lazy='dynamic')),
3397 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
3398 backref=backref('extra_binary_references', lazy='dynamic')),
3399 key = relation(BinaryMetadata, cascade='all',
3400 collection_class=attribute_mapped_collection('key'))),
3401 extension = validator)
3403 mapper(BinaryACL, self.tbl_binary_acl,
3404 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3406 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3407 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3408 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3409 architecture = relation(Architecture)))
3411 mapper(Component, self.tbl_component,
3412 properties = dict(component_id = self.tbl_component.c.id,
3413 component_name = self.tbl_component.c.name),
3414 extension = validator)
3416 mapper(DBConfig, self.tbl_config,
3417 properties = dict(config_id = self.tbl_config.c.id))
3419 mapper(DSCFile, self.tbl_dsc_files,
3420 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3421 source_id = self.tbl_dsc_files.c.source,
3422 source = relation(DBSource),
3423 poolfile_id = self.tbl_dsc_files.c.file,
3424 poolfile = relation(PoolFile)))
3426 mapper(ExternalOverride, self.tbl_external_overrides,
3428 suite_id = self.tbl_external_overrides.c.suite,
3429 suite = relation(Suite),
3430 component_id = self.tbl_external_overrides.c.component,
3431 component = relation(Component)))
3433 mapper(PoolFile, self.tbl_files,
3434 properties = dict(file_id = self.tbl_files.c.id,
3435 filesize = self.tbl_files.c.size,
3436 location_id = self.tbl_files.c.location,
3437 location = relation(Location,
3438 # using lazy='dynamic' in the back
3439 # reference because we have A LOT of
3440 # files in one location
3441 backref=backref('files', lazy='dynamic'))),
3442 extension = validator)
3444 mapper(Fingerprint, self.tbl_fingerprint,
3445 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3446 uid_id = self.tbl_fingerprint.c.uid,
3447 uid = relation(Uid),
3448 keyring_id = self.tbl_fingerprint.c.keyring,
3449 keyring = relation(Keyring),
3450 source_acl = relation(SourceACL),
3451 binary_acl = relation(BinaryACL)),
3452 extension = validator)
3454 mapper(Keyring, self.tbl_keyrings,
3455 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3456 keyring_id = self.tbl_keyrings.c.id))
3458 mapper(DBChange, self.tbl_changes,
3459 properties = dict(change_id = self.tbl_changes.c.id,
3460 poolfiles = relation(PoolFile,
3461 secondary=self.tbl_changes_pool_files,
3462 backref="changeslinks"),
3463 seen = self.tbl_changes.c.seen,
3464 source = self.tbl_changes.c.source,
3465 binaries = self.tbl_changes.c.binaries,
3466 architecture = self.tbl_changes.c.architecture,
3467 distribution = self.tbl_changes.c.distribution,
3468 urgency = self.tbl_changes.c.urgency,
3469 maintainer = self.tbl_changes.c.maintainer,
3470 changedby = self.tbl_changes.c.changedby,
3471 date = self.tbl_changes.c.date,
3472 version = self.tbl_changes.c.version,
3473 files = relation(ChangePendingFile,
3474 secondary=self.tbl_changes_pending_files_map,
3475 backref="changesfile"),
3476 in_queue_id = self.tbl_changes.c.in_queue,
3477 in_queue = relation(PolicyQueue,
3478 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3479 approved_for_id = self.tbl_changes.c.approved_for))
3481 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3482 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3484 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3485 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3486 filename = self.tbl_changes_pending_files.c.filename,
3487 size = self.tbl_changes_pending_files.c.size,
3488 md5sum = self.tbl_changes_pending_files.c.md5sum,
3489 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3490 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3492 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3493 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3494 change = relation(DBChange),
3495 maintainer = relation(Maintainer,
3496 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3497 changedby = relation(Maintainer,
3498 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3499 fingerprint = relation(Fingerprint),
3500 source_files = relation(ChangePendingFile,
3501 secondary=self.tbl_changes_pending_source_files,
3502 backref="pending_sources")))
3505 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3506 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3507 keyring = relation(Keyring, backref="keyring_acl_map"),
3508 architecture = relation(Architecture)))
3510 mapper(Location, self.tbl_location,
3511 properties = dict(location_id = self.tbl_location.c.id,
3512 component_id = self.tbl_location.c.component,
3513 component = relation(Component, backref='location'),
3514 archive_id = self.tbl_location.c.archive,
3515 archive = relation(Archive),
3516 # FIXME: the 'type' column is old cruft and
3517 # should be removed in the future.
3518 archive_type = self.tbl_location.c.type),
3519 extension = validator)
3521 mapper(Maintainer, self.tbl_maintainer,
3522 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3523 maintains_sources = relation(DBSource, backref='maintainer',
3524 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3525 changed_sources = relation(DBSource, backref='changedby',
3526 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3527 extension = validator)
3529 mapper(NewComment, self.tbl_new_comments,
3530 properties = dict(comment_id = self.tbl_new_comments.c.id))
3532 mapper(Override, self.tbl_override,
3533 properties = dict(suite_id = self.tbl_override.c.suite,
3534 suite = relation(Suite, \
3535 backref=backref('overrides', lazy='dynamic')),
3536 package = self.tbl_override.c.package,
3537 component_id = self.tbl_override.c.component,
3538 component = relation(Component, \
3539 backref=backref('overrides', lazy='dynamic')),
3540 priority_id = self.tbl_override.c.priority,
3541 priority = relation(Priority, \
3542 backref=backref('overrides', lazy='dynamic')),
3543 section_id = self.tbl_override.c.section,
3544 section = relation(Section, \
3545 backref=backref('overrides', lazy='dynamic')),
3546 overridetype_id = self.tbl_override.c.type,
3547 overridetype = relation(OverrideType, \
3548 backref=backref('overrides', lazy='dynamic'))))
3550 mapper(OverrideType, self.tbl_override_type,
3551 properties = dict(overridetype = self.tbl_override_type.c.type,
3552 overridetype_id = self.tbl_override_type.c.id))
3554 mapper(PolicyQueue, self.tbl_policy_queue,
3555 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3557 mapper(Priority, self.tbl_priority,
3558 properties = dict(priority_id = self.tbl_priority.c.id))
3560 mapper(Section, self.tbl_section,
3561 properties = dict(section_id = self.tbl_section.c.id,
3562 section=self.tbl_section.c.section))
3564 mapper(DBSource, self.tbl_source,
3565 properties = dict(source_id = self.tbl_source.c.id,
3566 version = self.tbl_source.c.version,
3567 maintainer_id = self.tbl_source.c.maintainer,
3568 poolfile_id = self.tbl_source.c.file,
3569 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3570 fingerprint_id = self.tbl_source.c.sig_fpr,
3571 fingerprint = relation(Fingerprint),
3572 changedby_id = self.tbl_source.c.changedby,
3573 srcfiles = relation(DSCFile,
3574 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3575 suites = relation(Suite, secondary=self.tbl_src_associations,
3576 backref=backref('sources', lazy='dynamic')),
3577 uploaders = relation(Maintainer,
3578 secondary=self.tbl_src_uploaders),
3579 key = relation(SourceMetadata, cascade='all',
3580 collection_class=attribute_mapped_collection('key'))),
3581 extension = validator)
3583 mapper(SourceACL, self.tbl_source_acl,
3584 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3586 mapper(SrcFormat, self.tbl_src_format,
3587 properties = dict(src_format_id = self.tbl_src_format.c.id,
3588 format_name = self.tbl_src_format.c.format_name))
3590 mapper(Suite, self.tbl_suite,
3591 properties = dict(suite_id = self.tbl_suite.c.id,
3592 policy_queue = relation(PolicyQueue),
3593 copy_queues = relation(BuildQueue,
3594 secondary=self.tbl_suite_build_queue_copy)),
3595 extension = validator)
3597 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3598 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3599 suite = relation(Suite, backref='suitesrcformats'),
3600 src_format_id = self.tbl_suite_src_formats.c.src_format,
3601 src_format = relation(SrcFormat)))
3603 mapper(Uid, self.tbl_uid,
3604 properties = dict(uid_id = self.tbl_uid.c.id,
3605 fingerprint = relation(Fingerprint)),
3606 extension = validator)
3608 mapper(UploadBlock, self.tbl_upload_blocks,
3609 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3610 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3611 uid = relation(Uid, backref="uploadblocks")))
3613 mapper(BinContents, self.tbl_bin_contents,
3615 binary = relation(DBBinary,
3616 backref=backref('contents', lazy='dynamic', cascade='all')),
3617 file = self.tbl_bin_contents.c.file))
3619 mapper(SrcContents, self.tbl_src_contents,
3621 source = relation(DBSource,
3622 backref=backref('contents', lazy='dynamic', cascade='all')),
3623 file = self.tbl_src_contents.c.file))
3625 mapper(MetadataKey, self.tbl_metadata_keys,
3627 key_id = self.tbl_metadata_keys.c.key_id,
3628 key = self.tbl_metadata_keys.c.key))
3630 mapper(BinaryMetadata, self.tbl_binaries_metadata,
3632 binary_id = self.tbl_binaries_metadata.c.bin_id,
3633 binary = relation(DBBinary),
3634 key_id = self.tbl_binaries_metadata.c.key_id,
3635 key = relation(MetadataKey),
3636 value = self.tbl_binaries_metadata.c.value))
3638 mapper(SourceMetadata, self.tbl_source_metadata,
3640 source_id = self.tbl_source_metadata.c.src_id,
3641 source = relation(DBSource),
3642 key_id = self.tbl_source_metadata.c.key_id,
3643 key = relation(MetadataKey),
3644 value = self.tbl_source_metadata.c.value))
3646 mapper(VersionCheck, self.tbl_version_check,
3648 suite_id = self.tbl_version_check.c.suite,
3649 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
3650 reference_id = self.tbl_version_check.c.reference,
3651 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
3653 ## Connection functions
# Create the database engine and session factory from dak's configuration.
# Connection string precedence: DB::Service (pg_service.conf service name),
# then DB::Host (TCP), then a local (Unix-socket) connection by database name.
# NOTE(review): this extract has elided lines — the statement binding `cnf`
# (presumably `cnf = Config()`), the `else:` introducing the socket branch,
# and the trailing keyword arguments of the sessionmaker(...) call are not
# visible here. Confirm against the full file before editing.
3654 def __createconn(self):
3655 from config import Config
# Branch 1: libpq service definition — credentials/host come from
# pg_service.conf, selected by name.
3657 if cnf.has_key("DB::Service"):
3658 connstr = "postgresql://service=%s" % cnf["DB::Service"]
# Branch 2: explicit host; append port only when set and not the
# sentinel "-1", then the database name as the URL path.
3659 elif cnf.has_key("DB::Host"):
3661 connstr = "postgresql://%s" % cnf["DB::Host"]
3662 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3663 connstr += ":%s" % cnf["DB::Port"]
3664 connstr += "/%s" % cnf["DB::Name"]
# Branch 3 (elided `else:` presumed): local socket connection; here the
# port, if any, is passed as a query parameter instead of in the netloc.
3667 connstr = "postgresql:///%s" % cnf["DB::Name"]
3668 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
3669 connstr += "?port=%s" % cnf["DB::Port"]
# Optional engine tuning knobs, all read from dak configuration.
3671 engine_args = { 'echo': self.debug }
3672 if cnf.has_key('DB::PoolSize'):
3673 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
3674 if cnf.has_key('DB::MaxOverflow'):
3675 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# Workaround specific to SQLAlchemy 0.6: allow disabling psycopg2's
# native unicode handling via DB::Unicode = false.
3676 if sa_major_version == '0.6' and cnf.has_key('DB::Unicode') and \
3677 cnf['DB::Unicode'] == 'false':
3678 engine_args['use_native_unicode'] = False
3680 # Monkey patch a new dialect in in order to support service= syntax
3681 import sqlalchemy.dialects.postgresql
3682 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
3683 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
3684 def create_connect_args(self, url):
# For service URLs, hand psycopg2 a raw "service=<name>" DSN;
# [21:] strips the literal "postgresql://service=" prefix.
3685 if str(url).startswith('postgresql://service='):
3687 servicename = str(url)[21:]
3688 return (['service=%s' % servicename], {})
# Anything else falls through to the stock dialect behaviour.
3690 return PGDialect_psycopg2.create_connect_args(self, url)
# Globally replace the postgresql dialect so create_engine() below
# picks up the service=-aware subclass.
3692 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
3694 self.db_pg = create_engine(connstr, **engine_args)
3695 self.db_meta = MetaData()
3696 self.db_meta.bind = self.db_pg
# NOTE(review): sessionmaker's remaining keyword arguments are elided in
# this extract (the call is split across missing lines) — verify in the
# full file.
3697 self.db_smaker = sessionmaker(bind=self.db_pg,
3701 self.__setuptables()
3702 self.__setupmappers()
# Remember the creating process id so session() can detect use after a
# fork and reinitialise the connection.
3703 self.pid = os.getpid()
# Public session factory for DBConn.
# NOTE(review): this extract has elided lines — the docstring's quote
# delimiters, the body of the fork-reinitialisation branch, the guard
# around the work_mem SET (presumably `if work_mem > 0:`), and the final
# `return session` are not visible here. Confirm against the full file.
3705 def session(self, work_mem = 0):
3707 Returns a new session object. If a work_mem parameter is provided a new
3708 transaction is started and the work_mem parameter is set for this
3709 transaction. The work_mem parameter is measured in MB. A default value
3710 will be used if the parameter is not set.
3712 # reinitialize DBConn in new processes
# A pid mismatch means we were forked since __createconn ran; the
# (elided) branch body presumably rebuilds the connection machinery.
3713 if self.pid != os.getpid():
3716 session = self.db_smaker()
# SET LOCAL limits the work_mem override to the current transaction.
3718 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
# Export DBConn as part of this module's public API.
3721 __all__.append('DBConn')