5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
# suppress some deprecation warnings in squeeze related to sqlalchemy
# NOTE(review): the warning-category argument of this first filter appears to
# be missing from this excerpt -- confirm against the full file.
warnings.filterwarnings('ignore', \
    "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
# TODO: sqlalchemy needs some extra configuration to correctly reflect
# the ind_deb_contents_* indexes - we ignore the warnings at the moment
warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
83 # Patch in support for the debversion field type so that it works during
87 # that is for sqlalchemy 0.6
88 UserDefinedType = sqltypes.UserDefinedType
90 # this one for sqlalchemy 0.5
91 UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    # Column type for the 'debversion' database type; registered with the
    # postgres dialect's ischema_names below.
    # NOTE(review): the bodies of these three methods are missing from this
    # excerpt; only the headers survive.
    def get_col_spec(self):

    def bind_processor(self, dialect):

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
# Register DebVersion under the 'debversion' type name for reflection with
# the supported sqlalchemy versions.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): the 'else:' introducing this raise appears to be missing
# from this excerpt.
    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
# Public API of this module; extended incrementally below via __all__.append().
__all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    # NOTE(review): several lines of this wrapper (an apparent elif branch
    # for a positional session, the else: before the flush assignment, and
    # the try/finally around the call) are missing from this excerpt; the
    # surviving lines are kept verbatim.
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
            session = args[-1] = DBConn().session()
            private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit
            session.commit_or_flush = session.flush

        return fn(*args, **kwargs)

        if private_transaction:
            # We created a session; close it.

    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name

__all__.append('session_wrapper')
166 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.
    """

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """

    # NOTE(review): several method headers (apparently json(), classname(),
    # __repr__, __str__ and validate()) and parts of their bodies are
    # missing from this excerpt; the surviving lines are kept verbatim.

        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                elif hasattr(value, 'count'):
                    value = value.count()
                raise KeyError('Do not understand property %s.' % property)
            if not hasattr(self, property):
            value = getattr(self, property)
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            data[property] = value
        return json.dumps(data)

        """
        Returns the name of the class.
        """
        return type(self).__name__

        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

        """
        Returns a human readable form of the object using the properties()
        """
        return '<%s %s>' % (self.classname(), self.json())

        """
        This function should be implemented by derived classes to validate self.
        It may raise the DBUpdateError exception if needed.
        """

__all__.append('ORMObject')
257 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """
    # NOTE(review): the method bodies are missing from this excerpt.
    def before_update(self, mapper, connection, instance):

    def before_insert(self, mapper, connection, instance):

# Shared extension instance used when configuring mappers.
validator = Validator()
276 ################################################################################
class Architecture(ORMObject):
    # A Debian architecture row; compares equal to its plain name string
    # (see __eq__/__ne__ below).
    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description
283 def __eq__(self, val):
284 if isinstance(val, str):
285 return (self.arch_string== val)
286 # This signals to use the normal comparison operator
287 return NotImplemented
289 def __ne__(self, val):
290 if isinstance(val, str):
291 return (self.arch_string != val)
292 # This signals to use the normal comparison operator
293 return NotImplemented
    def properties(self):
        # Consumed by ORMObject; arch_string first because repr() uses it.
        return ['arch_string', 'arch_id', 'suites_count']

    # NOTE(review): the 'def validate(self):' header and the final argument
    # of the raise are missing from this excerpt.
        if self.arch_string is None or len(self.arch_string) == 0:
            raise DBUpdateError( \
                "Validation failed because 'arch_string' must not be empty in object\n%s" % \

__all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns database id for given C{architecture}.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)

    # NOTE(review): the try:/return lines of the lookup are missing from
    # this excerpt; only the exception clause survives.
    except NoResultFound:

__all__.append('get_architecture')
# TODO: should be removed because the implementation is too trivial

def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of Suite objects for the given name (may be empty)
    """
    return get_architecture(architecture, session).suites

__all__.append('get_architecture_suites')
352 ################################################################################
class Archive(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<Archive %s>' % self.archive_name

__all__.append('Archive')
def get_archive(archive, session=None):
    """
    returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the arhive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Archive object for the given name (None if not present)
    """
    # archive names are matched lowercase
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    # NOTE(review): the try:/return lines of the lookup are missing from
    # this excerpt.
    except NoResultFound:

__all__.append('get_archive')
390 ################################################################################
class BinAssociation(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)

__all__.append('BinAssociation')
401 ################################################################################
class BinContents(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)

__all__.append('BinContents')
412 ################################################################################
class DBBinary(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)

__all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in

    @param package: DBBinary package name to search for

    @return: list of Suite objects for the given package
    """
    return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()

__all__.append('get_suites_binary_in')
def get_binary_from_id(binary_id, session=None):
    """
    Returns DBBinary object for given C{id}

    @param binary_id: Id of the required binary

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: DBBinary object for the given binary (None if not present)
    """
    q = session.query(DBBinary).filter_by(binary_id=binary_id)

    # NOTE(review): the try:/return lines of the lookup are missing from
    # this excerpt.
    except NoResultFound:

__all__.append('get_binary_from_id')
def get_binaries_from_name(package, version=None, architecture=None, session=None):
    """
    Returns list of DBBinary objects for given C{package} name

    @param package: DBBinary package name to search for

    @type version: str or None
    @param version: Version to search for (or None)

    @type architecture: str, list or None
    @param architecture: Architectures to limit to (or None if no limit)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBBinary objects for the given name (may be empty)
    """
    q = session.query(DBBinary).filter_by(package=package)

    if version is not None:
        q = q.filter_by(version=version)

    if architecture is not None:
        if not isinstance(architecture, list):
            # normalise a single architecture name to a one-element list
            architecture = [architecture]
        q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))

    # NOTE(review): the final return of the query results is missing from
    # this excerpt.

__all__.append('get_binaries_from_name')
def get_binaries_from_source_id(source_id, session=None):
    """
    Returns list of DBBinary objects for given C{source_id}

    @param source_id: source_id to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBBinary objects for the given name (may be empty)
    """
    return session.query(DBBinary).filter_by(source_id=source_id).all()

__all__.append('get_binaries_from_source_id')
def get_binary_from_name_suite(package, suitename, session=None):
    ### For dak examine-package
    ### XXX: Doesn't use object API yet
    # SECURITY NOTE(review): this SQL is assembled with %-interpolation of
    # 'package' and 'suitename' instead of bound parameters -- only safe if
    # callers pass trusted values. Consider bound parameters.
    # NOTE(review): some WHERE/join clauses appear to be missing from this
    # excerpt.

    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
             WHERE b.package='%(package)s'
             AND fi.location = l.id
             AND l.component = c.id
             AND su.suite_name %(suitename)s
             ORDER BY b.version DESC"""

    return session.execute(sql % {'package': package, 'suitename': suitename})

__all__.append('get_binary_from_name_suite')
def get_binary_components(package, suitename, arch, session=None):
    # Check for packages that have moved from one component to another
    # (uses bound parameters via the 'vals' dict below).
    # NOTE(review): the closing lines of this query string (and its closing
    # triple-quote) appear to be missing from this excerpt.
    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
    WHERE b.package=:package AND s.suite_name=:suitename
    AND (a.arch_string = :arch OR a.arch_string = 'all')
    AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
    AND f.location = l.id
    AND l.component = c.id

    vals = {'package': package, 'suitename': suitename, 'arch': arch}

    return session.execute(query, vals)

__all__.append('get_binary_components')
559 ################################################################################
class BinaryACL(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<BinaryACL %s>' % self.binary_acl_id

__all__.append('BinaryACL')
570 ################################################################################
class BinaryACLMap(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<BinaryACLMap %s>' % self.binary_acl_map_id

__all__.append('BinaryACLMap')
581 ################################################################################
# NOTE(review): the lines below are the interior of the MINIMAL_APT_CONF
# apt-ftparchive configuration template (used by BuildQueue.write_metadata);
# the assignment and opening triple-quote are missing from this excerpt, so
# no comments are inserted into the template text itself.
ArchiveDir "%(archivepath)s";
OverrideDir "%(overridedir)s";
CacheDir "%(cachedir)s";
Packages::Compress ". bzip2 gzip";
Sources::Compress ". bzip2 gzip";
bindirectory "incoming"
BinOverride "override.sid.all3";
BinCacheDB "packages-accepted.db";
FileList "%(filelist)s";
Packages::Extensions ".deb .udeb";
bindirectory "incoming/"
BinOverride "override.sid.all3";
SrcOverride "override.sid.all3.src";
FileList "%(filelist)s";
class BuildQueue(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<BuildQueue %s>' % self.queue_name
    def write_metadata(self, starttime, force=False):
        # Regenerate this queue's archive metadata with apt-ftparchive and
        # sign the resulting Release file with gpg.
        # NOTE(review): many lines (loop headers, try/except/finally blocks,
        # cleanup code) are missing from this excerpt; surviving lines kept
        # verbatim.
        # Do we write out metafiles?
        if not (force or self.generate_metadata):

        session = DBConn().session().object_session(self)

        fl_fd = fl_name = ac_fd = ac_name = None

        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                            'cachedir': cnf["Dir::Cache"],
                                            'overridedir': cnf["Dir::Override"],

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)

        # We have to remove the Release file otherwise it'll be included in the
        os.unlink(os.path.join(bname, 'Release'))

        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes")

        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files
    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        # Expire queue files older than the stay-of-execution window and
        # remove stale symlinks/metadata files from the queue directory.
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        # NOTE(review): loop headers and try/except framing around the
        # removals below are missing from this excerpt; surviving lines
        # kept verbatim.
                Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
                Logger.log(["I: Removing %s from the queue" % o.fullpath])
                os.unlink(o.fullpath)
            # If it wasn't there, don't worry
            if e.errno == ENOENT:
                # TODO: Replace with proper logging call
                Logger.log(["E: Could not remove %s" % o.fullpath])

        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):

                r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
            except NoResultFound:
                fp = os.path.join(self.path, f)
                    Logger.log(["I: Would remove unused link %s" % fp])
                    Logger.log(["I: Removing unused link %s" % fp])
                    Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])
    def add_file_from_pool(self, poolfile):
        """Copies a file into the pool. Assumes that the PoolFile object is
        attached to the same SQLAlchemy session as the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
               f.poolfile.filename == poolfile_basename:
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # NOTE(review): the try/except framing that chooses between the
        # symlink and the copy below is missing from this excerpt.
            # We need to copy instead of symlink
            utils.copy(targetpath, queuepath)
            # NULL in the fileid field implies a copy
            os.symlink(targetpath, queuepath)
            qf.fileid = poolfile.file_id

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)

__all__.append('BuildQueue')
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}, creating it if it does not

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: BuildQueue object for the given queue
    """
    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    # NOTE(review): the try:/return lines of the lookup are missing from
    # this excerpt.
    except NoResultFound:

__all__.append('get_build_queue')
849 ################################################################################
class BuildQueueFile(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' and 'fullpath' headers are missing from
    # this excerpt.
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

        return os.path.join(self.buildqueue.path, self.filename)

__all__.append('BuildQueueFile')
865 ################################################################################
class ChangePendingBinary(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id

__all__.append('ChangePendingBinary')
876 ################################################################################
class ChangePendingFile(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<ChangePendingFile %s>' % self.change_pending_file_id

__all__.append('ChangePendingFile')
887 ################################################################################
class ChangePendingSource(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<ChangePendingSource %s>' % self.change_pending_source_id

__all__.append('ChangePendingSource')
898 ################################################################################
class Component(object):
    # An archive component (compares equal to its name string; see
    # __eq__/__ne__ below).
    # NOTE(review): the __init__ body is missing from this excerpt.
    def __init__(self, *args, **kwargs):
904 def __eq__(self, val):
905 if isinstance(val, str):
906 return (self.component_name == val)
907 # This signals to use the normal comparison operator
908 return NotImplemented
910 def __ne__(self, val):
911 if isinstance(val, str):
912 return (self.component_name != val)
913 # This signals to use the normal comparison operator
914 return NotImplemented
    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<Component %s>' % self.component_name

__all__.append('Component')
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the override type

    @return: the database id for the given component
    """
    # component names are matched lowercase
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    # NOTE(review): the try:/return lines of the lookup are missing from
    # this excerpt.
    except NoResultFound:

__all__.append('get_component')
945 ################################################################################
class DBConfig(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<DBConfig %s>' % self.name

__all__.append('DBConfig')
956 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the database id for the given component
    """
    q = session.query(ContentFilename).filter_by(filename=filename)

    # NOTE(review): the try:, session.add(cf) and return lines appear to be
    # missing from this excerpt.
        ret = q.one().cafilename_id
    except NoResultFound:
        cf = ContentFilename()
        cf.filename = filename
        session.commit_or_flush()
        ret = cf.cafilename_id

__all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    """
    # find me all of the contents for a given suite
    # NOTE(review): part of the SELECT column list appears to be missing
    # from this excerpt.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                    FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
                    JOIN content_file_names n ON (c.filename=n.id)
                    JOIN binaries b ON (b.id=c.binary_pkg)
                    JOIN override o ON (o.package=b.package)
                    JOIN section s ON (s.id=o.section)
                    WHERE o.suite = :suiteid AND o.type = :overridetypeid
                    AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)

__all__.append('get_contents')
1042 ################################################################################
class ContentFilepath(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<ContentFilepath %s>' % self.filepath

__all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    # NOTE(review): the try:, session.add(cf) and return lines appear to be
    # missing from this excerpt.
        ret = q.one().cafilepath_id
    except NoResultFound:
        cf = ContentFilepath()
        cf.filepath = filepath
        session.commit_or_flush()
        ret = cf.cafilepath_id

__all__.append('get_or_set_contents_path_id')
1087 ################################################################################
class ContentAssociation(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<ContentAssociation %s>' % self.ca_id

__all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given path is associated with given binary id

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for commiting.

    @return: True upon success
    """
    privatetrans = False
    # NOTE(review): the guard that creates a private session, and the
    # try/except/commit framing around the inserts, are missing from this
    # excerpt; surviving lines kept verbatim.
        session = DBConn().session()

    def generate_path_dicts():
        # Strip a leading './' so paths are stored relative to the root.
        for fullpath in fullpaths:
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]

            yield {'filename':fullpath, 'id': binary_id }

    for d in generate_path_dicts():
        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",

        traceback.print_exc()

        # Only rollback if we set up the session ourself

__all__.append('insert_content_paths')
1151 ################################################################################
class DSCFile(object):
    def __init__(self, *args, **kwargs):

    # NOTE(review): the '__repr__' header is missing from this excerpt.
        return '<DSCFile %s>' % self.dscfile_id

__all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    # NOTE(review): the final return of the query results appears to be
    # missing from this excerpt.

__all__.append('get_dscfiles')
1195 ################################################################################
class PoolFile(ORMObject):
    # A file stored in the archive pool.
    # NOTE(review): the continuation line of this __init__ signature (which
    # presumably declares md5sum) and the 'fullpath' property header are
    # missing from this excerpt.
    def __init__(self, filename = None, location = None, filesize = -1, \
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

        return os.path.join(self.location.path, self.filename)
1209 def is_valid(self, filesize = -1, md5sum = None):\
1210 return self.filesize == filesize and self.md5sum == md5sum
    def properties(self):
        # Consumed by ORMObject; filename first because repr() uses it.
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'last_used']

    # NOTE(review): the 'def validate(self):' header and the final argument
    # of the raise are missing from this excerpt.
        # sha1sum and sha256sum are not validated yet
        if self.filename is None or len(self.filename) == 0 or \
           self.filesize < 0 or self.md5sum is None or \
           len(self.md5sum) == 0 or self.location is None:
            raise DBUpdateError( \
                "Validation failed because some properties must not be empty in object\n%s" % \

__all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    (ValidFileFound [boolean], PoolFile object or None)

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @return: Tuple of length 2.
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    # NOTE(review): the assignments to 'valid' appear to be missing from
    # this excerpt.
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):

    return (valid, poolfile)

__all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called

def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile objects or None for the given id

    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    return session.query(PoolFile).get(file_id)

__all__.append('get_poolfile_by_id')
# Find pool files whose path ends in "/<filename>" via a SQL LIKE query.
1282 def get_poolfile_like_name(filename, session=None):
1284 Returns an array of PoolFile objects which are like the given name
1286 @type filename: string
1287 @param filename: the filename of the file to check against the DB
1290 @return: array of PoolFile objects
1293 # TODO: There must be a way of properly using bind parameters with %FOO%
1294 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1298 __all__.append('get_poolfile_like_name')
# Create a new PoolFile row from the checksum/size data in datadict and
# attach it to the given location; flushed (not committed) to obtain an id.
1301 def add_poolfile(filename, datadict, location_id, session=None):
1303 Add a new file to the pool
1305 @type filename: string
1306 @param filename: filename
1308 @type datadict: dict
1309 @param datadict: dict with needed data
1311 @type location_id: int
1312 @param location_id: database id of the location
1315 @return: the PoolFile object created
1317 poolfile = PoolFile()
1318 poolfile.filename = filename
1319 poolfile.filesize = datadict["size"]
1320 poolfile.md5sum = datadict["md5sum"]
1321 poolfile.sha1sum = datadict["sha1sum"]
1322 poolfile.sha256sum = datadict["sha256sum"]
1323 poolfile.location_id = location_id
1325 session.add(poolfile)
1326 # Flush to get a file id (NB: This is not a commit)
1331 __all__.append('add_poolfile')
1333 ################################################################################
# ORM class for the "fingerprint" table (GPG key fingerprints).
1335 class Fingerprint(object):
1336 def __init__(self, fingerprint = None):
1337 self.fingerprint = fingerprint
1340 return '<Fingerprint %s>' % self.fingerprint
1342 __all__.append('Fingerprint')
# Read-only lookup: return the Fingerprint row matching fpr, or None.
1345 def get_fingerprint(fpr, session=None):
1347 Returns Fingerprint object for given fpr.
1350 @param fpr: The fpr to find / add
1352 @type session: SQLAlchemy
1353 @param session: Optional SQL session object (a temporary one will be
1354 generated if not supplied).
1357 @return: the Fingerprint object for the given fpr or None
1360 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1364 except NoResultFound:
1369 __all__.append('get_fingerprint')
# Get-or-create: return the Fingerprint row for fpr, inserting it first
# if missing (commit_or_flush handles private vs. caller-owned sessions).
1372 def get_or_set_fingerprint(fpr, session=None):
1374 Returns Fingerprint object for given fpr.
1376 If no matching fpr is found, a row is inserted.
1379 @param fpr: The fpr to find / add
1381 @type session: SQLAlchemy
1382 @param session: Optional SQL session object (a temporary one will be
1383 generated if not supplied). If not passed, a commit will be performed at
1384 the end of the function, otherwise the caller is responsible for commiting.
1385 A flush will be performed either way.
1388 @return: the Fingerprint object for the given fpr
1391 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1395 except NoResultFound:
1396 fingerprint = Fingerprint()
1397 fingerprint.fingerprint = fpr
1398 session.add(fingerprint)
1399 session.commit_or_flush()
1404 __all__.append('get_or_set_fingerprint')
1406 ################################################################################
1408 # Helper routine for Keyring class
# Build a display name from an LDAP entry's cn/mn/sn attributes,
# skipping empty and placeholder ("-") components.
1409 def get_ldap_name(entry):
1411 for k in ["cn", "mn", "sn"]:
1413 if ret and ret[0] != "" and ret[0] != "-":
1415 return " ".join(name)
1417 ################################################################################
# ORM class for the "keyrings" table, plus helpers that parse
# "gpg --with-colons" output to map keys to uids/fingerprints.
1419 class Keyring(object):
1420 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1421 " --with-colons --fingerprint --fingerprint"
1426 def __init__(self, *args, **kwargs):
1430 return '<Keyring %s>' % self.keyring_name
def de_escape_gpg_str(self, txt):
    """Decode gpg's \\xNN escape sequences in txt and return the result."""
    pieces = re.split(r'(\\x..)', txt)
    # The capturing group makes re.split place each "\xNN" escape at the
    # odd indices; replace each with the character it encodes.
    for idx in range(1, len(pieces), 2):
        pieces[idx] = "%c" % (int(pieces[idx][2:], 16))
    return "".join(pieces)
# Split a gpg uid string into (real name, email address); the name has
# any parenthesised comment stripped and gpg escapes decoded.
1438 def parse_address(self, uid):
1439 """parses uid and returns a tuple of real name and email address"""
1441 (name, address) = email.Utils.parseaddr(uid)
1442 name = re.sub(r"\s*[(].*[)]", "", name)
1443 name = self.de_escape_gpg_str(name)
1446 return (name, address)
# Populate self.keys and self.fpr_lookup by running gpg on the given
# keyring file and parsing its colon-separated record output.
1448 def load_keys(self, keyring):
1449 if not self.keyring_id:
1450 raise Exception('Must be initialized with database information')
1452 k = os.popen(self.gpg_invocation % keyring, "r")
1456 for line in k.xreadlines():
1457 field = line.split(":")
1458 if field[0] == "pub":
1461 (name, addr) = self.parse_address(field[9])
1463 self.keys[key]["email"] = addr
1464 self.keys[key]["name"] = name
1465 self.keys[key]["fingerprints"] = []
# Remember whether the subkey carries the signing ("s") capability.
1467 elif key and field[0] == "sub" and len(field) >= 12:
1468 signingkey = ("s" in field[11])
1469 elif key and field[0] == "uid":
1470 (name, addr) = self.parse_address(field[9])
# Prefer the first uid that actually contains an email address.
1471 if "email" not in self.keys[key] and "@" in addr:
1472 self.keys[key]["email"] = addr
1473 self.keys[key]["name"] = name
1474 elif signingkey and field[0] == "fpr":
1475 self.keys[key]["fingerprints"].append(field[9])
1476 self.fpr_lookup[field[9]] = key
# Query the configured LDAP server for accounts with key fingerprints and
# map them onto loaded keys; returns (byname, byuid) lookup dicts.
1478 def import_users_from_ldap(self, session):
1482 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
1483 LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
# Anonymous bind; only read access is needed here.
1485 l = ldap.open(LDAPServer)
1486 l.simple_bind_s("","")
1487 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1488 "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
1489 ["uid", "keyfingerprint", "cn", "mn", "sn"])
1491 ldap_fin_uid_id = {}
1498 uid = entry["uid"][0]
1499 name = get_ldap_name(entry)
1500 fingerprints = entry["keyFingerPrint"]
1502 for f in fingerprints:
1503 key = self.fpr_lookup.get(f, None)
1504 if key not in self.keys:
1506 self.keys[key]["uid"] = uid
1510 keyid = get_or_set_uid(uid, session).uid_id
1511 byuid[keyid] = (uid, name)
1512 byname[uid] = (keyid, name)
1514 return (byname, byuid)
# Derive uids for every loaded key from its email address using the given
# format string; keys without a usable email get an "invalid-uid" entry.
# Returns (byname, byuid) lookup dicts.
1516 def generate_users_from_keyring(self, format, session):
1520 for x in self.keys.keys():
1521 if "email" not in self.keys[x]:
1523 self.keys[x]["uid"] = format % "invalid-uid"
1525 uid = format % self.keys[x]["email"]
1526 keyid = get_or_set_uid(uid, session).uid_id
1527 byuid[keyid] = (uid, self.keys[x]["name"])
1528 byname[uid] = (keyid, self.keys[x]["name"])
1529 self.keys[x]["uid"] = uid
# Shared fallback entry for all keys lacking an email address.
1532 uid = format % "invalid-uid"
1533 keyid = get_or_set_uid(uid, session).uid_id
1534 byuid[keyid] = (uid, "ungeneratable user id")
1535 byname[uid] = (keyid, "ungeneratable user id")
1537 return (byname, byuid)
1539 __all__.append('Keyring')
# Look up a Keyring row by name; None if it has no entry yet.
1542 def get_keyring(keyring, session=None):
1544 If C{keyring} does not have an entry in the C{keyrings} table yet, return None
1545 If C{keyring} already has an entry, simply return the existing Keyring
1547 @type keyring: string
1548 @param keyring: the keyring name
1551 @return: the Keyring object for this keyring
1554 q = session.query(Keyring).filter_by(keyring_name=keyring)
1558 except NoResultFound:
1561 __all__.append('get_keyring')
1563 ################################################################################
# ORM class for the keyring ACL mapping table.
1565 class KeyringACLMap(object):
1566 def __init__(self, *args, **kwargs):
1570 return '<KeyringACLMap %s>' % self.keyring_acl_map_id
1572 __all__.append('KeyringACLMap')
1574 ################################################################################
# ORM class for the "changes" table (uploaded .changes files).
1576 class DBChange(object):
1577 def __init__(self, *args, **kwargs):
1581 return '<DBChange %s>' % self.changesname
# Detach this changes entry from its policy queue: drop its pool-file and
# pending-file associations and clear the queue/approval references.
1583 def clean_from_queue(self):
1584 session = DBConn().session().object_session(self)
1586 # Remove changes_pool_files entries
1589 # Remove changes_pending_files references
1592 # Clear out of queue
1593 self.in_queue = None
1594 self.approved_for_id = None
1596 __all__.append('DBChange')
# Look up a DBChange row by its .changes filename; None if not present.
1599 def get_dbchange(filename, session=None):
1601 returns DBChange object for given C{filename}.
1603 @type filename: string
1604 @param filename: the name of the file
1606 @type session: Session
1607 @param session: Optional SQLA session object (a temporary one will be
1608 generated if not supplied)
1611 @return: DBChange object for the given filename (C{None} if not present)
1614 q = session.query(DBChange).filter_by(changesname=filename)
1618 except NoResultFound:
1621 __all__.append('get_dbchange')
1623 ################################################################################
# ORM class for the "location" table (archive pool paths).
1625 class Location(object):
1626 def __init__(self, path = None):
1628 # the column 'type' should go away, see comment at mapper
1629 self.archive_type = 'pool'
1632 return '<Location %s (%s)>' % (self.path, self.location_id)
1634 __all__.append('Location')
# Look up a Location by path, optionally restricted to a component and/or
# archive; None if no unique match.
1637 def get_location(location, component=None, archive=None, session=None):
1639 Returns Location object for the given combination of location, component
1642 @type location: string
1643 @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}
1645 @type component: string
1646 @param component: the component name (if None, no restriction applied)
1648 @type archive: string
1649 @param archive: the archive name (if None, no restriction applied)
1651 @rtype: Location / None
1652 @return: Either a Location object or None if one can't be found
1655 q = session.query(Location).filter_by(path=location)
1657 if archive is not None:
1658 q = q.join(Archive).filter_by(archive_name=archive)
1660 if component is not None:
1661 q = q.join(Component).filter_by(component_name=component)
1665 except NoResultFound:
1668 __all__.append('get_location')
1670 ################################################################################
# ORM class for the "maintainer" table.
1672 class Maintainer(object):
1673 def __init__(self, name = None):
1677 return '''<Maintainer '%s' (%s)>''' % (self.name, self.maintainer_id)
# Split the "Name <email>" maintainer string via fix_maintainer; returns
# four empty strings when no name is set.
1679 def get_split_maintainer(self):
1680 if not hasattr(self, 'name') or self.name is None:
1681 return ('', '', '', '')
1683 return fix_maintainer(self.name.strip())
1685 __all__.append('Maintainer')
# Get-or-create: return the Maintainer row for the given name, inserting
# it first if missing.
1688 def get_or_set_maintainer(name, session=None):
1690 Returns Maintainer object for given maintainer name.
1692 If no matching maintainer name is found, a row is inserted.
1695 @param name: The maintainer name to add
1697 @type session: SQLAlchemy
1698 @param session: Optional SQL session object (a temporary one will be
1699 generated if not supplied). If not passed, a commit will be performed at
1700 the end of the function, otherwise the caller is responsible for commiting.
1701 A flush will be performed either way.
1704 @return: the Maintainer object for the given maintainer
1707 q = session.query(Maintainer).filter_by(name=name)
1710 except NoResultFound:
1711 maintainer = Maintainer()
1712 maintainer.name = name
1713 session.add(maintainer)
1714 session.commit_or_flush()
1719 __all__.append('get_or_set_maintainer')
# Fetch a Maintainer by primary key; None if the id is invalid.
1722 def get_maintainer(maintainer_id, session=None):
1724 Return the name of the maintainer behind C{maintainer_id} or None if that
1725 maintainer_id is invalid.
1727 @type maintainer_id: int
1728 @param maintainer_id: the id of the maintainer
1731 @return: the Maintainer with this C{maintainer_id}
1734 return session.query(Maintainer).get(maintainer_id)
1736 __all__.append('get_maintainer')
1738 ################################################################################
# ORM class for NEW-queue processing comments.
1740 class NewComment(object):
1741 def __init__(self, *args, **kwargs):
1745 return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1747 __all__.append('NewComment')
# Existence check: does any NEW comment exist for this package/version?
1750 def has_new_comment(package, version, session=None):
1752 Returns true if the given combination of C{package}, C{version} has a comment.
1754 @type package: string
1755 @param package: name of the package
1757 @type version: string
1758 @param version: package version
1760 @type session: Session
1761 @param session: Optional SQLA session object (a temporary one will be
1762 generated if not supplied)
1768 q = session.query(NewComment)
1769 q = q.filter_by(package=package)
1770 q = q.filter_by(version=version)
1772 return bool(q.count() > 0)
1774 __all__.append('has_new_comment')
# Fetch NewComment rows, optionally filtered by package, version and/or
# comment id; all filters are ANDed together.
1777 def get_new_comments(package=None, version=None, comment_id=None, session=None):
1779 Returns (possibly empty) list of NewComment objects for the given
1782 @type package: string (optional)
1783 @param package: name of the package
1785 @type version: string (optional)
1786 @param version: package version
1788 @type comment_id: int (optional)
1789 @param comment_id: An id of a comment
1791 @type session: Session
1792 @param session: Optional SQLA session object (a temporary one will be
1793 generated if not supplied)
1796 @return: A (possibly empty) list of NewComment objects will be returned
1799 q = session.query(NewComment)
1800 if package is not None: q = q.filter_by(package=package)
1801 if version is not None: q = q.filter_by(version=version)
1802 if comment_id is not None: q = q.filter_by(comment_id=comment_id)
1806 __all__.append('get_new_comments')
1808 ################################################################################
# ORM class for the "override" table (per-suite package overrides).
1810 class Override(object):
1811 def __init__(self, *args, **kwargs):
1815 return '<Override %s (%s)>' % (self.package, self.suite_id)
1817 __all__.append('Override')
# Fetch Override rows for a package; suite/component/overridetype each
# accept a single name or a list and are normalised to lists below.
1820 def get_override(package, suite=None, component=None, overridetype=None, session=None):
1822 Returns Override object for the given parameters
1824 @type package: string
1825 @param package: The name of the package
1827 @type suite: string, list or None
1828 @param suite: The name of the suite (or suites if a list) to limit to. If
1829 None, don't limit. Defaults to None.
1831 @type component: string, list or None
1832 @param component: The name of the component (or components if a list) to
1833 limit to. If None, don't limit. Defaults to None.
1835 @type overridetype: string, list or None
1836 @param overridetype: The name of the overridetype (or overridetypes if a list) to
1837 limit to. If None, don't limit. Defaults to None.
1839 @type session: Session
1840 @param session: Optional SQLA session object (a temporary one will be
1841 generated if not supplied)
1844 @return: A (possibly empty) list of Override objects will be returned
1847 q = session.query(Override)
1848 q = q.filter_by(package=package)
1850 if suite is not None:
1851 if not isinstance(suite, list): suite = [suite]
1852 q = q.join(Suite).filter(Suite.suite_name.in_(suite))
1854 if component is not None:
1855 if not isinstance(component, list): component = [component]
1856 q = q.join(Component).filter(Component.component_name.in_(component))
1858 if overridetype is not None:
1859 if not isinstance(overridetype, list): overridetype = [overridetype]
1860 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))
1864 __all__.append('get_override')
1867 ################################################################################
# ORM class for the "override_type" table (deb/udeb/dsc).
1869 class OverrideType(object):
1870 def __init__(self, *args, **kwargs):
1874 return '<OverrideType %s>' % self.overridetype
1876 __all__.append('OverrideType')
# Look up an OverrideType row by name; None if not found.
1879 def get_override_type(override_type, session=None):
1881 Returns OverrideType object for given C{override type}.
1883 @type override_type: string
1884 @param override_type: The name of the override type
1886 @type session: Session
1887 @param session: Optional SQLA session object (a temporary one will be
1888 generated if not supplied)
1891 @return: the database id for the given override type
1894 q = session.query(OverrideType).filter_by(overridetype=override_type)
1898 except NoResultFound:
1901 __all__.append('get_override_type')
1903 ################################################################################
class DebContents(object):
    """ORM class for deb package contents (file lists)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr tag: was '<DebConetnts ...>'.
        return '<DebContents %s: %s>' % (self.package.package, self.file)
1912 __all__.append('DebContents')
class UdebContents(object):
    """ORM class for udeb package contents (file lists)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        # Fixed typo in the repr tag: was '<UdebConetnts ...>'.
        return '<UdebContents %s: %s>' % (self.package.package, self.file)
1922 __all__.append('UdebContents')
# ORM class for contents of binaries not yet accepted into the archive.
1924 class PendingBinContents(object):
1925 def __init__(self, *args, **kwargs):
1929 return '<PendingBinContents %s>' % self.contents_id
1931 __all__.append('PendingBinContents')
# Record the content paths of a not-yet-accepted binary package, replacing
# any previously recorded rows for the same package/version/architecture.
1933 def insert_pending_content_paths(package,
1938 Make sure given paths are temporarily associated with given
1942 @param package: the package to associate with should have been read in from the binary control file
1943 @type fullpaths: list
1944 @param fullpaths: the list of paths of the file being associated with the binary
1945 @type session: SQLAlchemy session
1946 @param session: Optional SQLAlchemy session. If this is passed, the caller
1947 is responsible for ensuring a transaction has begun and committing the
1948 results or rolling back based on the result code. If not passed, a commit
1949 will be performed at the end of the function
1951 @return: True upon success, False if there is a problem
# privatetrans tracks whether we created the session (and so own the
# commit/rollback) or the caller did.
1954 privatetrans = False
1957 session = DBConn().session()
1961 arch = get_architecture(package['Architecture'], session)
1962 arch_id = arch.arch_id
1964 # Remove any already existing recorded files for this package
1965 q = session.query(PendingBinContents)
1966 q = q.filter_by(package=package['Package'])
1967 q = q.filter_by(version=package['Version'])
1968 q = q.filter_by(architecture=arch_id)
1971 for fullpath in fullpaths:
# Normalise tar-style "./" path prefixes.
1973 if fullpath.startswith( "./" ):
1974 fullpath = fullpath[2:]
1976 pca = PendingBinContents()
1977 pca.package = package['Package']
1978 pca.version = package['Version']
1980 pca.architecture = arch_id
1983 pca.type = 8 # gross
1985 pca.type = 7 # also gross
1988 # Only commit if we set up the session ourself
1996 except Exception, e:
1997 traceback.print_exc()
1999 # Only rollback if we set up the session ourself
2006 __all__.append('insert_pending_content_paths')
2008 ################################################################################
# ORM class for the "policy_queue" table.
2010 class PolicyQueue(object):
2011 def __init__(self, *args, **kwargs):
2015 return '<PolicyQueue %s>' % self.queue_name
2017 __all__.append('PolicyQueue')
# Look up a PolicyQueue row by queue name; None if not found.
2020 def get_policy_queue(queuename, session=None):
2022 Returns PolicyQueue object for given C{queue name}
2024 @type queuename: string
2025 @param queuename: The name of the queue
2027 @type session: Session
2028 @param session: Optional SQLA session object (a temporary one will be
2029 generated if not supplied)
2032 @return: PolicyQueue object for the given queue
2035 q = session.query(PolicyQueue).filter_by(queue_name=queuename)
2039 except NoResultFound:
2042 __all__.append('get_policy_queue')
# Look up a PolicyQueue row by its filesystem path; None if not found.
2045 def get_policy_queue_from_path(pathname, session=None):
2047 Returns PolicyQueue object for given C{path name}
2049 @type pathname: string
2050 @param pathname: The path
2052 @type session: Session
2053 @param session: Optional SQLA session object (a temporary one will be
2054 generated if not supplied)
2057 @return: PolicyQueue object for the given queue
2060 q = session.query(PolicyQueue).filter_by(path=pathname)
2064 except NoResultFound:
2067 __all__.append('get_policy_queue_from_path')
2069 ################################################################################
# ORM class for the "priority" table; compares equal to plain strings so
# callers can write e.g. (priority_obj == 'optional').
2071 class Priority(object):
2072 def __init__(self, *args, **kwargs):
2075 def __eq__(self, val):
2076 if isinstance(val, str):
2077 return (self.priority == val)
2078 # This signals to use the normal comparison operator
2079 return NotImplemented
2081 def __ne__(self, val):
2082 if isinstance(val, str):
2083 return (self.priority != val)
2084 # This signals to use the normal comparison operator
2085 return NotImplemented
2088 return '<Priority %s (%s)>' % (self.priority, self.priority_id)
2090 __all__.append('Priority')
# Look up a Priority row by name; None if not found.
2093 def get_priority(priority, session=None):
2095 Returns Priority object for given C{priority name}.
2097 @type priority: string
2098 @param priority: The name of the priority
2100 @type session: Session
2101 @param session: Optional SQLA session object (a temporary one will be
2102 generated if not supplied)
2105 @return: Priority object for the given priority
2108 q = session.query(Priority).filter_by(priority=priority)
2112 except NoResultFound:
2115 __all__.append('get_priority')
# Build a dict mapping every priority name to its database id.
2118 def get_priorities(session=None):
2120 Returns dictionary of priority names -> id mappings
2122 @type session: Session
2123 @param session: Optional SQL session object (a temporary one will be
2124 generated if not supplied)
2127 @return: dictionary of priority names -> id mappings
2131 q = session.query(Priority)
2133 ret[x.priority] = x.priority_id
2137 __all__.append('get_priorities')
2139 ################################################################################
# ORM class for the "section" table; compares equal to plain strings so
# callers can write e.g. (section_obj == 'utils').
2141 class Section(object):
2142 def __init__(self, *args, **kwargs):
2145 def __eq__(self, val):
2146 if isinstance(val, str):
2147 return (self.section == val)
2148 # This signals to use the normal comparison operator
2149 return NotImplemented
2151 def __ne__(self, val):
2152 if isinstance(val, str):
2153 return (self.section != val)
2154 # This signals to use the normal comparison operator
2155 return NotImplemented
2158 return '<Section %s>' % self.section
2160 __all__.append('Section')
# Look up a Section row by name; None if not found.
2163 def get_section(section, session=None):
2165 Returns Section object for given C{section name}.
2167 @type section: string
2168 @param section: The name of the section
2170 @type session: Session
2171 @param session: Optional SQLA session object (a temporary one will be
2172 generated if not supplied)
2175 @return: Section object for the given section name
2178 q = session.query(Section).filter_by(section=section)
2182 except NoResultFound:
2185 __all__.append('get_section')
# Build a dict mapping every section name to its database id.
2188 def get_sections(session=None):
2190 Returns dictionary of section names -> id mappings
2192 @type session: Session
2193 @param session: Optional SQL session object (a temporary one will be
2194 generated if not supplied)
2197 @return: dictionary of section names -> id mappings
2201 q = session.query(Section)
2203 ret[x.section] = x.section_id
2207 __all__.append('get_sections')
2209 ################################################################################
# ORM class for the "source" table (source packages in the pool).
2211 class DBSource(object):
2212 def __init__(self, source = None, version = None, maintainer = None, \
2213 changedby = None, poolfile = None, install_date = None):
2214 self.source = source
2215 self.version = version
2216 self.maintainer = maintainer
2217 self.changedby = changedby
2218 self.poolfile = poolfile
2219 self.install_date = install_date
2222 return '<DBSource %s (%s)>' % (self.source, self.version)
2224 __all__.append('DBSource')
# Verify that the source package for a binary upload exists in the archive,
# accepting exact matches and binary-only-NMU version suffixes, and
# following configured suite mappings.
2227 def source_exists(source, source_version, suites = ["any"], session=None):
2229 Ensure that source exists somewhere in the archive for the binary
2230 upload being processed.
2231 1. exact match => 1.0-3
2232 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
2234 @type source: string
2235 @param source: source name
2237 @type source_version: string
2238 @param source_version: expected source version
2241 @param suites: list of suites to check in, default I{any}
2243 @type session: Session
2244 @param session: Optional SQLA session object (a temporary one will be
2245 generated if not supplied)
2248 @return: returns 1 if a source with expected version is found, otherwise 0
# Strip a bin-only-NMU suffix to get the candidate original source version.
2255 from daklib.regexes import re_bin_only_nmu
2256 orig_source_version = re_bin_only_nmu.sub('', source_version)
2258 for suite in suites:
2259 q = session.query(DBSource).filter_by(source=source). \
2260 filter(DBSource.version.in_([source_version, orig_source_version]))
2262 # source must exist in suite X, or in some other suite that's
2263 # mapped to X, recursively... silent-maps are counted too,
2264 # unreleased-maps aren't.
2265 maps = cnf.ValueList("SuiteMappings")[:]
2267 maps = [ m.split() for m in maps ]
2268 maps = [ (x[1], x[2]) for x in maps
2269 if x[0] == "map" or x[0] == "silent-map" ]
2272 if x[1] in s and x[0] not in s:
2275 q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))
2280 # No source found so return not ok
2285 __all__.append('source_exists')
# List all suites that contain a source package of the given name.
2288 def get_suites_source_in(source, session=None):
2290 Returns list of Suite objects which given C{source} name is in
2293 @param source: DBSource package name to search for
2296 @return: list of Suite objects for the given source
2299 return session.query(Suite).filter(Suite.sources.any(source=source)).all()
2301 __all__.append('get_suites_source_in')
# Fetch DBSource rows by name, optionally filtered by version and by the
# dm_upload_allowed flag.
2304 def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
2306 Returns list of DBSource objects for given C{source} name and other parameters
2309 @param source: DBSource package name to search for
2311 @type version: str or None
2312 @param version: DBSource version name to search for or None if not applicable
2314 @type dm_upload_allowed: bool
2315 @param dm_upload_allowed: If None, no effect. If True or False, only
2316 return packages with that dm_upload_allowed setting
2318 @type session: Session
2319 @param session: Optional SQL session object (a temporary one will be
2320 generated if not supplied)
2323 @return: list of DBSource objects for the given name (may be empty)
2326 q = session.query(DBSource).filter_by(source=source)
2328 if version is not None:
2329 q = q.filter_by(version=version)
2331 if dm_upload_allowed is not None:
2332 q = q.filter_by(dm_upload_allowed=dm_upload_allowed)
2336 __all__.append('get_sources_from_name')
2338 # FIXME: This function fails badly if it finds more than 1 source package and
2339 # its implementation is trivial enough to be inlined.
# Look up the DBSource for a source package within one suite; returns None
# when not found (via the NoResultFound handler).
2341 def get_source_in_suite(source, suite, session=None):
2343 Returns a DBSource object for a combination of C{source} and C{suite}.
2345 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2346 - B{suite} - a suite name, eg. I{unstable}
2348 @type source: string
2349 @param source: source package name
2352 @param suite: the suite name
2355 @return: the version for I{source} in I{suite}
2359 q = get_suite(suite, session).get_sources(source)
2362 except NoResultFound:
2365 __all__.append('get_source_in_suite')
2367 ################################################################################
# Create the DBSource row (and its pool files, dsc_files and src_uploaders
# rows) for an uploaded .dsc during process-upload; returns the new source
# plus the component/location and the list of PoolFiles touched.
2370 def add_dsc_to_db(u, filename, session=None):
2371 entry = u.pkg.files[filename]
2375 source.source = u.pkg.dsc["source"]
2376 source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
2377 source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
2378 source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
2379 source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2380 source.install_date = datetime.now().date()
2382 dsc_component = entry["component"]
2383 dsc_location_id = entry["location id"]
2385 source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")
2387 # Set up a new poolfile if necessary
2388 if not entry.has_key("files id") or not entry["files id"]:
2389 filename = entry["pool name"] + filename
2390 poolfile = add_poolfile(filename, entry, dsc_location_id, session)
2392 pfs.append(poolfile)
2393 entry["files id"] = poolfile.file_id
2395 source.poolfile_id = entry["files id"]
# Associate the source with every suite listed in the .changes Distribution.
2398 suite_names = u.pkg.changes["distribution"].keys()
2399 source.suites = session.query(Suite). \
2400 filter(Suite.suite_name.in_(suite_names)).all()
2402 # Add the source files to the DB (files and dsc_files)
2404 dscfile.source_id = source.source_id
2405 dscfile.poolfile_id = entry["files id"]
2406 session.add(dscfile)
2408 for dsc_file, dentry in u.pkg.dsc_files.items():
2410 df.source_id = source.source_id
2412 # If the .orig tarball is already in the pool, it's
2413 # files id is stored in dsc_files by check_dsc().
2414 files_id = dentry.get("files id", None)
2416 # Find the entry in the files hash
2417 # TODO: Bail out here properly
2419 for f, e in u.pkg.files.items():
2424 if files_id is None:
2425 filename = dfentry["pool name"] + dsc_file
2427 (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
2428 # FIXME: needs to check for -1/-2 and or handle exception
2429 if found and obj is not None:
2430 files_id = obj.file_id
2433 # If still not found, add it
2434 if files_id is None:
2435 # HACK: Force sha1sum etc into dentry
2436 dentry["sha1sum"] = dfentry["sha1sum"]
2437 dentry["sha256sum"] = dfentry["sha256sum"]
2438 poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
2439 pfs.append(poolfile)
2440 files_id = poolfile.file_id
2442 poolfile = get_poolfile_by_id(files_id, session)
2443 if poolfile is None:
2444 utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
2445 pfs.append(poolfile)
2447 df.poolfile_id = files_id
2450 # Add the src_uploaders to the DB
2451 uploader_ids = [source.maintainer_id]
2452 if u.pkg.dsc.has_key("uploaders"):
2453 for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
2455 uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)
# Deduplicate uploader ids so each maintainer is recorded only once.
2458 for up_id in uploader_ids:
2459 if added_ids.has_key(up_id):
2461 utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))
2467 su.maintainer_id = up_id
2468 su.source_id = source.source_id
2473 return source, dsc_component, dsc_location_id, pfs
2475 __all__.append('add_dsc_to_db')
# Create the DBBinary row (and its pool file and suite associations) for an
# uploaded .deb/.udeb during process-upload.
2478 def add_deb_to_db(u, filename, session=None):
2480 Contrary to what you might expect, this routine deals with both
2481 debs and udebs. That info is in 'dbtype', whilst 'type' is
2482 'deb' for both of them
2485 entry = u.pkg.files[filename]
2488 bin.package = entry["package"]
2489 bin.version = entry["version"]
2490 bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
2491 bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
2492 bin.arch_id = get_architecture(entry["architecture"], session).arch_id
2493 bin.binarytype = entry["dbtype"]
# Work out the pool location, creating a PoolFile row if none exists yet.
2496 filename = entry["pool name"] + filename
2497 fullpath = os.path.join(cnf["Dir::Pool"], filename)
2498 if not entry.get("location id", None):
2499 entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id
2501 if entry.get("files id", None):
2502 poolfile = get_poolfile_by_id(bin.poolfile_id)
2503 bin.poolfile_id = entry["files id"]
2505 poolfile = add_poolfile(filename, entry, entry["location id"], session)
2506 bin.poolfile_id = entry["files id"] = poolfile.file_id
# A binary must map to exactly one source package; anything else is an error.
2509 bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
2510 if len(bin_sources) != 1:
2511 raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
2512 (bin.package, bin.version, entry["architecture"],
2513 filename, bin.binarytype, u.pkg.changes["fingerprint"])
2515 bin.source_id = bin_sources[0].source_id
2517 # Add and flush object so it has an ID
2521 # Add BinAssociations
2522 for suite_name in u.pkg.changes["distribution"].keys():
2523 ba = BinAssociation()
2524 ba.binary_id = bin.binary_id
2525 ba.suite_id = get_suite(suite_name).suite_id
2530 # Deal with contents - disabled for now
2531 #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
2533 # print "REJECT\nCould not determine contents of package %s" % bin.package
2534 # session.rollback()
2535 # raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)
2539 __all__.append('add_deb_to_db')
2541 ################################################################################
# ORM class for the source upload ACL table.
2543 class SourceACL(object):
2544 def __init__(self, *args, **kwargs):
2548 return '<SourceACL %s>' % self.source_acl_id
2550 __all__.append('SourceACL')
2552 ################################################################################
# ORM class for the source format table (e.g. "3.0 (quilt)").
2554 class SrcFormat(object):
2555 def __init__(self, *args, **kwargs):
2559 return '<SrcFormat %s>' % (self.format_name)
2561 __all__.append('SrcFormat')
2563 ################################################################################
# ORM class for the src_uploaders table (per-source uploader maintainers).
2565 class SrcUploader(object):
2566 def __init__(self, *args, **kwargs):
2570 return '<SrcUploader %s>' % self.uploader_id
2572 __all__.append('SrcUploader')
2574 ################################################################################
# (display name, Suite attribute) pairs used to render a Suite as text
# (see Suite.details below the Suite class definition).
2576 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2577 ('SuiteID', 'suite_id'),
2578 ('Version', 'version'),
2579 ('Origin', 'origin'),
2581 ('Description', 'description'),
2582 ('Untouchable', 'untouchable'),
2583 ('Announce', 'announce'),
2584 ('Codename', 'codename'),
2585 ('OverrideCodename', 'overridecodename'),
2586 ('ValidTime', 'validtime'),
2587 ('Priority', 'priority'),
2588 ('NotAutomatic', 'notautomatic'),
2589 ('CopyChanges', 'copychanges'),
2590 ('OverrideSuite', 'overridesuite')]
2592 # Why the heck don't we have any UNIQUE constraints in table suite?
2593 # TODO: Add UNIQUE constraints for appropriate columns.
# ORM class for the 'suite' table.  __eq__/__ne__ allow comparing a Suite
# object directly against a plain suite-name string; for non-string
# operands they return NotImplemented so Python falls back to the default
# comparison machinery.
2594 class Suite(object):
2595 def __init__(self, suite_name = None, version = None):
2596 self.suite_name = suite_name
2597 self.version = version
# NOTE(review): the __repr__ header is elided between 2597 and 2600.
2600 return '<Suite %s>' % self.suite_name
2602 def __eq__(self, val):
2603 if isinstance(val, str):
2604 return (self.suite_name == val)
2605 # This signals to use the normal comparison operator
2606 return NotImplemented
2608 def __ne__(self, val):
2609 if isinstance(val, str):
2610 return (self.suite_name != val)
2611 # This signals to use the normal comparison operator
2612 return NotImplemented
# Renders one "Label: value" line per SUITE_FIELDS entry; the enclosing
# method header and the initialisation of 'ret' are elided (presumably a
# details()-style method -- TODO confirm against the full source).
2616 for disp, field in SUITE_FIELDS:
2617 val = getattr(self, field, None)
2619 ret.append("%s: %s" % (disp, val))
2621 return "\n".join(ret)
# Architectures associated with this suite via the suite_architectures
# secondary table, sorted by arch_string.  The if-statements guarding the
# two filters below (on skipsrc/skipall) are elided from this listing.
2623 def get_architectures(self, skipsrc=False, skipall=False):
2625 Returns list of Architecture objects
2627 @type skipsrc: boolean
2628 @param skipsrc: Whether to skip returning the 'source' architecture entry
2631 @type skipall: boolean
2632 @param skipall: Whether to skip returning the 'all' architecture entry
2636 @return: list of Architecture objects for the given name (may be empty)
2639 q = object_session(self).query(Architecture).with_parent(self)
2641 q = q.filter(Architecture.arch_string != 'source')
2643 q = q.filter(Architecture.arch_string != 'all')
2644 return q.order_by(Architecture.arch_string).all()
# Lazily-evaluated query for DBSource rows of the given source package
# name in this suite (continuation of the filter chain is elided).
2646 def get_sources(self, source):
2648 Returns a query object representing DBSource that is part of C{suite}.
2650 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2652 @type source: string
2653 @param source: source package name
2655 @rtype: sqlalchemy.orm.query.Query
2656 @return: a query of DBSource
2660 session = object_session(self)
2661 return session.query(DBSource).filter_by(source = source). \
2664 __all__.append('Suite')
# Look up a single Suite by name.  The session handling (presumably a
# @session_wrapper-style decorator providing 'session' when None) and the
# q.one()/return lines are elided from this listing -- TODO confirm.
2667 def get_suite(suite, session=None):
2669 Returns Suite object for given C{suite name}.
2672 @param suite: The name of the suite
2674 @type session: Session
2675 @param session: Optional SQLA session object (a temporary one will be
2676 generated if not supplied)
2679 @return: Suite object for the requested suite name (None if not present)
2682 q = session.query(Suite).filter_by(suite_name=suite)
# On no match the function documents returning None (see @return above).
2686 except NoResultFound:
2689 __all__.append('get_suite')
2691 ################################################################################
2693 # TODO: should be removed because the implementation is too trivial
# Convenience wrapper: delegates to Suite.get_architectures() after
# resolving the suite name via get_suite().
2695 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2697 Returns list of Architecture objects for given C{suite} name
2700 @param suite: Suite name to search for
2702 @type skipsrc: boolean
2703 @param skipsrc: Whether to skip returning the 'source' architecture entry
2706 @type skipall: boolean
2707 @param skipall: Whether to skip returning the 'all' architecture entry
2710 @type session: Session
2711 @param session: Optional SQL session object (a temporary one will be
2712 generated if not supplied)
2715 @return: list of Architecture objects for the given name (may be empty)
# NOTE(review): raises AttributeError if get_suite() returns None for an
# unknown suite name -- callers appear to rely on the suite existing.
2718 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2720 __all__.append('get_suite_architectures')
2722 ################################################################################
# Association-table ORM class linking suites to allowed source formats
# (suite_src_formats); mapped in DBConn.__setupmappers.
2724 class SuiteSrcFormat(object):
2725 def __init__(self, *args, **kwargs):
# NOTE(review): the __repr__ header is elided between 2725 and 2729.
2729 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2731 __all__.append('SuiteSrcFormat')
# Source formats permitted in a suite: SrcFormat joined through the
# suite_src_formats association table, filtered by suite name and ordered
# by format_name.  The final q.all()/return line is elided.
2734 def get_suite_src_formats(suite, session=None):
2736 Returns list of allowed SrcFormat for C{suite}.
2739 @param suite: Suite name to search for
2741 @type session: Session
2742 @param session: Optional SQL session object (a temporary one will be
2743 generated if not supplied)
2746 @return: the list of allowed source formats for I{suite}
2749 q = session.query(SrcFormat)
2750 q = q.join(SuiteSrcFormat)
2751 q = q.join(Suite).filter_by(suite_name=suite)
2752 q = q.order_by('format_name')
2756 __all__.append('get_suite_src_formats')
2758 ################################################################################
# Methods of the Uid ORM class (maps the 'uid' table; the 'class Uid'
# header itself falls in an elided span just before line 2761).
# __eq__/__ne__ allow comparing a Uid object directly against a plain
# uid string, mirroring the Suite comparison idiom above.
2761 def __init__(self, uid = None, name = None):
2765 def __eq__(self, val):
2766 if isinstance(val, str):
2767 return (self.uid == val)
2768 # This signals to use the normal comparison operator
2769 return NotImplemented
2771 def __ne__(self, val):
2772 if isinstance(val, str):
2773 return (self.uid != val)
2774 # This signals to use the normal comparison operator
2775 return NotImplemented
# NOTE(review): the __repr__ header is elided before 2778.
2778 return '<Uid %s (%s)>' % (self.uid, self.name)
2780 __all__.append('Uid')
# Fetch-or-create for Uid rows.  On NoResultFound a new Uid is presumably
# constructed and added (those lines are elided), then made durable via
# commit_or_flush() -- commit when the session is function-local, flush
# when the caller supplied one (see docstring).
2783 def get_or_set_uid(uidname, session=None):
2785 Returns uid object for given uidname.
2787 If no matching uidname is found, a row is inserted.
2789 @type uidname: string
2790 @param uidname: The uid to add
2792 @type session: SQLAlchemy
2793 @param session: Optional SQL session object (a temporary one will be
2794 generated if not supplied). If not passed, a commit will be performed at
2795 the end of the function, otherwise the caller is responsible for commiting.
2798 @return: the uid object for the given uidname
2801 q = session.query(Uid).filter_by(uid=uidname)
2805 except NoResultFound:
2809 session.commit_or_flush()
2814 __all__.append('get_or_set_uid')
# Resolve the Uid owning a given GPG fingerprint string by joining
# uid -> fingerprint.  The q.one()/return and the NoResultFound fallback
# (presumably returning None) are elided from this listing.
2817 def get_uid_from_fingerprint(fpr, session=None):
2818 q = session.query(Uid)
2819 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2823 except NoResultFound:
2826 __all__.append('get_uid_from_fingerprint')
2828 ################################################################################
# Thin ORM class for the upload_blocks table (blocks uploads per source /
# fingerprint / uid); mapped in DBConn.__setupmappers.
2830 class UploadBlock(object):
2831 def __init__(self, *args, **kwargs):
# NOTE(review): the __repr__ header is elided between 2831 and 2835.
2835 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2837 __all__.append('UploadBlock')
2839 ################################################################################
# Borg / shared-state pattern: every DBConn() instance aliases the same
# attribute dict, so engine, metadata, reflected tables and mappers are
# initialised exactly once per process.
2841 class DBConn(object):
2843 database module init.
2847 def __init__(self, *args, **kwargs):
# The class-level __shared_state dict assignment is elided from this
# listing; all instances share it via the line below.
2848 self.__dict__ = self.__shared_state
# One-time initialisation guard for the shared state.
2850 if not getattr(self, 'initialised', False):
2851 self.initialised = True
# Python 2 idiom (dict.has_key); self.debug later drives the
# engine's echo flag in __createconn.
2852 self.debug = kwargs.has_key('debug')
# Reflect database tables and views into SQLAlchemy Table objects and
# attach them as self.tbl_<name> / self.view_<name> attributes.  Many
# tuple entries are elided from this listing, including the 'views'
# tuple assignment itself (expected between 2911 and 2915 -- TODO confirm).
2855 def __setuptables(self):
2856 tables_with_primary = (
2867 'changes_pending_binaries',
2868 'changes_pending_files',
2869 'changes_pending_source',
2879 'pending_bin_contents',
2891 # The following tables have primary keys but sqlalchemy
2892 # version 0.5 fails to reflect them correctly with database
2893 # versions before upgrade #41.
2895 #'build_queue_files',
2898 tables_no_primary = (
2900 'changes_pending_files_map',
2901 'changes_pending_source_files',
2902 'changes_pool_files',
2905 'suite_architectures',
2906 'suite_src_formats',
2907 'suite_build_queue_copy',
2909 # see the comment above
2911 'build_queue_files',
2915 'almost_obsolete_all_associations',
2916 'almost_obsolete_src_associations',
2917 'any_associations_source',
2918 'bin_assoc_by_arch',
2919 'bin_associations_binaries',
2920 'binaries_suite_arch',
2921 'binfiles_suite_component_arch',
2924 'newest_all_associations',
2925 'newest_any_associations',
2927 'newest_src_association',
2928 'obsolete_all_associations',
2929 'obsolete_any_associations',
2930 'obsolete_any_by_all_associations',
2931 'obsolete_src_associations',
2933 'src_associations_bin',
2934 'src_associations_src',
2935 'suite_arch_by_name',
2938 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2939 # correctly and that is why we have to use a workaround. It can
2940 # be removed as soon as we switch to version 0.6.
# Explicitly declare the 'id' primary-key column, then autoload the
# rest of each table's columns from the live database.
2941 for table_name in tables_with_primary:
2942 table = Table(table_name, self.db_meta, \
2943 Column('id', Integer, primary_key = True), \
2944 autoload=True, useexisting=True)
2945 setattr(self, 'tbl_%s' % table_name, table)
# Pure association tables: reflect as-is, no synthetic primary key.
2947 for table_name in tables_no_primary:
2948 table = Table(table_name, self.db_meta, autoload=True)
2949 setattr(self, 'tbl_%s' % table_name, table)
# Database views are reflected the same way but exposed as view_<name>.
2951 for view_name in views:
2952 view = Table(view_name, self.db_meta, autoload=True)
2953 setattr(self, 'view_%s' % view_name, view)
# Wire every ORM class in this module to its reflected table using the
# classic SQLAlchemy 0.5 mapper() API.  The 'properties' dicts rename
# raw columns (e.g. tbl_binaries.c.file -> poolfile_id) and declare
# relation()s between classes; backref= creates the reverse attribute on
# the related class.  NOTE(review): 'validator' (used as a MapperExtension
# below) is defined outside this listing -- TODO confirm its semantics.
2955 def __setupmappers(self):
2956 mapper(Architecture, self.tbl_architecture,
2957 properties = dict(arch_id = self.tbl_architecture.c.id,
2958 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2959 order_by='suite_name',
2960 backref=backref('architectures', order_by='arch_string'))),
2961 extension = validator)
2963 mapper(Archive, self.tbl_archive,
2964 properties = dict(archive_id = self.tbl_archive.c.id,
2965 archive_name = self.tbl_archive.c.name))
2967 mapper(BinAssociation, self.tbl_bin_associations,
2968 properties = dict(ba_id = self.tbl_bin_associations.c.id,
2969 suite_id = self.tbl_bin_associations.c.suite,
2970 suite = relation(Suite),
2971 binary_id = self.tbl_bin_associations.c.bin,
2972 binary = relation(DBBinary)))
2974 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2975 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
2976 filename = self.tbl_pending_bin_contents.c.filename,
2977 package = self.tbl_pending_bin_contents.c.package,
2978 version = self.tbl_pending_bin_contents.c.version,
2979 arch = self.tbl_pending_bin_contents.c.arch,
2980 otype = self.tbl_pending_bin_contents.c.type))
2982 mapper(DebContents, self.tbl_deb_contents,
2983 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
2984 package=self.tbl_deb_contents.c.package,
2985 suite=self.tbl_deb_contents.c.suite,
2986 arch=self.tbl_deb_contents.c.arch,
2987 section=self.tbl_deb_contents.c.section,
2988 filename=self.tbl_deb_contents.c.filename))
2990 mapper(UdebContents, self.tbl_udeb_contents,
2991 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
2992 package=self.tbl_udeb_contents.c.package,
2993 suite=self.tbl_udeb_contents.c.suite,
2994 arch=self.tbl_udeb_contents.c.arch,
2995 section=self.tbl_udeb_contents.c.section,
2996 filename=self.tbl_udeb_contents.c.filename))
2998 mapper(BuildQueue, self.tbl_build_queue,
2999 properties = dict(queue_id = self.tbl_build_queue.c.id))
3001 mapper(BuildQueueFile, self.tbl_build_queue_files,
3002 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3003 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3005 mapper(DBBinary, self.tbl_binaries,
3006 properties = dict(binary_id = self.tbl_binaries.c.id,
3007 package = self.tbl_binaries.c.package,
3008 version = self.tbl_binaries.c.version,
3009 maintainer_id = self.tbl_binaries.c.maintainer,
3010 maintainer = relation(Maintainer),
3011 source_id = self.tbl_binaries.c.source,
3012 source = relation(DBSource),
3013 arch_id = self.tbl_binaries.c.architecture,
3014 architecture = relation(Architecture),
3015 poolfile_id = self.tbl_binaries.c.file,
3016 poolfile = relation(PoolFile),
3017 binarytype = self.tbl_binaries.c.type,
3018 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3019 fingerprint = relation(Fingerprint),
3020 install_date = self.tbl_binaries.c.install_date,
3021 binassociations = relation(BinAssociation,
3022 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
3024 mapper(BinaryACL, self.tbl_binary_acl,
3025 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3027 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3028 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3029 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3030 architecture = relation(Architecture)))
3032 mapper(Component, self.tbl_component,
3033 properties = dict(component_id = self.tbl_component.c.id,
3034 component_name = self.tbl_component.c.name))
3036 mapper(DBConfig, self.tbl_config,
3037 properties = dict(config_id = self.tbl_config.c.id))
3039 mapper(DSCFile, self.tbl_dsc_files,
3040 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3041 source_id = self.tbl_dsc_files.c.source,
3042 source = relation(DBSource),
3043 poolfile_id = self.tbl_dsc_files.c.file,
3044 poolfile = relation(PoolFile)))
3046 mapper(PoolFile, self.tbl_files,
3047 properties = dict(file_id = self.tbl_files.c.id,
3048 filesize = self.tbl_files.c.size,
3049 location_id = self.tbl_files.c.location,
3050 location = relation(Location,
3051 # using lazy='dynamic' in the back
3052 # reference because we have A LOT of
3053 # files in one location
3054 backref=backref('files', lazy='dynamic'))))
3056 mapper(Fingerprint, self.tbl_fingerprint,
3057 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3058 uid_id = self.tbl_fingerprint.c.uid,
3059 uid = relation(Uid),
3060 keyring_id = self.tbl_fingerprint.c.keyring,
3061 keyring = relation(Keyring),
3062 source_acl = relation(SourceACL),
3063 binary_acl = relation(BinaryACL)))
3065 mapper(Keyring, self.tbl_keyrings,
3066 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3067 keyring_id = self.tbl_keyrings.c.id))
# DBChange is the richest mapping here: links to pool files, pending
# files and the policy queue the upload currently sits in.
3069 mapper(DBChange, self.tbl_changes,
3070 properties = dict(change_id = self.tbl_changes.c.id,
3071 poolfiles = relation(PoolFile,
3072 secondary=self.tbl_changes_pool_files,
3073 backref="changeslinks"),
3074 seen = self.tbl_changes.c.seen,
3075 source = self.tbl_changes.c.source,
3076 binaries = self.tbl_changes.c.binaries,
3077 architecture = self.tbl_changes.c.architecture,
3078 distribution = self.tbl_changes.c.distribution,
3079 urgency = self.tbl_changes.c.urgency,
3080 maintainer = self.tbl_changes.c.maintainer,
3081 changedby = self.tbl_changes.c.changedby,
3082 date = self.tbl_changes.c.date,
3083 version = self.tbl_changes.c.version,
3084 files = relation(ChangePendingFile,
3085 secondary=self.tbl_changes_pending_files_map,
3086 backref="changesfile"),
3087 in_queue_id = self.tbl_changes.c.in_queue,
3088 in_queue = relation(PolicyQueue,
3089 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3090 approved_for_id = self.tbl_changes.c.approved_for))
3092 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3093 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3095 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3096 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3097 filename = self.tbl_changes_pending_files.c.filename,
3098 size = self.tbl_changes_pending_files.c.size,
3099 md5sum = self.tbl_changes_pending_files.c.md5sum,
3100 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3101 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
# Two relations to Maintainer from the same table need explicit
# primaryjoins to disambiguate maintainer_id vs changedby_id.
3103 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3104 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3105 change = relation(DBChange),
3106 maintainer = relation(Maintainer,
3107 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3108 changedby = relation(Maintainer,
3109 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3110 fingerprint = relation(Fingerprint),
3111 source_files = relation(ChangePendingFile,
3112 secondary=self.tbl_changes_pending_source_files,
3113 backref="pending_sources")))
3116 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3117 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3118 keyring = relation(Keyring, backref="keyring_acl_map"),
3119 architecture = relation(Architecture)))
3121 mapper(Location, self.tbl_location,
3122 properties = dict(location_id = self.tbl_location.c.id,
3123 component_id = self.tbl_location.c.component,
3124 component = relation(Component),
3125 archive_id = self.tbl_location.c.archive,
3126 archive = relation(Archive),
3127 # FIXME: the 'type' column is old cruft and
3128 # should be removed in the future.
3129 archive_type = self.tbl_location.c.type))
# Same disambiguation trick as ChangePendingSource: a maintainer both
# maintains sources and appears as 'changed-by' on others.
3131 mapper(Maintainer, self.tbl_maintainer,
3132 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3133 maintains_sources = relation(DBSource, backref='maintainer',
3134 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3135 changed_sources = relation(DBSource, backref='changedby',
3136 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))))
3138 mapper(NewComment, self.tbl_new_comments,
3139 properties = dict(comment_id = self.tbl_new_comments.c.id))
3141 mapper(Override, self.tbl_override,
3142 properties = dict(suite_id = self.tbl_override.c.suite,
3143 suite = relation(Suite),
3144 package = self.tbl_override.c.package,
3145 component_id = self.tbl_override.c.component,
3146 component = relation(Component),
3147 priority_id = self.tbl_override.c.priority,
3148 priority = relation(Priority),
3149 section_id = self.tbl_override.c.section,
3150 section = relation(Section),
3151 overridetype_id = self.tbl_override.c.type,
3152 overridetype = relation(OverrideType)))
3154 mapper(OverrideType, self.tbl_override_type,
3155 properties = dict(overridetype = self.tbl_override_type.c.type,
3156 overridetype_id = self.tbl_override_type.c.id))
3158 mapper(PolicyQueue, self.tbl_policy_queue,
3159 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3161 mapper(Priority, self.tbl_priority,
3162 properties = dict(priority_id = self.tbl_priority.c.id))
3164 mapper(Section, self.tbl_section,
3165 properties = dict(section_id = self.tbl_section.c.id,
3166 section=self.tbl_section.c.section))
3168 mapper(DBSource, self.tbl_source,
3169 properties = dict(source_id = self.tbl_source.c.id,
3170 version = self.tbl_source.c.version,
3171 maintainer_id = self.tbl_source.c.maintainer,
3172 poolfile_id = self.tbl_source.c.file,
3173 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3174 fingerprint_id = self.tbl_source.c.sig_fpr,
3175 fingerprint = relation(Fingerprint),
3176 changedby_id = self.tbl_source.c.changedby,
3177 srcfiles = relation(DSCFile,
3178 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
3179 suites = relation(Suite, secondary=self.tbl_src_associations,
3181 srcuploaders = relation(SrcUploader)))
3183 mapper(SourceACL, self.tbl_source_acl,
3184 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3186 mapper(SrcFormat, self.tbl_src_format,
3187 properties = dict(src_format_id = self.tbl_src_format.c.id,
3188 format_name = self.tbl_src_format.c.format_name))
3190 mapper(SrcUploader, self.tbl_src_uploaders,
3191 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3192 source_id = self.tbl_src_uploaders.c.source,
3193 source = relation(DBSource,
3194 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3195 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3196 maintainer = relation(Maintainer,
3197 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3199 mapper(Suite, self.tbl_suite,
3200 properties = dict(suite_id = self.tbl_suite.c.id,
3201 policy_queue = relation(PolicyQueue),
3202 copy_queues = relation(BuildQueue, secondary=self.tbl_suite_build_queue_copy)))
3204 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3205 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3206 suite = relation(Suite, backref='suitesrcformats'),
3207 src_format_id = self.tbl_suite_src_formats.c.src_format,
3208 src_format = relation(SrcFormat)))
3210 mapper(Uid, self.tbl_uid,
3211 properties = dict(uid_id = self.tbl_uid.c.id,
3212 fingerprint = relation(Fingerprint)))
3214 mapper(UploadBlock, self.tbl_upload_blocks,
3215 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3216 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3217 uid = relation(Uid, backref="uploadblocks")))
3219 ## Connection functions
# Build the PostgreSQL connection string from the dak configuration,
# create the engine/metadata/session factory and run table reflection and
# mapper setup.  The 'cnf = Config()' line and the if/else around the two
# connstr branches are elided from this listing -- presumably keyed on
# whether DB::Host is set (TCP) or not (local unix socket).
3220 def __createconn(self):
# Local import; presumably deferred to avoid an import cycle with the
# config module -- TODO confirm.
3221 from config import Config
# TCP form: postgres://host[:port]/dbname ("-1" means unset port).
3225 connstr = "postgres://%s" % cnf["DB::Host"]
3226 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3227 connstr += ":%s" % cnf["DB::Port"]
3228 connstr += "/%s" % cnf["DB::Name"]
# Unix-socket form: postgres:///dbname[?port=N].
3231 connstr = "postgres:///%s" % cnf["DB::Name"]
3232 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3233 connstr += "?port=%s" % cnf["DB::Port"]
# echo=self.debug logs all SQL when DBConn was built with debug=...
3235 self.db_pg = create_engine(connstr, echo=self.debug)
3236 self.db_meta = MetaData()
3237 self.db_meta.bind = self.db_pg
3238 self.db_smaker = sessionmaker(bind=self.db_pg,
# Reflection must precede mapper setup: mappers reference tbl_* attrs.
3242 self.__setuptables()
3243 self.__setupmappers()
# Body of the (elided) session() method: hand out a new session from
# the shared factory.
3246 return self.db_smaker()
3248 __all__.append('DBConn')