5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
47 import simplejson as json
49 from datetime import datetime, timedelta
50 from errno import ENOENT
51 from tempfile import mkstemp, mkdtemp
53 from inspect import getargspec
56 from sqlalchemy import create_engine, Table, MetaData, Column, Integer
57 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
58 backref, MapperExtension, EXT_CONTINUE
59 from sqlalchemy import types as sqltypes
61 # Don't remove this, we re-export the exceptions to scripts which import us
62 from sqlalchemy.exc import *
63 from sqlalchemy.orm.exc import NoResultFound
65 # Only import Config until Queue stuff is changed to store its config
67 from config import Config
68 from textutils import fix_maintainer
69 from dak_exceptions import DBUpdateError, NoSourceFieldError
71 # suppress some deprecation warnings in squeeze related to sqlalchemy
73 warnings.filterwarnings('ignore', \
74 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
76 # TODO: sqlalchemy needs some extra configuration to correctly reflect
77 # the ind_deb_contents_* indexes - we ignore the warnings at the moment
78 warnings.filterwarnings("ignore", 'Predicate of partial index', SAWarning)
81 ################################################################################
# Patch in support for the debversion field type so that it works during
# reflection.
# NOTE(review): the try:/except wrapper that selects between the two
# assignments below is elided in this extract; sqlalchemy 0.6 provides
# UserDefinedType, 0.5 falls back to TypeEngine.
# that is for sqlalchemy 0.6
UserDefinedType = sqltypes.UserDefinedType
# this one for sqlalchemy 0.5
UserDefinedType = sqltypes.TypeEngine
class DebVersion(UserDefinedType):
    """SQLAlchemy column type mapping PostgreSQL's 'debversion' type.

    NOTE(review): the method bodies are elided in this extract; only the
    signatures are visible.  get_col_spec() presumably returns the SQL
    type name and the two processors presumably pass values through
    unchanged -- confirm against the full file.
    """
    def get_col_spec(self):

    def bind_processor(self, dialect):

    # ' = None' is needed for sqlalchemy 0.5:
    def result_processor(self, dialect, coltype = None):
# Register DebVersion with the postgres dialect's reflection table for the
# SQLAlchemy versions dak has been ported to; refuse to run on anything else.
sa_major_version = sqlalchemy.__version__[0:3]
if sa_major_version in ["0.5", "0.6"]:
    from sqlalchemy.databases import postgres
    postgres.ischema_names['debversion'] = DebVersion
# NOTE(review): the 'else:' line introducing the raise below is elided in
# this extract.
    raise Exception("dak only ported to SQLA versions 0.5 and 0.6. See daklib/dbconn.py")
111 ################################################################################
113 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
115 ################################################################################
def session_wrapper(fn):
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one
    and destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get('session')

        # NOTE(review): the surrounding 'if session is None:' test is
        # elided in this extract.
        if len(args) <= len(getargspec(fn)[0]) - 1:
            # No session specified as last argument or in kwargs
            private_transaction = True
            session = kwargs['session'] = DBConn().session()
        # Session is last argument in args
        # NOTE(review): the 'else:' branch header and the lines that take
        # the session out of args are elided in this extract.
        session = args[-1] = DBConn().session()
        private_transaction = True

        # A session we created ourselves is committed for the caller;
        # a caller-supplied session is only flushed.
        if private_transaction:
            session.commit_or_flush = session.commit
        # NOTE(review): the 'else:' line is elided in this extract.
        session.commit_or_flush = session.flush

        # NOTE(review): the try:/finally: wrapping the call, and the
        # session.close() in the finally block, are elided in this extract.
        return fn(*args, **kwargs)

        if private_transaction:
            # We created a session; close it.

    # Preserve the wrapped function's identity for introspection/docs.
    wrapped.__doc__ = fn.__doc__
    wrapped.func_name = fn.func_name
164 __all__.append('session_wrapper')
166 ################################################################################
class ORMObject(object):
    """
    ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
    derived classes must implement the properties() method.

    NOTE(review): several lines of this class are elided in this extract;
    the elisions are marked inline below.
    """

    def properties(self):
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most important
        property name should be returned as the first element in the list
        because it is used by repr().
        """
        # NOTE(review): body (presumably 'raise NotImplementedError') is
        # elided in this extract.

    # -- json(): the 'def json(self):' header, its docstring delimiters and
    # the initialisation of 'data' are elided in this extract. --
        """
        Returns a JSON representation of the object based on the properties
        returned from the properties() method.
        """
        # add created and modified
        all_properties = self.properties() + ['created', 'modified']
        for property in all_properties:
            # check for list or query
            if property[-6:] == '_count':
                real_property = property[:-6]
                if not hasattr(self, real_property):
                    # NOTE(review): the skip ('continue') is elided here.
                value = getattr(self, real_property)
                if hasattr(value, '__len__'):
                    # NOTE(review): 'value = len(value)' branch elided here.
                elif hasattr(value, 'count'):
                    # query objects expose count() instead of __len__
                    value = value.count()
                    # NOTE(review): 'else:' line elided before the raise.
                    raise KeyError('Do not understand property %s.' % property)
            if not hasattr(self, property):
                # NOTE(review): the skip ('continue') is elided here.
            value = getattr(self, property)
            # NOTE(review): the branches serializing simple values and
            # datetimes are elided here.
            elif isinstance(value, ORMObject):
                # use repr() for ORMObject types
            # we want a string for all other types because json cannot
            # serialize arbitrary objects
            data[property] = value
        return json.dumps(data)

    # -- classname(): the 'def classname(self):' header is elided. --
        """
        Returns the name of the class.
        """
        return type(self).__name__

    # -- __repr__(): the 'def __repr__(self):' header is elided. --
        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return '<%s %s>' % (self.classname(), str(value))

    # -- __str__(): the 'def __str__(self):' header is elided. --
        """
        Returns a human readable form of the object using the properties()
        method.
        """
        return '<%s %s>' % (self.classname(), self.json())

    def not_null_constraints(self):
        """
        Returns a list of properties that must be not NULL. Derived classes
        should override this method if needed.
        """
        # NOTE(review): body (presumably 'return []') elided in this extract.

    # Template for the error raised by validate() below.
    validation_message = \
        "Validation failed because property '%s' must not be empty in object\n%s"

    # -- validate(): the 'def validate(self):' header is elided. --
        """
        This function validates the not NULL constraints as returned by
        not_null_constraints(). It raises the DBUpdateError exception if
        validation fails.
        """
        for property in self.not_null_constraints():
            if not hasattr(self, property) or getattr(self, property) is None:
                raise DBUpdateError(self.validation_message % \
                    (property, str(self)))
269 __all__.append('ORMObject')
271 ################################################################################
class Validator(MapperExtension):
    """
    This class calls the validate() method for each instance for the
    'before_update' and 'before_insert' events. A global object validator is
    used for configuring the individual mappers.
    """

    def before_update(self, mapper, connection, instance):
        # NOTE(review): body elided in this extract (presumably calls
        # instance.validate() and returns EXT_CONTINUE).

    def before_insert(self, mapper, connection, instance):
        # NOTE(review): body elided in this extract (same pattern as
        # before_update, presumably).

# Shared extension instance handed to each mapper() call below.
validator = Validator()
290 ################################################################################
class Architecture(ORMObject):
    """ORM class for one row of the architecture table."""

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow comparing an Architecture directly against a plain name.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        # Mirror of __eq__ for inequality against a plain name.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # First entry is what ORMObject.__repr__ displays.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
315 __all__.append('Architecture')
def get_architecture(architecture, session=None):
    """
    Returns Architecture object for the given C{architecture} name.

    @type architecture: string
    @param architecture: The name of the architecture

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Architecture
    @return: Architecture object for the given arch (None if not present)
    """
    q = session.query(Architecture).filter_by(arch_string=architecture)

    # NOTE(review): the 'try:' and 'return q.one()' lines are elided in
    # this extract; None is presumably returned on NoResultFound.
    except NoResultFound:
340 __all__.append('get_architecture')
# TODO: should be removed because the implementation is too trivial
def get_architecture_suites(architecture, session=None):
    """
    Returns list of Suite objects for given C{architecture} name.

    @type architecture: str
    @param architecture: Architecture name to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Suite objects for the given name (may be empty)
    """
    # NOTE(review): raises AttributeError if the architecture is unknown
    # (get_architecture then returns None).
    return get_architecture(architecture, session).suites
361 __all__.append('get_architecture_suites')
363 ################################################################################
class Archive(object):
    """ORM class for the archive table; attributes are filled in by the
    SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<Archive %s>' % self.archive_name
372 __all__.append('Archive')
def get_archive(archive, session=None):
    """
    Returns database id for given C{archive}.

    @type archive: string
    @param archive: the name of the archive

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Archive
    @return: Archive object for the given name (None if not present)
    """
    # Archive names are matched case-insensitively by lower-casing the input.
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)

    # NOTE(review): the 'try:' and 'return q.one()' lines are elided in
    # this extract; None is presumably returned on NoResultFound.
    except NoResultFound:
399 __all__.append('get_archive')
401 ################################################################################
class BinAssociation(object):
    """ORM class linking a binary package to a suite; attributes are
    filled in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinAssociation %s (%s, %s)>' % (self.ba_id, self.binary, self.suite)
410 __all__.append('BinAssociation')
412 ################################################################################
class BinContents(object):
    """ORM class for one contents entry of a binary package; attributes
    are filled in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinContents (%s, %s)>' % (self.binary, self.filename)
421 __all__.append('BinContents')
423 ################################################################################
class DBBinary(object):
    """ORM class for a row of the binaries table; attributes are filled
    in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBBinary %s (%s, %s)>' % (self.package, self.version, self.architecture)
432 __all__.append('DBBinary')
def get_suites_binary_in(package, session=None):
    """
    Returns list of Suite objects which given C{package} name is in.

    @type package: str
    @param package: DBBinary package name to search for

    @rtype: list
    @return: list of Suite objects for the given package
    """
    return session.query(Suite).join(BinAssociation).join(DBBinary).filter_by(package=package).all()
448 __all__.append('get_suites_binary_in')
def get_binary_from_id(binary_id, session=None):
    """
    Returns DBBinary object for given C{id}.

    @type binary_id: int
    @param binary_id: Id of the required binary

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBBinary
    @return: DBBinary object for the given binary (None if not present)
    """
    q = session.query(DBBinary).filter_by(binary_id=binary_id)

    # NOTE(review): the 'try:' and 'return q.one()' lines are elided in
    # this extract; None is presumably returned on NoResultFound.
    except NoResultFound:
473 __all__.append('get_binary_from_id')
def get_binaries_from_name(package, version=None, architecture=None, session=None):
    """
    Returns list of DBBinary objects for given C{package} name.

    @type package: str
    @param package: DBBinary package name to search for

    @type version: str or None
    @param version: Version to search for (or None)

    @type architecture: str, list or None
    @param architecture: Architectures to limit to (or None if no limit)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given name (may be empty)
    """
    q = session.query(DBBinary).filter_by(package=package)

    if version is not None:
        q = q.filter_by(version=version)

    if architecture is not None:
        # Normalise a single architecture name to a one-element list.
        if not isinstance(architecture, list):
            architecture = [architecture]
        q = q.join(Architecture).filter(Architecture.arch_string.in_(architecture))

    # NOTE(review): the final 'return q.all()' (or similar) is elided in
    # this extract.
511 __all__.append('get_binaries_from_name')
def get_binaries_from_source_id(source_id, session=None):
    """
    Returns list of DBBinary objects for given C{source_id}.

    @type source_id: int
    @param source_id: source_id to search for

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBBinary objects for the given id (may be empty)
    """
    return session.query(DBBinary).filter_by(source_id=source_id).all()
531 __all__.append('get_binaries_from_source_id')
def get_binary_from_name_suite(package, suitename, session=None):
    ### For dak examine-package
    ### XXX: Doesn't use object API yet
    # NOTE(review): parameters are interpolated straight into the SQL with
    # '%' instead of being bound -- SQL-injectable if package/suitename can
    # ever come from untrusted input; compare get_binary_components() below
    # which uses bind parameters.
    # NOTE(review): several join conditions of the WHERE clause are elided
    # in this extract.
    sql = """SELECT DISTINCT(b.package), b.version, c.name, su.suite_name
             FROM binaries b, files fi, location l, component c, bin_associations ba, suite su
             WHERE b.package='%(package)s'
             AND fi.location = l.id
             AND l.component = c.id
             AND su.suite_name %(suitename)s
             ORDER BY b.version DESC"""

    return session.execute(sql % {'package': package, 'suitename': suitename})
551 __all__.append('get_binary_from_name_suite')
def get_binary_components(package, suitename, arch, session=None):
    # Check for packages that have moved from one component to another
    # (parameters are passed as bind values here -- injection-safe).
    # NOTE(review): the tail of the query (remaining join conditions and the
    # closing quotes) is elided in this extract.
    query = """SELECT c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f
    WHERE b.package=:package AND s.suite_name=:suitename
      AND (a.arch_string = :arch OR a.arch_string = 'all')
      AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id
      AND f.location = l.id
      AND l.component = c.id

    vals = {'package': package, 'suitename': suitename, 'arch': arch}

    return session.execute(query, vals)
568 __all__.append('get_binary_components')
570 ################################################################################
class BinaryACL(object):
    """ORM class for a binary upload ACL row; attributes are filled in by
    the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACL %s>' % self.binary_acl_id
579 __all__.append('BinaryACL')
581 ################################################################################
class BinaryACLMap(object):
    """ORM class mapping a fingerprint to a BinaryACL; attributes are
    filled in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BinaryACLMap %s>' % self.binary_acl_map_id
590 __all__.append('BinaryACLMap')
592 ################################################################################
597 ArchiveDir "%(archivepath)s";
598 OverrideDir "%(overridedir)s";
599 CacheDir "%(cachedir)s";
604 Packages::Compress ". bzip2 gzip";
605 Sources::Compress ". bzip2 gzip";
610 bindirectory "incoming"
615 BinOverride "override.sid.all3";
616 BinCacheDB "packages-accepted.db";
618 FileList "%(filelist)s";
621 Packages::Extensions ".deb .udeb";
624 bindirectory "incoming/"
627 BinOverride "override.sid.all3";
628 SrcOverride "override.sid.all3.src";
629 FileList "%(filelist)s";
class BuildQueue(object):
    """A build queue (e.g. buildd incoming) together with the apt archive
    metadata maintained for it.

    NOTE(review): many lines of this class are elided in this extract; the
    elisions are marked inline below.
    """

    def __init__(self, *args, **kwargs):
        # NOTE(review): body ('pass', presumably) elided in this extract.

    # -- __repr__(): the 'def __repr__(self):' header is elided. --
        return '<BuildQueue %s>' % self.queue_name

    def write_metadata(self, starttime, force=False):
        # Regenerate Packages/Sources/Release for this queue via
        # apt-ftparchive, unless metadata generation is disabled and force
        # is not set.
        # Do we write out metafiles?
        if not (force or self.generate_metadata):
            # NOTE(review): 'return' elided here.

        session = DBConn().session().object_session(self)

        fl_fd = fl_name = ac_fd = ac_name = None
        # All architectures except the pseudo-arch 'source'.
        arches = " ".join([ a.arch_string for a in session.query(Architecture).all() if a.arch_string != 'source' ])
        startdir = os.getcwd()
        # NOTE(review): 'cnf = Config()' and the enclosing 'try:' are
        # elided in this extract ('cnf' is used below).

        # Grab files we want to include
        newer = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) > starttime).all()
        # Write file list with newer files
        (fl_fd, fl_name) = mkstemp()
        # NOTE(review): the 'for n in newer:' loop header and
        # 'os.close(fl_fd)' are elided.
        os.write(fl_fd, '%s\n' % n.fullpath)

        # Write minimal apt.conf
        # TODO: Remove hardcoding from template
        (ac_fd, ac_name) = mkstemp()
        # NOTE(review): the remaining template substitutions (filelist,
        # ...), the closing paren and 'os.close(ac_fd)' are elided.
        os.write(ac_fd, MINIMAL_APT_CONF % {'archivepath': self.path,
                                            'cachedir': cnf["Dir::Cache"],
                                            'overridedir': cnf["Dir::Override"],

        # Run apt-ftparchive generate
        os.chdir(os.path.dirname(ac_name))
        os.system('apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate %s' % os.path.basename(ac_name))

        # Run apt-ftparchive release
        # TODO: Eww - fix this
        bname = os.path.basename(self.path)
        # NOTE(review): the chdir to the queue's parent directory is elided.

        # We have to remove the Release file otherwise it'll be included in the
        # NOTE(review): the enclosing 'try:' is elided.
        os.unlink(os.path.join(bname, 'Release'))
        # NOTE(review): the 'except OSError: pass' is elided.

        # NOTE(review): configuration values are interpolated into a shell
        # command; fine for trusted config, but not injection-safe.
        os.system("""apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="%s" -o APT::FTPArchive::Release::Label="%s" -o APT::FTPArchive::Release::Description="%s" -o APT::FTPArchive::Release::Architectures="%s" release %s > Release""" % (self.origin, self.label, self.releasedescription, arches, bname))

        # Crude hack with open and append, but this whole section is and should be redone.
        if self.notautomatic:
            release=open("Release", "a")
            release.write("NotAutomatic: yes")
            # NOTE(review): 'release.close()' elided here.

        # Sign the Release file with the configured key.
        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        # NOTE(review): the trailing '"""' below closes the string and then
        # concatenates an empty '""' literal -- harmless, but looks like a
        # leftover typo.
        os.system("gpg %s --no-options --batch --no-tty --armour --default-key %s --detach-sign -o Release.gpg Release""" % (keyring, self.signingkey))

        # Move the files if we got this far
        os.rename('Release', os.path.join(bname, 'Release'))
        os.rename('Release.gpg', os.path.join(bname, 'Release.gpg'))

        # Clean up any left behind files
        # NOTE(review): the 'finally:' block that closes/unlinks the
        # temporary files and restores startdir is elided in this extract.

    def clean_and_update(self, starttime, Logger, dryrun=False):
        """WARNING: This routine commits for you"""
        session = DBConn().session().object_session(self)

        if self.generate_metadata and not dryrun:
            self.write_metadata(starttime)

        # Grab files older than our execution time
        older = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter(BuildQueueFile.lastused + timedelta(seconds=self.stay_of_execution) <= starttime).all()

        # NOTE(review): the 'for o in older:' loop header, the 'try:' and
        # the dryrun branch structure are elided in this extract.
        Logger.log(["I: Would have removed %s from the queue" % o.fullpath])
        Logger.log(["I: Removing %s from the queue" % o.fullpath])
        os.unlink(o.fullpath)
        # NOTE(review): the 'except OSError, e:' line is elided.
        # If it wasn't there, don't worry
        if e.errno == ENOENT:
            # NOTE(review): the branch body ('pass'/'continue') is elided.
        # TODO: Replace with proper logging call
        Logger.log(["E: Could not remove %s" % o.fullpath])
        # NOTE(review): deletion of the DB row and the commit are elided.

        # Remove index/advisory files that no BuildQueueFile row refers to.
        for f in os.listdir(self.path):
            if f.startswith('Packages') or f.startswith('Source') or f.startswith('Release') or f.startswith('advisory'):
                # NOTE(review): 'continue' and the following 'try:' elided.
            r = session.query(BuildQueueFile).filter_by(build_queue_id = self.queue_id).filter_by(filename = f).one()
            except NoResultFound:
                fp = os.path.join(self.path, f)
                # NOTE(review): the dryrun test is elided.
                Logger.log(["I: Would remove unused link %s" % fp])
                Logger.log(["I: Removing unused link %s" % fp])
                # NOTE(review): 'os.unlink(fp)' and the 'except OSError:'
                # are elided.
            Logger.log(["E: Failed to unlink unreferenced file %s" % r.fullpath])

    def add_file_from_pool(self, poolfile):
        """Copies (or symlinks) a pool file into this queue. Assumes that
        the PoolFile object is attached to the same SQLAlchemy session as
        the Queue object is.

        The caller is responsible for committing after calling this function."""
        poolfile_basename = poolfile.filename[poolfile.filename.rindex(os.sep)+1:]

        # Check if we have a file of this name or this ID already
        for f in self.queuefiles:
            if f.fileid is not None and f.fileid == poolfile.file_id or \
               f.poolfile.filename == poolfile_basename:
                # In this case, update the BuildQueueFile entry so we
                # don't remove it too early
                f.lastused = datetime.now()
                DBConn().session().object_session(poolfile).add(f)
                # NOTE(review): 'return f' elided here.

        # Prepare BuildQueueFile object
        qf = BuildQueueFile()
        qf.build_queue_id = self.queue_id
        qf.lastused = datetime.now()
        qf.filename = poolfile_basename

        targetpath = poolfile.fullpath
        queuepath = os.path.join(self.path, poolfile_basename)

        # NOTE(review): the 'try:' and the 'if self.copy_files:' structure
        # around the copy-or-symlink choice are elided in this extract.
        # We need to copy instead of symlink
        utils.copy(targetpath, queuepath)
        # NULL in the fileid field implies a copy
        # NOTE(review): 'else:' line elided.
        os.symlink(targetpath, queuepath)
        qf.fileid = poolfile.file_id
        # NOTE(review): the 'except OSError: return None' is elided.

        # Get the same session as the PoolFile is using and add the qf to it
        DBConn().session().object_session(poolfile).add(qf)
        # NOTE(review): 'return qf' elided in this extract.
832 __all__.append('BuildQueue')
def get_build_queue(queuename, session=None):
    """
    Returns BuildQueue object for given C{queue name}, creating it if it does
    not exist.

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: BuildQueue
    @return: BuildQueue object for the given queue
    """
    q = session.query(BuildQueue).filter_by(queue_name=queuename)

    # NOTE(review): the 'try:' and 'return q.one()' lines are elided in
    # this extract.
    except NoResultFound:
858 __all__.append('get_build_queue')
860 ################################################################################
class BuildQueueFile(object):
    """A single file tracked inside a build queue; attributes are filled
    in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<BuildQueueFile %s (%s)>' % (self.filename, self.build_queue_id)

    @property
    def fullpath(self):
        # Absolute path of this file inside its queue directory.
        return os.path.join(self.buildqueue.path, self.filename)
874 __all__.append('BuildQueueFile')
876 ################################################################################
class ChangePendingBinary(object):
    """ORM class for a pending binary of a changes upload; attributes are
    filled in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingBinary %s>' % self.change_pending_binary_id
885 __all__.append('ChangePendingBinary')
887 ################################################################################
class ChangePendingFile(object):
    """ORM class for a pending file of a changes upload; attributes are
    filled in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingFile %s>' % self.change_pending_file_id
896 __all__.append('ChangePendingFile')
898 ################################################################################
class ChangePendingSource(object):
    """ORM class for a pending source of a changes upload; attributes are
    filled in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ChangePendingSource %s>' % self.change_pending_source_id
907 __all__.append('ChangePendingSource')
909 ################################################################################
class Component(object):
    """ORM class for a row of the component table; attributes are filled
    in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __eq__(self, val):
        # Allow comparing a Component directly against a plain name.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        # Mirror of __eq__ for inequality against a plain name.
        if not isinstance(val, str):
            # This signals to use the normal comparison operator
            return NotImplemented
        return self.component_name != val

    def __repr__(self):
        return '<Component %s>' % self.component_name
931 __all__.append('Component')
def get_component(component, session=None):
    """
    Returns database id for given C{component}.

    @type component: string
    @param component: The name of the component

    @rtype: int
    @return: the database id for the given component
    """
    # Component names are matched case-insensitively by lower-casing the input.
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)

    # NOTE(review): the 'try:' and 'return q.one()' lines are elided in
    # this extract.
    except NoResultFound:
954 __all__.append('get_component')
956 ################################################################################
class DBConfig(object):
    """ORM class for a row of the config table; attributes are filled in
    by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBConfig %s>' % self.name
965 __all__.append('DBConfig')
967 ################################################################################
def get_or_set_contents_file_id(filename, session=None):
    """
    Returns database id for given filename.

    If no matching file is found, a row is inserted.

    @type filename: string
    @param filename: The filename
    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: int
    @return: the database id for the given filename
    """
    q = session.query(ContentFilename).filter_by(filename=filename)

    # NOTE(review): the 'try:' line is elided in this extract.
    ret = q.one().cafilename_id
    except NoResultFound:
        cf = ContentFilename()
        cf.filename = filename
        # NOTE(review): 'session.add(cf)' is elided here.
        session.commit_or_flush()
        ret = cf.cafilename_id

    # NOTE(review): 'return ret' is elided in this extract.
1000 __all__.append('get_or_set_contents_file_id')
def get_contents(suite, overridetype, section=None, session=None):
    """
    Returns contents for a suite / overridetype combination, limiting
    to a section if not None.

    @type suite: Suite
    @param suite: Suite object

    @type overridetype: OverrideType
    @param overridetype: OverrideType object

    @type section: Section
    @param section: Optional section object to limit results to

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: ResultsProxy
    @return: ResultsProxy object set up to return tuples of (filename, section,
    package, arch id)
    """
    # find me all of the contents for a given suite
    # NOTE(review): the selected columns between 'AS fn,' and 'FROM'
    # (section/package/architecture) are elided in this extract.
    contents_q = """SELECT (p.path||'/'||n.file) AS fn,
                        FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
                        JOIN content_file_names n ON (c.filename=n.id)
                        JOIN binaries b ON (b.id=c.binary_pkg)
                        JOIN override o ON (o.package=b.package)
                        JOIN section s ON (s.id=o.section)
                        WHERE o.suite = :suiteid AND o.type = :overridetypeid
                        AND b.type=:overridetypename"""

    vals = {'suiteid': suite.suite_id,
            'overridetypeid': overridetype.overridetype_id,
            'overridetypename': overridetype.overridetype}

    if section is not None:
        contents_q += " AND s.id = :sectionid"
        vals['sectionid'] = section.section_id

    contents_q += " ORDER BY fn"

    return session.execute(contents_q, vals)
1051 __all__.append('get_contents')
1053 ################################################################################
class ContentFilepath(object):
    """ORM class for a contents path component; attributes are filled in
    by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentFilepath %s>' % self.filepath
1062 __all__.append('ContentFilepath')
def get_or_set_contents_path_id(filepath, session=None):
    """
    Returns database id for given path.

    If no matching file is found, a row is inserted.

    @type filepath: string
    @param filepath: The filepath

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for committing.

    @rtype: int
    @return: the database id for the given path
    """
    q = session.query(ContentFilepath).filter_by(filepath=filepath)

    # NOTE(review): the 'try:' line is elided in this extract.
    ret = q.one().cafilepath_id
    except NoResultFound:
        cf = ContentFilepath()
        cf.filepath = filepath
        # NOTE(review): 'session.add(cf)' is elided here.
        session.commit_or_flush()
        ret = cf.cafilepath_id

    # NOTE(review): 'return ret' is elided in this extract.
1096 __all__.append('get_or_set_contents_path_id')
1098 ################################################################################
class ContentAssociation(object):
    """ORM class linking a contents filename/path to a binary; attributes
    are filled in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<ContentAssociation %s>' % self.ca_id
1107 __all__.append('ContentAssociation')
def insert_content_paths(binary_id, fullpaths, session=None):
    """
    Make sure given paths are associated with given binary id.

    @type binary_id: int
    @param binary_id: the id of the binary
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function, otherwise the caller is
    responsible for committing.

    @return: True upon success
    """
    privatetrans = False
    # NOTE(review): the 'if session is None:' test and the
    # 'privatetrans = True' assignment are elided in this extract.
    session = DBConn().session()

    def generate_path_dicts():
        # Normalise './'-prefixed paths and yield execute() parameter dicts.
        for fullpath in fullpaths:
            if fullpath.startswith( './' ):
                fullpath = fullpath[2:]

            yield {'filename':fullpath, 'id': binary_id }

    for d in generate_path_dicts():
        # NOTE(review): the 'd' argument / closing paren of this call, the
        # commit and 'return True' are elided in this extract.
        session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",

    # NOTE(review): the 'except:' handler wrapping the above is elided.
    traceback.print_exc()

    # Only rollback if we set up the session ourself
1160 __all__.append('insert_content_paths')
1162 ################################################################################
class DSCFile(object):
    """ORM class for one file referenced by a .dsc; attributes are filled
    in by the SQLAlchemy mapper."""

    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DSCFile %s>' % self.dscfile_id
1171 __all__.append('DSCFile')
def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
    """
    Returns a list of DSCFiles which may be empty.

    @type dscfile_id: int (optional)
    @param dscfile_id: the dscfile_id of the DSCFiles to find

    @type source_id: int (optional)
    @param source_id: the source id related to the DSCFiles to find

    @type poolfile_id: int (optional)
    @param poolfile_id: the poolfile id related to the DSCFiles to find

    @rtype: list
    @return: Possibly empty list of DSCFiles
    """
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    # NOTE(review): 'return q.all()' is elided in this extract.
1204 __all__.append('get_dscfiles')
1206 ################################################################################
class PoolFile(ORMObject):
    """A file in the pool; maps a row of the 'files' table."""

    # NOTE(review): the continuation line of this signature (presumably
    # 'md5sum = None):') is elided in this extract.
    def __init__(self, filename = None, location = None, filesize = -1, \
        self.filename = filename
        self.location = location
        self.filesize = filesize
        self.md5sum = md5sum

    # -- fullpath: the '@property'/'def fullpath(self):' header lines are
    # elided in this extract. --
        return os.path.join(self.location.path, self.filename)

    def is_valid(self, filesize = -1, md5sum = None):
        # True iff the stored size and md5sum match the supplied values.
        return self.filesize == filesize and self.md5sum == md5sum

    def properties(self):
        # First entry is what ORMObject.__repr__ displays.
        return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
            'sha256sum', 'location', 'source', 'last_used']

    def not_null_constraints(self):
        return ['filename', 'md5sum', 'location']
1230 __all__.append('PoolFile')
def check_poolfile(filename, filesize, md5sum, location_id, session=None):
    """
    Checks a file against the DB and returns a tuple of
    (ValidFileFound [boolean], PoolFile object or None).

    @type filename: string
    @param filename: the filename of the file to check against the DB

    @type filesize: int
    @param filesize: the size of the file to check against the DB

    @type md5sum: string
    @param md5sum: the md5sum of the file to check against the DB

    @type location_id: int
    @param location_id: the id of the location to look in

    @rtype: tuple
    @return: Tuple of length 2.
        - If valid pool file found: (C{True}, C{PoolFile object})
        - If valid pool file not found:
            - (C{False}, C{None}) if no file found
            - (C{False}, C{PoolFile object}) if file found with size/md5sum mismatch
    """
    poolfile = session.query(Location).get(location_id). \
        files.filter_by(filename=filename).first()
    # NOTE(review): the 'valid = False' initialisation is elided in this
    # extract.
    if poolfile and poolfile.is_valid(filesize = filesize, md5sum = md5sum):
        # NOTE(review): 'valid = True' is elided here.

    return (valid, poolfile)
1266 __all__.append('check_poolfile')
# TODO: the implementation can trivially be inlined at the place where the
# function is called
def get_poolfile_by_id(file_id, session=None):
    """
    Returns a PoolFile object or None for the given id.

    @type file_id: int
    @param file_id: the id of the file to look for

    @rtype: PoolFile or None
    @return: either the PoolFile object or None
    """
    return session.query(PoolFile).get(file_id)
1284 __all__.append('get_poolfile_by_id')
1287 def get_poolfile_like_name(filename, session=None):
1289 Returns an array of PoolFile objects which are like the given name
1291 @type filename: string
1292 @param filename: the filename of the file to check against the DB
1295 @return: array of PoolFile objects
1298 # TODO: There must be a way of properly using bind parameters with %FOO%
1299 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1303 __all__.append('get_poolfile_like_name')
1306 def add_poolfile(filename, datadict, location_id, session=None):
1308 Add a new file to the pool
1310 @type filename: string
1311 @param filename: filename
1313 @type datadict: dict
1314 @param datadict: dict with needed data
1316 @type location_id: int
1317 @param location_id: database id of the location
1320 @return: the PoolFile object created
1322 poolfile = PoolFile()
1323 poolfile.filename = filename
1324 poolfile.filesize = datadict["size"]
1325 poolfile.md5sum = datadict["md5sum"]
1326 poolfile.sha1sum = datadict["sha1sum"]
1327 poolfile.sha256sum = datadict["sha256sum"]
1328 poolfile.location_id = location_id
1330 session.add(poolfile)
1331 # Flush to get a file id (NB: This is not a commit)
1336 __all__.append('add_poolfile')
1338 ################################################################################
class Fingerprint(ORMObject):
    """An OpenPGP key fingerprint and its associations ('fingerprint' table)."""

    def __init__(self, fingerprint = None):
        self.fingerprint = fingerprint

    def properties(self):
        # NOTE(review): the list is truncated at the continuation backslash
        # in this extract.
        return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \

    def not_null_constraints(self):
        return ['fingerprint']

__all__.append('Fingerprint')

def get_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).

    @return: the Fingerprint object for the given fpr or None
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_fingerprint')

def get_or_set_fingerprint(fpr, session=None):
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    @param fpr: The fpr to find / add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Fingerprint object for the given fpr
    """
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    # NOTE(review): the try:/q.one() lines are omitted from this extract;
    # the except branch inserts a new row when no match exists.
    except NoResultFound:
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()

__all__.append('get_or_set_fingerprint')
1415 ################################################################################
# Helper routine for Keyring class
def get_ldap_name(entry):
    """Build a display name from an LDAP entry's cn/mn/sn attributes."""
    # NOTE(review): the initialisation of 'name' and the attribute lookup
    # producing 'ret' are omitted from this extract.
    for k in ["cn", "mn", "sn"]:
        # Skip empty or placeholder ("-") name components.
        if ret and ret[0] != "" and ret[0] != "-":
    return " ".join(name)

################################################################################

class Keyring(object):
    # Command line used to list keys and fingerprints of a keyring file in
    # machine-readable (colon-separated) form.
    gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
        " --with-colons --fingerprint --fingerprint"

    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def line are
        # omitted from this extract; the return below belongs to __repr__.
        return '<Keyring %s>' % self.keyring_name
1441 def de_escape_gpg_str(self, txt):
1442 esclist = re.split(r'(\\x..)', txt)
1443 for x in range(1,len(esclist),2):
1444 esclist[x] = "%c" % (int(esclist[x][2:],16))
1445 return "".join(esclist)
    def parse_address(self, uid):
        """parses uid and returns a tuple of real name and email address"""
        (name, address) = email.Utils.parseaddr(uid)
        # Strip any parenthesised comment from the name part.
        name = re.sub(r"\s*[(].*[)]", "", name)
        name = self.de_escape_gpg_str(name)
        # NOTE(review): a fixup step between these lines is omitted from this
        # extract.
        return (name, address)

    def load_keys(self, keyring):
        """Populate self.keys and self.fpr_lookup from a keyring file via gpg."""
        if not self.keyring_id:
            raise Exception('Must be initialized with database information')

        k = os.popen(self.gpg_invocation % keyring, "r")
        # NOTE(review): initialisation of 'key'/'signingkey' and some lines in
        # the pub branch are omitted from this extract.
        for line in k.xreadlines():
            field = line.split(":")
            if field[0] == "pub":
                # New public key record: capture name/email from the uid field.
                (name, addr) = self.parse_address(field[9])
                self.keys[key]["email"] = addr
                self.keys[key]["name"] = name
                self.keys[key]["fingerprints"] = []
            elif key and field[0] == "sub" and len(field) >= 12:
                # Subkey: remember whether it has the signing ("s") capability.
                signingkey = ("s" in field[11])
            elif key and field[0] == "uid":
                (name, addr) = self.parse_address(field[9])
                # Prefer the first uid that actually carries an email address.
                if "email" not in self.keys[key] and "@" in addr:
                    self.keys[key]["email"] = addr
                    self.keys[key]["name"] = name
            elif signingkey and field[0] == "fpr":
                # Fingerprint of a signing-capable key: index it both ways.
                self.keys[key]["fingerprints"].append(field[9])
                self.fpr_lookup[field[9]] = key

    def import_users_from_ldap(self, session):
        """Return (byname, byuid) uid mappings harvested from the Debian LDAP."""
        # NOTE(review): the 'import ldap', cnf setup, byuid/byname
        # initialisation and the loop header over LDAP entries are omitted
        # from this extract.
        LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
        LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
        l = ldap.open(LDAPServer)
        # Anonymous bind is sufficient for this read-only query.
        l.simple_bind_s("","")
        Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
               "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
               ["uid", "keyfingerprint", "cn", "mn", "sn"])

        ldap_fin_uid_id = {}

        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                # NOTE(review): a line (presumably 'continue') is omitted
                # between the if and this assignment in this extract.
                self.keys[key]["uid"] = uid

        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, name)
        byname[uid] = (keyid, name)

        return (byname, byuid)

    def generate_users_from_keyring(self, format, session):
        """Return (byname, byuid) uid mappings generated from the loaded keys."""
        # NOTE(review): byuid/byname initialisation and the else: marker for
        # the valid-email branch are omitted from this extract.
        for x in self.keys.keys():
            if "email" not in self.keys[x]:
                # No usable email: give the key a placeholder uid.
                self.keys[x]["uid"] = format % "invalid-uid"
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

        # Shared fallback entry for keys without a generatable uid.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

        return (byname, byuid)

__all__.append('Keyring')

def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)

    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_keyring')
1572 ################################################################################
class KeyringACLMap(object):
    """Mapping between a keyring and the upload ACL granted to it."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def line are
        # omitted from this extract; the return below belongs to __repr__.
        return '<KeyringACLMap %s>' % self.keyring_acl_map_id

__all__.append('KeyringACLMap')

################################################################################

class DBChange(object):
    """A .changes upload recorded in the database."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def line are
        # omitted from this extract; the return below belongs to __repr__.
        return '<DBChange %s>' % self.changesname

    def clean_from_queue(self):
        """Drop all queue-related state for this upload."""
        session = DBConn().session().object_session(self)

        # Remove changes_pool_files entries

        # Remove changes_pending_files references

        # Clear out of queue
        self.in_queue = None
        self.approved_for_id = None

__all__.append('DBChange')

def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)

    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_dbchange')
1632 ################################################################################
class Location(ORMObject):
    """An on-disk location files can live in, per archive/component."""

    def __init__(self, path = None):
        # NOTE(review): the assignment of self.path is omitted from this
        # extract.
        # the column 'type' should go away, see comment at mapper
        self.archive_type = 'pool'

    def properties(self):
        return ['path', 'archive_type', 'component', 'files_count']

    def not_null_constraints(self):
        return ['path', 'archive_type']

__all__.append('Location')

def get_location(location, component=None, archive=None, session=None):
    """
    Returns Location object for the given combination of location, component
    and archive.

    @type location: string
    @param location: the path of the location, e.g. I{/srv/ftp-master.debian.org/ftp/pool/}

    @type component: string
    @param component: the component name (if None, no restriction applied)

    @type archive: string
    @param archive: the archive name (if None, no restriction applied)

    @rtype: Location / None
    @return: Either a Location object or None if one can't be found
    """
    q = session.query(Location).filter_by(path=location)

    if archive is not None:
        q = q.join(Archive).filter_by(archive_name=archive)

    if component is not None:
        q = q.join(Component).filter_by(component_name=component)

    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_location')
1682 ################################################################################
class Maintainer(ORMObject):
    """A maintainer name/email as found in control files."""

    def __init__(self, name = None):
        # NOTE(review): the assignment of self.name is omitted from this
        # extract.

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        # NOTE(review): the returned list is omitted from this extract.

    def get_split_maintainer(self):
        """Split self.name via fix_maintainer(); empty strings when unset."""
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')

        return fix_maintainer(self.name.strip())

__all__.append('Maintainer')

def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied). If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    # NOTE(review): the try:/q.one() lines are omitted from this extract; the
    # except branch inserts a new row when no match exists.
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()

__all__.append('get_or_set_maintainer')

def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @return: the Maintainer with this C{maintainer_id}
    """
    return session.query(Maintainer).get(maintainer_id)

__all__.append('get_maintainer')
1753 ################################################################################
class NewComment(object):
    """An ftpmaster comment on a package sitting in the NEW queue."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def line are
        # omitted from this extract; the return below belongs to __repr__.
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)

__all__.append('NewComment')

def has_new_comment(package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)
    """
    q = session.query(NewComment)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # NOTE(review): the bool() wrapper is redundant — the comparison already
    # yields a bool.
    return bool(q.count() > 0)

__all__.append('has_new_comment')

def get_new_comments(package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters.

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment)
    if package is not None: q = q.filter_by(package=package)
    if version is not None: q = q.filter_by(version=version)
    if comment_id is not None: q = q.filter_by(comment_id=comment_id)
    # NOTE(review): the return (presumably q.all()) is omitted from this
    # extract.

__all__.append('get_new_comments')
1823 ################################################################################
class Override(object):
    """An override entry fixing section/priority for a package in a suite."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def line are
        # omitted from this extract; the return below belongs to __repr__.
        return '<Override %s (%s)>' % (self.package, self.suite_id)

__all__.append('Override')

def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to. If
    None, don't limit. Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
    limit to. If None, don't limit. Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: A (possibly empty) list of Override objects will be returned
    """
    q = session.query(Override)
    q = q.filter_by(package=package)

    # Each filter accepts a scalar or a list; normalise to a list first.
    if suite is not None:
        if not isinstance(suite, list): suite = [suite]
        q = q.join(Suite).filter(Suite.suite_name.in_(suite))

    if component is not None:
        if not isinstance(component, list): component = [component]
        q = q.join(Component).filter(Component.component_name.in_(component))

    if overridetype is not None:
        if not isinstance(overridetype, list): overridetype = [overridetype]
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype))

    # NOTE(review): the return (presumably q.all()) is omitted from this
    # extract.

__all__.append('get_override')

################################################################################

class OverrideType(object):
    """A type of override (e.g. deb, udeb, dsc)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def line are
        # omitted from this extract; the return below belongs to __repr__.
        return '<OverrideType %s>' % self.overridetype

__all__.append('OverrideType')

def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: the database id for the given override type
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_override_type')
1918 ################################################################################
1920 class DebContents(object):
1921 def __init__(self, *args, **kwargs):
1925 return '<DebConetnts %s: %s>' % (self.package.package,self.file)
1927 __all__.append('DebContents')
1930 class UdebContents(object):
1931 def __init__(self, *args, **kwargs):
1935 return '<UdebConetnts %s: %s>' % (self.package.package,self.file)
1937 __all__.append('UdebContents')
class PendingBinContents(object):
    """Contents entries for a binary package still waiting in a queue."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def line are
        # omitted from this extract; the return below belongs to __repr__.
        return '<PendingBinContents %s>' % self.contents_id

__all__.append('PendingBinContents')
# NOTE(review): this extract omits lines throughout this function: the rest
# of the parameter list after 'package', the 'if session is None:' guard
# before the DBConn() call, the is_udeb if/else markers around the pca.type
# assignments, session.add/commit calls and the return values.
def insert_pending_content_paths(package,
    """
    Make sure given paths are temporarily associated with given
    package.

    @param package: the package to associate with should have been read in from the binary control file
    @type fullpaths: list
    @param fullpaths: the list of paths of the file being associated with the binary
    @type session: SQLAlchemy session
    @param session: Optional SQLAlchemy session. If this is passed, the caller
    is responsible for ensuring a transaction has begun and committing the
    results or rolling back based on the result code. If not passed, a commit
    will be performed at the end of the function

    @return: True upon success, False if there is a problem
    """
    privatetrans = False
    session = DBConn().session()

    arch = get_architecture(package['Architecture'], session)
    arch_id = arch.arch_id

    # Remove any already existing recorded files for this package
    q = session.query(PendingBinContents)
    q = q.filter_by(package=package['Package'])
    q = q.filter_by(version=package['Version'])
    q = q.filter_by(architecture=arch_id)

    for fullpath in fullpaths:
        # Normalise leading "./" so paths are stored relative.
        if fullpath.startswith( "./" ):
            fullpath = fullpath[2:]

        pca = PendingBinContents()
        pca.package = package['Package']
        pca.version = package['Version']
        pca.architecture = arch_id
        pca.type = 8 # gross
        pca.type = 7 # also gross

    # Only commit if we set up the session ourself
    except Exception, e:
        traceback.print_exc()

    # Only rollback if we set up the session ourself

__all__.append('insert_pending_content_paths')
2023 ################################################################################
class PolicyQueue(object):
    """A policy queue (e.g. NEW, BYHAND) uploads can be held in."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body and the __repr__ def line are
        # omitted from this extract; the return below belongs to __repr__.
        return '<PolicyQueue %s>' % self.queue_name

__all__.append('PolicyQueue')

def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_policy_queue')

def get_policy_queue_from_path(pathname, session=None):
    """
    Returns PolicyQueue object for given C{path name}

    @type pathname: string
    @param pathname: The path

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(path=pathname)
    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_policy_queue_from_path')
2084 ################################################################################
class Priority(object):
    """An override priority (e.g. required, standard, optional, extra)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body is omitted from this extract.

    def __eq__(self, val):
        # Allow comparison directly against the priority name string.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the __repr__ def line is omitted from this extract; the
    # return below belongs to it.
        return '<Priority %s (%s)>' % (self.priority, self.priority_id)

__all__.append('Priority')

def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)
    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_priority')

def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of priority names -> id mappings
    """
    q = session.query(Priority)
    # NOTE(review): the 'ret' initialisation, the loop header over q and the
    # return are omitted from this extract.
    ret[x.priority] = x.priority_id

__all__.append('get_priorities')
2154 ################################################################################
class Section(object):
    """An override section (e.g. admin, libs, net)."""
    def __init__(self, *args, **kwargs):
        # NOTE(review): the __init__ body is omitted from this extract.

    def __eq__(self, val):
        # Allow comparison directly against the section name string.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    # NOTE(review): the __repr__ def line is omitted from this extract; the
    # return below belongs to it.
        return '<Section %s>' % self.section

__all__.append('Section')

def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)
    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_section')

def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: dictionary of section names -> id mappings
    """
    q = session.query(Section)
    # NOTE(review): the 'ret' initialisation, the loop header over q and the
    # return are omitted from this extract.
    ret[x.section] = x.section_id

__all__.append('get_sections')
2224 ################################################################################
class DBSource(ORMObject):
    """A source package version in the archive ('source' table)."""

    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date

    def properties(self):
        # NOTE(review): the list is truncated at the continuation backslash
        # in this extract.
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \

    def not_null_constraints(self):
        return ['source', 'version', 'maintainer', 'changedby', \
            'poolfile', 'install_date']

__all__.append('DBSource')

# NOTE(review): the mutable default argument suites=["any"] is shared across
# calls — safe only as long as it is never mutated; verify.
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match => 1.0-3
      2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # NOTE(review): several lines of this function (cnf setup, the 's' suite
    # list construction and loop over maps, the count check per suite and the
    # final return values) are omitted from this extract.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))

        # source must exist in suite X, or in some other suite that's
        # mapped to X, recursively... silent-maps are counted too,
        # unreleased-maps aren't.
        maps = cnf.ValueList("SuiteMappings")[:]
        maps = [ m.split() for m in maps ]
        maps = [ (x[1], x[2]) for x in maps
                 if x[0] == "map" or x[0] == "silent-map" ]
        if x[1] in s and x[0] not in s:
        q = q.filter(DBSource.suites.any(Suite.suite_name.in_(s)))

    # No source found so return not ok

__all__.append('source_exists')

def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @param source: DBSource package name to search for

    @return: list of Suite objects for the given source
    """
    return session.query(Suite).filter(Suite.sources.any(source=source)).all()

__all__.append('get_suites_source_in')

def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    if version is not None:
        q = q.filter_by(version=version)

    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    # NOTE(review): the return (presumably q.all()) is omitted from this
    # extract.

__all__.append('get_sources_from_name')

# FIXME: This function fails badly if it finds more than 1 source package and
# its implementation is trivial enough to be inlined.

def get_source_in_suite(source, suite, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @param suite: the suite name

    @return: the version for I{source} in I{suite}
    """
    q = get_suite(suite, session).get_sources(source)
    # NOTE(review): the try:/q.one() lines and both return statements are
    # omitted from this extract.
    except NoResultFound:

__all__.append('get_source_in_suite')
2388 ################################################################################
def add_dsc_to_db(u, filename, session=None):
    """Record a .dsc upload: create the DBSource row, its pool files,
    dsc_files entries, suite associations and src_uploaders rows.

    Returns (source, dsc_component, dsc_location_id, pfs).

    NOTE(review): this extract omits lines throughout: the DBSource()/pfs
    initialisation, DSCFile/DscFile object creation, session flushes, the
    else: marker before the existing-poolfile branch, and the added_ids
    bookkeeping.
    """
    entry = u.pkg.files[filename]
    source.source = u.pkg.dsc["source"]
    source.version = u.pkg.dsc["version"] # NB: not files[file]["version"], that has no epoch
    source.maintainer_id = get_or_set_maintainer(u.pkg.dsc["maintainer"], session).maintainer_id
    source.changedby_id = get_or_set_maintainer(u.pkg.changes["changed-by"], session).maintainer_id
    source.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    source.install_date = datetime.now().date()

    dsc_component = entry["component"]
    dsc_location_id = entry["location id"]

    source.dm_upload_allowed = (u.pkg.dsc.get("dm-upload-allowed", '') == "yes")

    # Set up a new poolfile if necessary
    if not entry.has_key("files id") or not entry["files id"]:
        filename = entry["pool name"] + filename
        poolfile = add_poolfile(filename, entry, dsc_location_id, session)
        pfs.append(poolfile)
        entry["files id"] = poolfile.file_id

    source.poolfile_id = entry["files id"]

    # Associate the source with every suite named in the .changes file.
    suite_names = u.pkg.changes["distribution"].keys()
    source.suites = session.query(Suite). \
        filter(Suite.suite_name.in_(suite_names)).all()

    # Add the source files to the DB (files and dsc_files)
    dscfile.source_id = source.source_id
    dscfile.poolfile_id = entry["files id"]
    session.add(dscfile)

    for dsc_file, dentry in u.pkg.dsc_files.items():
        df.source_id = source.source_id

        # If the .orig tarball is already in the pool, it's
        # files id is stored in dsc_files by check_dsc().
        files_id = dentry.get("files id", None)

        # Find the entry in the files hash
        # TODO: Bail out here properly
        for f, e in u.pkg.files.items():

        if files_id is None:
            filename = dfentry["pool name"] + dsc_file

            (found, obj) = check_poolfile(filename, dentry["size"], dentry["md5sum"], dsc_location_id)
            # FIXME: needs to check for -1/-2 and or handle exception
            if found and obj is not None:
                files_id = obj.file_id

            # If still not found, add it
            if files_id is None:
                # HACK: Force sha1sum etc into dentry
                dentry["sha1sum"] = dfentry["sha1sum"]
                dentry["sha256sum"] = dfentry["sha256sum"]
                poolfile = add_poolfile(filename, dentry, dsc_location_id, session)
                pfs.append(poolfile)
                files_id = poolfile.file_id

            poolfile = get_poolfile_by_id(files_id, session)
            if poolfile is None:
                utils.fubar("INTERNAL ERROR. Found no poolfile with id %d" % files_id)
            pfs.append(poolfile)

        df.poolfile_id = files_id

    # Add the src_uploaders to the DB
    uploader_ids = [source.maintainer_id]
    if u.pkg.dsc.has_key("uploaders"):
        # Uploaders are comma-separated after a closing ">"; split on that.
        for up in u.pkg.dsc["uploaders"].replace(">, ", ">\t").split("\t"):
            uploader_ids.append(get_or_set_maintainer(up, session).maintainer_id)

    for up_id in uploader_ids:
        if added_ids.has_key(up_id):
            utils.warn("Already saw uploader %s for source %s" % (up_id, source.source))

        su.maintainer_id = up_id
        su.source_id = source.source_id

    return source, dsc_component, dsc_location_id, pfs

__all__.append('add_dsc_to_db')
def add_deb_to_db(u, filename, session=None):
    """
    Contrary to what you might expect, this routine deals with both
    debs and udebs.  That info is in 'dbtype', whilst 'type' is
    'deb' for both of them

    NOTE(review): this extract omits lines: the DBBinary() creation, the
    else: marker between the existing/new poolfile branches, the
    session.add/flush of 'bin', session.add(ba) and the final return.
    """
    entry = u.pkg.files[filename]
    bin.package = entry["package"]
    bin.version = entry["version"]
    bin.maintainer_id = get_or_set_maintainer(entry["maintainer"], session).maintainer_id
    bin.fingerprint_id = get_or_set_fingerprint(u.pkg.changes["fingerprint"], session).fingerprint_id
    bin.arch_id = get_architecture(entry["architecture"], session).arch_id
    bin.binarytype = entry["dbtype"]

    # Resolve the pool file and its location for this binary.
    filename = entry["pool name"] + filename
    fullpath = os.path.join(cnf["Dir::Pool"], filename)
    if not entry.get("location id", None):
        entry["location id"] = get_location(cnf["Dir::Pool"], entry["component"], session=session).location_id

    if entry.get("files id", None):
        poolfile = get_poolfile_by_id(bin.poolfile_id)
        bin.poolfile_id = entry["files id"]
        poolfile = add_poolfile(filename, entry, entry["location id"], session)
        bin.poolfile_id = entry["files id"] = poolfile.file_id

    # Every binary upload must map to exactly one source package version.
    bin_sources = get_sources_from_name(entry["source package"], entry["source version"], session=session)
    if len(bin_sources) != 1:
        # Python 2 raise syntax; this module targets Python 2.
        raise NoSourceFieldError, "Unable to find a unique source id for %s (%s), %s, file %s, type %s, signed by %s" % \
            (bin.package, bin.version, entry["architecture"],
             filename, bin.binarytype, u.pkg.changes["fingerprint"])

    bin.source_id = bin_sources[0].source_id

    # Add and flush object so it has an ID

    # Add BinAssociations
    for suite_name in u.pkg.changes["distribution"].keys():
        ba = BinAssociation()
        ba.binary_id = bin.binary_id
        ba.suite_id = get_suite(suite_name).suite_id

    # Deal with contents - disabled for now
    #contents = copy_temporary_contents(bin.package, bin.version, bin.architecture.arch_string, os.path.basename(filename), None, session)
    #    print "REJECT\nCould not determine contents of package %s" % bin.package
    #    session.rollback()
    #    raise MissingContents, "No contents stored for package %s, and couldn't determine contents of %s" % (bin.package, filename)

__all__.append('add_deb_to_db')
2562 ################################################################################
# ORM class for one row of the source_acl table.  Instance attributes
# (e.g. source_acl_id) are attached by the SQLAlchemy mapper configured in
# DBConn.__setupmappers; __init__ deliberately accepts and ignores any args.
# NOTE(review): this fragment is missing lines (original numbering jumps
# 2565 -> 2569): the __init__ body and the "def __repr__(self):" header.
2564 class SourceACL(object):
2565 def __init__(self, *args, **kwargs):
2569 return '<SourceACL %s>' % self.source_acl_id
2571 __all__.append('SourceACL')
2573 ################################################################################
# ORM class for one row of the src_format table; format_name is mapped from
# the table in DBConn.__setupmappers (presumably values like "1.0" or
# "3.0 (quilt)" — confirm against the database).
# NOTE(review): missing lines here (2577-2579): the __init__ body and the
# "def __repr__(self):" header.
2575 class SrcFormat(object):
2576 def __init__(self, *args, **kwargs):
2580 return '<SrcFormat %s>' % (self.format_name)
2582 __all__.append('SrcFormat')
2584 ################################################################################
# ORM class for one row of the src_uploaders table (links a source package
# to a Maintainer via the mapper in DBConn.__setupmappers).
# NOTE(review): missing lines here (2588-2590): the __init__ body and the
# "def __repr__(self):" header.
2586 class SrcUploader(object):
2587 def __init__(self, *args, **kwargs):
2591 return '<SrcUploader %s>' % self.uploader_id
2593 __all__.append('SrcUploader')
2595 ################################################################################
# (display name, Suite attribute name) pairs used when dumping a suite's
# settings as "Name: value" lines (see the formatting loop in Suite below).
# NOTE(review): original line 2601 is missing from this fragment — likely
# one more field tuple between 'Origin' and 'Description'; confirm against
# the full file before relying on this list being complete.
2597 SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
2598 ('SuiteID', 'suite_id'),
2599 ('Version', 'version'),
2600 ('Origin', 'origin'),
2602 ('Description', 'description'),
2603 ('Untouchable', 'untouchable'),
2604 ('Announce', 'announce'),
2605 ('Codename', 'codename'),
2606 ('OverrideCodename', 'overridecodename'),
2607 ('ValidTime', 'validtime'),
2608 ('Priority', 'priority'),
2609 ('NotAutomatic', 'notautomatic'),
2610 ('CopyChanges', 'copychanges'),
2611 ('OverrideSuite', 'overridesuite')]
2613 # The suite table currently has no UNIQUE constraints.
2614 # TODO: Add UNIQUE constraints for the appropriate columns (e.g. suite_name).
# Suite: ORM class for one row of the suite table (a distribution suite).
# Comparing a Suite against a plain string compares the suite name, so
# callers can write `suite == "unstable"`; any other operand returns
# NotImplemented so Python falls back to the default comparison.
# NOTE(review): several original lines are missing from this fragment
# (2619, 2637-2639, 2642, 2648, 2653-2654, 2657-2659, 2661-2662, 2664,
# 2666, 2671, 2673, 2675, 2678, 2681-2683, 2686-2687) — docstring
# delimiters, guard conditions and continuations; see notes inline.
2615 class Suite(ORMObject):
2616 def __init__(self, suite_name = None, version = None):
2617 self.suite_name = suite_name
2618 self.version = version
2620 def properties(self):
2621 return ['suite_name', 'version']
2623 def not_null_constraints(self):
2624 return ['suite_name', 'version']
2626 def __eq__(self, val):
2627 if isinstance(val, str):
2628 return (self.suite_name == val)
2629 # This signals to use the normal comparison operator
2630 return NotImplemented
2632 def __ne__(self, val):
2633 if isinstance(val, str):
2634 return (self.suite_name != val)
2635 # This signals to use the normal comparison operator
2636 return NotImplemented
# NOTE(review): the method header for this loop (a details()/dump-style
# method over SUITE_FIELDS), the "ret = []" initialisation, and the
# conditional guarding the append (original 2637-2639, 2642) are missing.
2640 for disp, field in SUITE_FIELDS:
2641 val = getattr(self, field, None)
2643 ret.append("%s: %s" % (disp, val))
2645 return "\n".join(ret)
2647 def get_architectures(self, skipsrc=False, skipall=False):
2649 Returns list of Architecture objects
2651 @type skipsrc: boolean
2652 @param skipsrc: Whether to skip returning the 'source' architecture entry
2655 @type skipall: boolean
2656 @param skipall: Whether to skip returning the 'all' architecture entry
2660 @return: list of Architecture objects for the given name (may be empty)
# NOTE(review): the "if skipsrc:" and "if skipall:" guard lines (original
# 2664 and 2666) around the two filters below are missing here.
2663 q = object_session(self).query(Architecture).with_parent(self)
2665 q = q.filter(Architecture.arch_string != 'source')
2667 q = q.filter(Architecture.arch_string != 'all')
2668 return q.order_by(Architecture.arch_string).all()
2670 def get_sources(self, source):
2672 Returns a query object representing DBSource that is part of C{suite}.
2674 - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
2676 @type source: string
2677 @param source: source package name
2679 @rtype: sqlalchemy.orm.query.Query
2680 @return: a query of DBSource
2684 session = object_session(self)
# NOTE(review): the continuation of this return statement (original
# 2686-2687, presumably a join restricting the query to this suite) is
# missing from this fragment.
2685 return session.query(DBSource).filter_by(source = source). \
2688 __all__.append('Suite')
# Look up a Suite row by suite name; per the docstring, returns None when
# the suite does not exist.
# NOTE(review): missing lines here (2692, 2694-2695, 2697, 2701-2702,
# 2704-2705, 2707-2709, 2711-2712) include the docstring delimiters, the
# decorator (presumably dak's @session_wrapper, which supplies a temporary
# session — confirm), the try/q.one() call, and the "return None" branch.
2691 def get_suite(suite, session=None):
2693 Returns Suite object for given C{suite name}.
2696 @param suite: The name of the suite
2698 @type session: Session
2699 @param session: Optional SQLA session object (a temporary one will be
2700 generated if not supplied)
2703 @return: Suite object for the requested suite name (None if not present)
2706 q = session.query(Suite).filter_by(suite_name=suite)
2710 except NoResultFound:
2713 __all__.append('get_suite')
2715 ################################################################################
2717 # TODO: consider removing get_suite_architectures below; it is a trivial
# wrapper around Suite.get_architectures().
# Thin wrapper: resolves the suite by name and delegates to
# Suite.get_architectures (see the TODO above about removing it).
# NOTE(review): missing lines here include the docstring delimiters and
# (presumably) a @session_wrapper decorator — confirm against the full file.
2719 def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
2721 Returns list of Architecture objects for given C{suite} name
2724 @param suite: Suite name to search for
2726 @type skipsrc: boolean
2727 @param skipsrc: Whether to skip returning the 'source' architecture entry
2730 @type skipall: boolean
2731 @param skipall: Whether to skip returning the 'all' architecture entry
2734 @type session: Session
2735 @param session: Optional SQL session object (a temporary one will be
2736 generated if not supplied)
2739 @return: list of Architecture objects for the given name (may be empty)
2742 return get_suite(suite, session).get_architectures(skipsrc, skipall)
2744 __all__.append('get_suite_architectures')
2746 ################################################################################
# ORM class for one row of the suite_src_formats table (which source
# formats a suite accepts); suite_id / src_format_id come from the mapper.
# NOTE(review): missing lines here (2750-2752): the __init__ body and the
# "def __repr__(self):" header.
2748 class SuiteSrcFormat(object):
2749 def __init__(self, *args, **kwargs):
2753 return '<SuiteSrcFormat (%s, %s)>' % (self.suite_id, self.src_format_id)
2755 __all__.append('SuiteSrcFormat')
# Query the SrcFormat rows allowed for the named suite, ordered by
# format_name (joins src_format -> suite_src_formats -> suite).
# NOTE(review): missing lines here include the docstring delimiters, a
# presumed @session_wrapper decorator, and the final "return q.all()"
# (original 2777-2779) — confirm against the full file.
2758 def get_suite_src_formats(suite, session=None):
2760 Returns list of allowed SrcFormat for C{suite}.
2763 @param suite: Suite name to search for
2765 @type session: Session
2766 @param session: Optional SQL session object (a temporary one will be
2767 generated if not supplied)
2770 @return: the list of allowed source formats for I{suite}
2773 q = session.query(SrcFormat)
2774 q = q.join(SuiteSrcFormat)
2775 q = q.join(Suite).filter_by(suite_name=suite)
2776 q = q.order_by('format_name')
2780 __all__.append('get_suite_src_formats')
2782 ################################################################################
# Methods of the Uid ORM class (uid table).  Like Suite, comparing against
# a plain string compares the uid value; other operands fall back to the
# default comparison via NotImplemented.
# NOTE(review): the "class Uid(object):" header itself, the __init__ body,
# and the "def __repr__(self):" header fall in lines missing from this
# fragment (original 2783-2784, 2786-2788, 2800-2801) — the methods below
# are class members even though the header is not visible here.
2785 def __init__(self, uid = None, name = None):
2789 def __eq__(self, val):
2790 if isinstance(val, str):
2791 return (self.uid == val)
2792 # This signals to use the normal comparison operator
2793 return NotImplemented
2795 def __ne__(self, val):
2796 if isinstance(val, str):
2797 return (self.uid != val)
2798 # This signals to use the normal comparison operator
2799 return NotImplemented
2802 return '<Uid %s (%s)>' % (self.uid, self.name)
2804 __all__.append('Uid')
# Fetch the Uid row for uidname, inserting a new row first when absent
# (commit_or_flush commits when this function owns the session, otherwise
# flushes and leaves committing to the caller — per the docstring below).
# NOTE(review): missing lines here include the docstring delimiters, the
# try/q.one() call, and the Uid construction + session.add in the
# NoResultFound branch (original 2826-2828, 2830-2832, 2834-2837).
2807 def get_or_set_uid(uidname, session=None):
2809 Returns uid object for given uidname.
2811 If no matching uidname is found, a row is inserted.
2813 @type uidname: string
2814 @param uidname: The uid to add
2816 @type session: SQLAlchemy
2817 @param session: Optional SQL session object (a temporary one will be
2818 generated if not supplied). If not passed, a commit will be performed at
2819 the end of the function, otherwise the caller is responsible for committing.
2822 @return: the uid object for the given uidname
2825 q = session.query(Uid).filter_by(uid=uidname)
2829 except NoResultFound:
2833 session.commit_or_flush()
2838 __all__.append('get_or_set_uid')
# Return the Uid joined to the given fingerprint string.
# NOTE(review): the try/q.one() call and the NoResultFound return value
# (presumably None) are in lines missing from this fragment (original
# 2844-2846, 2848-2849) — confirm against the full file.
2841 def get_uid_from_fingerprint(fpr, session=None):
2842 q = session.query(Uid)
2843 q = q.join(Fingerprint).filter_by(fingerprint=fpr)
2847 except NoResultFound:
2850 __all__.append('get_uid_from_fingerprint')
2852 ################################################################################
# ORM class for one row of the upload_blocks table (blocks uploads for a
# source package); source/upload_block_id/fingerprint/uid are mapped in
# DBConn.__setupmappers.
# NOTE(review): missing lines here (2856-2858): the __init__ body and the
# "def __repr__(self):" header.
2854 class UploadBlock(object):
2855 def __init__(self, *args, **kwargs):
2859 return '<UploadBlock %s (%s)>' % (self.source, self.upload_block_id)
2861 __all__.append('UploadBlock')
2863 ################################################################################
# DBConn: shared-state ("Borg") singleton for the database connection —
# every instance rebinds its __dict__ to the class-level __shared_state,
# so all DBConn() instances share one engine, metadata and mapper setup.
# The `initialised` flag guards one-time setup; `debug` enables SQL echo
# (see create_engine in __createconn).  Still Python 2 (has_key).
# NOTE(review): missing lines here include the class docstring opener, the
# "__shared_state = {}" declaration, and the trailing self.__createconn()
# call (original 2866, 2868-2870, 2873, 2877-2878) — confirm.
2865 class DBConn(object):
2867 database module init.
2871 def __init__(self, *args, **kwargs):
2872 self.__dict__ = self.__shared_state
2874 if not getattr(self, 'initialised', False):
2875 self.initialised = True
2876 self.debug = kwargs.has_key('debug')
# Reflect the database schema into SQLAlchemy Table objects and attach
# them as self.tbl_<name> / self.view_<name>.  Tables in
# tables_with_primary get an explicit Column('id', ...) because SQLAlchemy
# 0.5 cannot reflect the SERIAL primary key (see the comment below).
# NOTE(review): large parts of all three name tuples are elided in this
# fragment (numbering gaps), including the closing parens and the
# "views = (" opener before the view names — confirm against the full file.
2879 def __setuptables(self):
2880 tables_with_primary = (
2891 'changes_pending_binaries',
2892 'changes_pending_files',
2893 'changes_pending_source',
2903 'pending_bin_contents',
2915 # The following tables have primary keys but sqlalchemy
2916 # version 0.5 fails to reflect them correctly with database
2917 # versions before upgrade #41.
2919 #'build_queue_files',
2922 tables_no_primary = (
2924 'changes_pending_files_map',
2925 'changes_pending_source_files',
2926 'changes_pool_files',
2929 'suite_architectures',
2930 'suite_src_formats',
2931 'suite_build_queue_copy',
2933 # see the comment above
2935 'build_queue_files',
# NOTE(review): the names below belong to the `views` tuple; its opener is
# in a missing line.
2939 'almost_obsolete_all_associations',
2940 'almost_obsolete_src_associations',
2941 'any_associations_source',
2942 'bin_assoc_by_arch',
2943 'bin_associations_binaries',
2944 'binaries_suite_arch',
2945 'binfiles_suite_component_arch',
2948 'newest_all_associations',
2949 'newest_any_associations',
2951 'newest_src_association',
2952 'obsolete_all_associations',
2953 'obsolete_any_associations',
2954 'obsolete_any_by_all_associations',
2955 'obsolete_src_associations',
2957 'src_associations_bin',
2958 'src_associations_src',
2959 'suite_arch_by_name',
2962 # Sqlalchemy version 0.5 fails to reflect the SERIAL type
2963 # correctly and that is why we have to use a workaround. It can
2964 # be removed as soon as we switch to version 0.6.
2965 for table_name in tables_with_primary:
2966 table = Table(table_name, self.db_meta, \
2967 Column('id', Integer, primary_key = True), \
2968 autoload=True, useexisting=True)
2969 setattr(self, 'tbl_%s' % table_name, table)
2971 for table_name in tables_no_primary:
2972 table = Table(table_name, self.db_meta, autoload=True)
2973 setattr(self, 'tbl_%s' % table_name, table)
2975 for view_name in views:
2976 view = Table(view_name, self.db_meta, autoload=True)
2977 setattr(self, 'view_%s' % view_name, view)
# Wire each ORM class to its reflected table.  relation()/backref() define
# the inter-object links; `extension = validator` attaches the module's
# MapperExtension-based validator (defined elsewhere in this file) to the
# classes that declare not_null_constraints.
# NOTE(review): blank lines between mapper() calls are elided in this
# fragment; see the inline note on DBSource.suites about a missing
# continuation line.
2979 def __setupmappers(self):
2980 mapper(Architecture, self.tbl_architecture,
2981 properties = dict(arch_id = self.tbl_architecture.c.id,
2982 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2983 order_by='suite_name',
2984 backref=backref('architectures', order_by='arch_string'))),
2985 extension = validator)
2987 mapper(Archive, self.tbl_archive,
2988 properties = dict(archive_id = self.tbl_archive.c.id,
2989 archive_name = self.tbl_archive.c.name))
2991 mapper(BinAssociation, self.tbl_bin_associations,
2992 properties = dict(ba_id = self.tbl_bin_associations.c.id,
2993 suite_id = self.tbl_bin_associations.c.suite,
2994 suite = relation(Suite),
2995 binary_id = self.tbl_bin_associations.c.bin,
2996 binary = relation(DBBinary)))
2998 mapper(PendingBinContents, self.tbl_pending_bin_contents,
2999 properties = dict(contents_id =self.tbl_pending_bin_contents.c.id,
3000 filename = self.tbl_pending_bin_contents.c.filename,
3001 package = self.tbl_pending_bin_contents.c.package,
3002 version = self.tbl_pending_bin_contents.c.version,
3003 arch = self.tbl_pending_bin_contents.c.arch,
3004 otype = self.tbl_pending_bin_contents.c.type))
3006 mapper(DebContents, self.tbl_deb_contents,
3007 properties = dict(binary_id=self.tbl_deb_contents.c.binary_id,
3008 package=self.tbl_deb_contents.c.package,
3009 suite=self.tbl_deb_contents.c.suite,
3010 arch=self.tbl_deb_contents.c.arch,
3011 section=self.tbl_deb_contents.c.section,
3012 filename=self.tbl_deb_contents.c.filename))
3014 mapper(UdebContents, self.tbl_udeb_contents,
3015 properties = dict(binary_id=self.tbl_udeb_contents.c.binary_id,
3016 package=self.tbl_udeb_contents.c.package,
3017 suite=self.tbl_udeb_contents.c.suite,
3018 arch=self.tbl_udeb_contents.c.arch,
3019 section=self.tbl_udeb_contents.c.section,
3020 filename=self.tbl_udeb_contents.c.filename))
3022 mapper(BuildQueue, self.tbl_build_queue,
3023 properties = dict(queue_id = self.tbl_build_queue.c.id))
3025 mapper(BuildQueueFile, self.tbl_build_queue_files,
3026 properties = dict(buildqueue = relation(BuildQueue, backref='queuefiles'),
3027 poolfile = relation(PoolFile, backref='buildqueueinstances')))
3029 mapper(DBBinary, self.tbl_binaries,
3030 properties = dict(binary_id = self.tbl_binaries.c.id,
3031 package = self.tbl_binaries.c.package,
3032 version = self.tbl_binaries.c.version,
3033 maintainer_id = self.tbl_binaries.c.maintainer,
3034 maintainer = relation(Maintainer),
3035 source_id = self.tbl_binaries.c.source,
3036 source = relation(DBSource),
3037 arch_id = self.tbl_binaries.c.architecture,
3038 architecture = relation(Architecture),
3039 poolfile_id = self.tbl_binaries.c.file,
3040 poolfile = relation(PoolFile),
3041 binarytype = self.tbl_binaries.c.type,
3042 fingerprint_id = self.tbl_binaries.c.sig_fpr,
3043 fingerprint = relation(Fingerprint),
3044 install_date = self.tbl_binaries.c.install_date,
3045 binassociations = relation(BinAssociation,
3046 primaryjoin=(self.tbl_binaries.c.id==self.tbl_bin_associations.c.bin))))
3048 mapper(BinaryACL, self.tbl_binary_acl,
3049 properties = dict(binary_acl_id = self.tbl_binary_acl.c.id))
3051 mapper(BinaryACLMap, self.tbl_binary_acl_map,
3052 properties = dict(binary_acl_map_id = self.tbl_binary_acl_map.c.id,
3053 fingerprint = relation(Fingerprint, backref="binary_acl_map"),
3054 architecture = relation(Architecture)))
3056 mapper(Component, self.tbl_component,
3057 properties = dict(component_id = self.tbl_component.c.id,
3058 component_name = self.tbl_component.c.name))
3060 mapper(DBConfig, self.tbl_config,
3061 properties = dict(config_id = self.tbl_config.c.id))
3063 mapper(DSCFile, self.tbl_dsc_files,
3064 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
3065 source_id = self.tbl_dsc_files.c.source,
3066 source = relation(DBSource),
3067 poolfile_id = self.tbl_dsc_files.c.file,
3068 poolfile = relation(PoolFile)))
3070 mapper(PoolFile, self.tbl_files,
3071 properties = dict(file_id = self.tbl_files.c.id,
3072 filesize = self.tbl_files.c.size,
3073 location_id = self.tbl_files.c.location,
3074 location = relation(Location,
3075 # using lazy='dynamic' in the back
3076 # reference because we have A LOT of
3077 # files in one location
3078 backref=backref('files', lazy='dynamic'))),
3079 extension = validator)
3081 mapper(Fingerprint, self.tbl_fingerprint,
3082 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
3083 uid_id = self.tbl_fingerprint.c.uid,
3084 uid = relation(Uid),
3085 keyring_id = self.tbl_fingerprint.c.keyring,
3086 keyring = relation(Keyring),
3087 source_acl = relation(SourceACL),
3088 binary_acl = relation(BinaryACL)),
3089 extension = validator)
3091 mapper(Keyring, self.tbl_keyrings,
3092 properties = dict(keyring_name = self.tbl_keyrings.c.name,
3093 keyring_id = self.tbl_keyrings.c.id))
3095 mapper(DBChange, self.tbl_changes,
3096 properties = dict(change_id = self.tbl_changes.c.id,
3097 poolfiles = relation(PoolFile,
3098 secondary=self.tbl_changes_pool_files,
3099 backref="changeslinks"),
3100 seen = self.tbl_changes.c.seen,
3101 source = self.tbl_changes.c.source,
3102 binaries = self.tbl_changes.c.binaries,
3103 architecture = self.tbl_changes.c.architecture,
3104 distribution = self.tbl_changes.c.distribution,
3105 urgency = self.tbl_changes.c.urgency,
3106 maintainer = self.tbl_changes.c.maintainer,
3107 changedby = self.tbl_changes.c.changedby,
3108 date = self.tbl_changes.c.date,
3109 version = self.tbl_changes.c.version,
3110 files = relation(ChangePendingFile,
3111 secondary=self.tbl_changes_pending_files_map,
3112 backref="changesfile"),
3113 in_queue_id = self.tbl_changes.c.in_queue,
3114 in_queue = relation(PolicyQueue,
3115 primaryjoin=(self.tbl_changes.c.in_queue==self.tbl_policy_queue.c.id)),
3116 approved_for_id = self.tbl_changes.c.approved_for))
3118 mapper(ChangePendingBinary, self.tbl_changes_pending_binaries,
3119 properties = dict(change_pending_binary_id = self.tbl_changes_pending_binaries.c.id))
3121 mapper(ChangePendingFile, self.tbl_changes_pending_files,
3122 properties = dict(change_pending_file_id = self.tbl_changes_pending_files.c.id,
3123 filename = self.tbl_changes_pending_files.c.filename,
3124 size = self.tbl_changes_pending_files.c.size,
3125 md5sum = self.tbl_changes_pending_files.c.md5sum,
3126 sha1sum = self.tbl_changes_pending_files.c.sha1sum,
3127 sha256sum = self.tbl_changes_pending_files.c.sha256sum))
3129 mapper(ChangePendingSource, self.tbl_changes_pending_source,
3130 properties = dict(change_pending_source_id = self.tbl_changes_pending_source.c.id,
3131 change = relation(DBChange),
3132 maintainer = relation(Maintainer,
3133 primaryjoin=(self.tbl_changes_pending_source.c.maintainer_id==self.tbl_maintainer.c.id)),
3134 changedby = relation(Maintainer,
3135 primaryjoin=(self.tbl_changes_pending_source.c.changedby_id==self.tbl_maintainer.c.id)),
3136 fingerprint = relation(Fingerprint),
3137 source_files = relation(ChangePendingFile,
3138 secondary=self.tbl_changes_pending_source_files,
3139 backref="pending_sources")))
3142 mapper(KeyringACLMap, self.tbl_keyring_acl_map,
3143 properties = dict(keyring_acl_map_id = self.tbl_keyring_acl_map.c.id,
3144 keyring = relation(Keyring, backref="keyring_acl_map"),
3145 architecture = relation(Architecture)))
3147 mapper(Location, self.tbl_location,
3148 properties = dict(location_id = self.tbl_location.c.id,
3149 component_id = self.tbl_location.c.component,
3150 component = relation(Component),
3151 archive_id = self.tbl_location.c.archive,
3152 archive = relation(Archive),
3153 # FIXME: the 'type' column is old cruft and
3154 # should be removed in the future.
3155 archive_type = self.tbl_location.c.type),
3156 extension = validator)
3158 mapper(Maintainer, self.tbl_maintainer,
3159 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
3160 maintains_sources = relation(DBSource, backref='maintainer',
3161 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
3162 changed_sources = relation(DBSource, backref='changedby',
3163 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
3164 extension = validator)
3166 mapper(NewComment, self.tbl_new_comments,
3167 properties = dict(comment_id = self.tbl_new_comments.c.id))
3169 mapper(Override, self.tbl_override,
3170 properties = dict(suite_id = self.tbl_override.c.suite,
3171 suite = relation(Suite),
3172 package = self.tbl_override.c.package,
3173 component_id = self.tbl_override.c.component,
3174 component = relation(Component),
3175 priority_id = self.tbl_override.c.priority,
3176 priority = relation(Priority),
3177 section_id = self.tbl_override.c.section,
3178 section = relation(Section),
3179 overridetype_id = self.tbl_override.c.type,
3180 overridetype = relation(OverrideType)))
3182 mapper(OverrideType, self.tbl_override_type,
3183 properties = dict(overridetype = self.tbl_override_type.c.type,
3184 overridetype_id = self.tbl_override_type.c.id))
3186 mapper(PolicyQueue, self.tbl_policy_queue,
3187 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id))
3189 mapper(Priority, self.tbl_priority,
3190 properties = dict(priority_id = self.tbl_priority.c.id))
3192 mapper(Section, self.tbl_section,
3193 properties = dict(section_id = self.tbl_section.c.id,
3194 section=self.tbl_section.c.section))
3196 mapper(DBSource, self.tbl_source,
3197 properties = dict(source_id = self.tbl_source.c.id,
3198 version = self.tbl_source.c.version,
3199 maintainer_id = self.tbl_source.c.maintainer,
3200 poolfile_id = self.tbl_source.c.file,
3201 poolfile = relation(PoolFile, backref=backref('source', uselist = False)),
3202 fingerprint_id = self.tbl_source.c.sig_fpr,
3203 fingerprint = relation(Fingerprint),
3204 changedby_id = self.tbl_source.c.changedby,
3205 srcfiles = relation(DSCFile,
3206 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
# NOTE(review): the continuation of the `suites` relation (original 3208,
# presumably a backref argument) is missing from this fragment.
3207 suites = relation(Suite, secondary=self.tbl_src_associations,
3209 srcuploaders = relation(SrcUploader)),
3210 extension = validator)
3212 mapper(SourceACL, self.tbl_source_acl,
3213 properties = dict(source_acl_id = self.tbl_source_acl.c.id))
3215 mapper(SrcFormat, self.tbl_src_format,
3216 properties = dict(src_format_id = self.tbl_src_format.c.id,
3217 format_name = self.tbl_src_format.c.format_name))
3219 mapper(SrcUploader, self.tbl_src_uploaders,
3220 properties = dict(uploader_id = self.tbl_src_uploaders.c.id,
3221 source_id = self.tbl_src_uploaders.c.source,
3222 source = relation(DBSource,
3223 primaryjoin=(self.tbl_src_uploaders.c.source==self.tbl_source.c.id)),
3224 maintainer_id = self.tbl_src_uploaders.c.maintainer,
3225 maintainer = relation(Maintainer,
3226 primaryjoin=(self.tbl_src_uploaders.c.maintainer==self.tbl_maintainer.c.id))))
3228 mapper(Suite, self.tbl_suite,
3229 properties = dict(suite_id = self.tbl_suite.c.id,
3230 policy_queue = relation(PolicyQueue),
3231 copy_queues = relation(BuildQueue,
3232 secondary=self.tbl_suite_build_queue_copy)),
3233 extension = validator)
3235 mapper(SuiteSrcFormat, self.tbl_suite_src_formats,
3236 properties = dict(suite_id = self.tbl_suite_src_formats.c.suite,
3237 suite = relation(Suite, backref='suitesrcformats'),
3238 src_format_id = self.tbl_suite_src_formats.c.src_format,
3239 src_format = relation(SrcFormat)))
3241 mapper(Uid, self.tbl_uid,
3242 properties = dict(uid_id = self.tbl_uid.c.id,
3243 fingerprint = relation(Fingerprint)))
3245 mapper(UploadBlock, self.tbl_upload_blocks,
3246 properties = dict(upload_block_id = self.tbl_upload_blocks.c.id,
3247 fingerprint = relation(Fingerprint, backref="uploadblocks"),
3248 uid = relation(Uid, backref="uploadblocks")))
# Build the PostgreSQL connection string from the dak Config (TCP form
# "postgres://host:port/name" when DB::Host is set; otherwise the
# unix-socket form "postgres:///name?port=..."), create the engine and
# session factory, then reflect tables and set up mappers.
# NOTE(review): missing lines here include the "cnf = Config()" binding
# and the if/else around the two connstr branches (original 3253-3255,
# 3260-3261), the sessionmaker keyword continuation (3270-3272), and the
# "def session(self):" header before the final return (3276) — confirm.
3250 ## Connection functions
3251 def __createconn(self):
3252 from config import Config
3256 connstr = "postgres://%s" % cnf["DB::Host"]
3257 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3258 connstr += ":%s" % cnf["DB::Port"]
3259 connstr += "/%s" % cnf["DB::Name"]
3262 connstr = "postgres:///%s" % cnf["DB::Name"]
3263 if cnf["DB::Port"] and cnf["DB::Port"] != "-1":
3264 connstr += "?port=%s" % cnf["DB::Port"]
3266 self.db_pg = create_engine(connstr, echo=self.debug)
3267 self.db_meta = MetaData()
3268 self.db_meta.bind = self.db_pg
3269 self.db_smaker = sessionmaker(bind=self.db_pg,
3273 self.__setuptables()
3274 self.__setupmappers()
3277 return self.db_smaker()
3279 __all__.append('DBConn')