5 @contact: Debian FTPMaster <ftpmaster@debian.org>
6 @copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
7 @copyright: 2008-2009 Mark Hymers <mhy@debian.org>
8 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
9 @copyright: 2009 Mike O'Connor <stew@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ################################################################################
29 # < mhy> I need a funny comment
30 # < sgran> two peanuts were walking down a dark street
31 # < sgran> one was a-salted
32 # * mhy looks up the definition of "funny"
34 ################################################################################
37 import daklib.daksubprocess
39 from os.path import normpath
50 import simplejson as json
52 from datetime import datetime, timedelta
53 from errno import ENOENT
54 from tempfile import mkstemp, mkdtemp
55 from tarfile import TarFile
57 from inspect import getargspec
60 from sqlalchemy import create_engine, Table, MetaData, Column, Integer, desc, \
62 from sqlalchemy.orm import sessionmaker, mapper, relation, object_session, \
63 backref, MapperExtension, EXT_CONTINUE, object_mapper, clear_mappers
64 from sqlalchemy import types as sqltypes
65 from sqlalchemy.orm.collections import attribute_mapped_collection
66 from sqlalchemy.ext.associationproxy import association_proxy
68 # Don't remove this, we re-export the exceptions to scripts which import us
69 from sqlalchemy.exc import *
70 from sqlalchemy.orm.exc import NoResultFound
72 # Only import Config until Queue stuff is changed to store its config
74 from config import Config
75 from textutils import fix_maintainer
76 from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
78 # suppress some deprecation warnings in squeeze related to sqlalchemy
80 warnings.filterwarnings('ignore', \
81 "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'.*", \
83 warnings.filterwarnings('ignore', \
84 "Predicate of partial index .* ignored during reflection", \
88 ################################################################################
90 # Patch in support for the debversion field type so that it works during
94 # that is for sqlalchemy 0.6
95 UserDefinedType = sqltypes.UserDefinedType
97 # this one for sqlalchemy 0.5
98 UserDefinedType = sqltypes.TypeEngine
100 class DebVersion(UserDefinedType):
101 def get_col_spec(self):
104 def bind_processor(self, dialect):
107 # ' = None' is needed for sqlalchemy 0.5:
108 def result_processor(self, dialect, coltype = None):
111 sa_major_version = sqlalchemy.__version__[0:3]
112 if sa_major_version in ["0.5", "0.6", "0.7", "0.8", "0.9"]:
113 from sqlalchemy.databases import postgres
114 postgres.ischema_names['debversion'] = DebVersion
116 raise Exception("dak only ported to SQLA versions 0.5 to 0.9. See daklib/dbconn.py")
118 ################################################################################
120 __all__ = ['IntegrityError', 'SQLAlchemyError', 'DebVersion']
122 ################################################################################
124 def session_wrapper(fn):
126 Wrapper around common ".., session=None):" handling. If the wrapped
127 function is called without passing 'session', we create a local one
128 and destroy it when the function ends.
130 Also attaches a commit_or_flush method to the session; if we created a
131 local session, this is a synonym for session.commit(), otherwise it is a
132 synonym for session.flush().
135 def wrapped(*args, **kwargs):
136 private_transaction = False
138 # Find the session object
139 session = kwargs.get('session')
142 if len(args) <= len(getargspec(fn)[0]) - 1:
143 # No session specified as last argument or in kwargs
144 private_transaction = True
145 session = kwargs['session'] = DBConn().session()
147 # Session is last argument in args
151 session = args[-1] = DBConn().session()
152 private_transaction = True
154 if private_transaction:
155 session.commit_or_flush = session.commit
157 session.commit_or_flush = session.flush
160 return fn(*args, **kwargs)
162 if private_transaction:
163 # We created a session; close it.
166 wrapped.__doc__ = fn.__doc__
167 wrapped.func_name = fn.func_name
171 __all__.append('session_wrapper')
173 ################################################################################
175 class ORMObject(object):
177 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All
178 derived classes must implement the properties() method.
181 def properties(self):
183 This method should be implemented by all derived classes and returns a
184 list of the important properties. The properties 'created' and
185 'modified' will be added automatically. A suffix '_count' should be
186 added to properties that are lists or query objects. The most important
187 property name should be returned as the first element in the list
188 because it is used by repr().
194 Returns a JSON representation of the object based on the properties
195 returned from the properties() method.
198 # add created and modified
199 all_properties = self.properties() + ['created', 'modified']
200 for property in all_properties:
201 # check for list or query
202 if property[-6:] == '_count':
203 real_property = property[:-6]
204 if not hasattr(self, real_property):
206 value = getattr(self, real_property)
207 if hasattr(value, '__len__'):
210 elif hasattr(value, 'count'):
211 # query (but not during validation)
212 if self.in_validation:
214 value = value.count()
216 raise KeyError('Do not understand property %s.' % property)
218 if not hasattr(self, property):
221 value = getattr(self, property)
225 elif isinstance(value, ORMObject):
226 # use repr() for ORMObject types
229 # we want a string for all other types because json cannot
232 data[property] = value
233 return json.dumps(data)
237 Returns the name of the class.
239 return type(self).__name__
243 Returns a short string representation of the object using the first
244 element from the properties() method.
246 primary_property = self.properties()[0]
247 value = getattr(self, primary_property)
248 return '<%s %s>' % (self.classname(), str(value))
252 Returns a human readable form of the object using the properties()
255 return '<%s %s>' % (self.classname(), self.json())
257 def not_null_constraints(self):
259 Returns a list of properties that must be not NULL. Derived classes
260 should override this method if needed.
264 validation_message = \
265 "Validation failed because property '%s' must not be empty in object\n%s"
267 in_validation = False
271 This function validates the not NULL constraints as returned by
272 not_null_constraints(). It raises the DBUpdateError exception if
275 for property in self.not_null_constraints():
276 # TODO: It is a bit awkward that the mapper configuration allow
277 # directly setting the numeric _id columns. We should get rid of it
279 if hasattr(self, property + '_id') and \
280 getattr(self, property + '_id') is not None:
282 if not hasattr(self, property) or getattr(self, property) is None:
283 # str() might lead to races due to a 2nd flush
284 self.in_validation = True
285 message = self.validation_message % (property, str(self))
286 self.in_validation = False
287 raise DBUpdateError(message)
291 def get(cls, primary_key, session = None):
293 This is a support function that allows getting an object by its primary
296 Architecture.get(3[, session])
298 instead of the more verbose
300 session.query(Architecture).get(3)
302 return session.query(cls).get(primary_key)
304 def session(self, replace = False):
306 Returns the current session that is associated with the object. May
307 return None is object is in detached state.
310 return object_session(self)
312 def clone(self, session = None):
314 Clones the current object in a new session and returns the new clone. A
315 fresh session is created if the optional session parameter is not
316 provided. The function will fail if a session is provided and has
319 RATIONALE: SQLAlchemy's session is not thread safe. This method clones
320 an existing object to allow several threads to work with their own
321 instances of an ORMObject.
323 WARNING: Only persistent (committed) objects can be cloned. Changes
324 made to the original object that are not committed yet will get lost.
325 The session of the new object will always be rolled back to avoid
329 if self.session() is None:
330 raise RuntimeError( \
331 'Method clone() failed for detached object:\n%s' % self)
332 self.session().flush()
333 mapper = object_mapper(self)
334 primary_key = mapper.primary_key_from_instance(self)
335 object_class = self.__class__
337 session = DBConn().session()
338 elif len(session.new) + len(session.dirty) + len(session.deleted) > 0:
339 raise RuntimeError( \
340 'Method clone() failed due to unflushed changes in session.')
341 new_object = session.query(object_class).get(primary_key)
343 if new_object is None:
344 raise RuntimeError( \
345 'Method clone() failed for non-persistent object:\n%s' % self)
348 __all__.append('ORMObject')
350 ################################################################################
352 class Validator(MapperExtension):
354 This class calls the validate() method for each instance for the
355 'before_update' and 'before_insert' events. A global object validator is
356 used for configuring the individual mappers.
359 def before_update(self, mapper, connection, instance):
363 def before_insert(self, mapper, connection, instance):
367 validator = Validator()
369 ################################################################################
371 class ACL(ORMObject):
373 return "<ACL {0}>".format(self.name)
375 __all__.append('ACL')
377 class ACLPerSource(ORMObject):
379 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format(self.acl.name, self.fingerprint.fingerprint, self.source, self.reason)
381 __all__.append('ACLPerSource')
383 ################################################################################
class Architecture(ORMObject):
    """ORM class for one row of the architecture table.

    Instances compare equal to plain strings that match their
    arch_string, e.g. Architecture('amd64') == 'amd64'.
    """

    def __init__(self, arch_string = None, description = None):
        self.arch_string = arch_string
        self.description = description

    def __eq__(self, val):
        # Allow direct comparison against a bare architecture name.
        if not isinstance(val, str):
            # Fall back to the default comparison machinery.
            return NotImplemented
        return self.arch_string == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.arch_string != val

    def properties(self):
        # 'arch_string' comes first: ORMObject.__repr__() uses it.
        return ['arch_string', 'arch_id', 'suites_count']

    def not_null_constraints(self):
        return ['arch_string']
408 __all__.append('Architecture')
411 def get_architecture(architecture, session=None):
413 Returns database id for given C{architecture}.
415 @type architecture: string
416 @param architecture: The name of the architecture
418 @type session: Session
419 @param session: Optional SQLA session object (a temporary one will be
420 generated if not supplied)
423 @return: Architecture object for the given arch (None if not present)
426 q = session.query(Architecture).filter_by(arch_string=architecture)
430 except NoResultFound:
433 __all__.append('get_architecture')
435 ################################################################################
437 class Archive(object):
438 def __init__(self, *args, **kwargs):
442 return '<Archive %s>' % self.archive_name
444 __all__.append('Archive')
447 def get_archive(archive, session=None):
449 returns database id for given C{archive}.
451 @type archive: string
452     @param archive: the name of the archive
454 @type session: Session
455 @param session: Optional SQLA session object (a temporary one will be
456 generated if not supplied)
459 @return: Archive object for the given name (None if not present)
462 archive = archive.lower()
464 q = session.query(Archive).filter_by(archive_name=archive)
468 except NoResultFound:
471 __all__.append('get_archive')
473 ################################################################################
475 class ArchiveFile(object):
476 def __init__(self, archive=None, component=None, file=None):
477 self.archive = archive
478 self.component = component
482 return os.path.join(self.archive.path, 'pool', self.component.component_name, self.file.filename)
484 __all__.append('ArchiveFile')
486 ################################################################################
488 class BinContents(ORMObject):
489 def __init__(self, file = None, binary = None):
493 def properties(self):
494 return ['file', 'binary']
496 __all__.append('BinContents')
498 ################################################################################
500 class DBBinary(ORMObject):
501 def __init__(self, package = None, source = None, version = None, \
502 maintainer = None, architecture = None, poolfile = None, \
503 binarytype = 'deb', fingerprint=None):
504 self.package = package
506 self.version = version
507 self.maintainer = maintainer
508 self.architecture = architecture
509 self.poolfile = poolfile
510 self.binarytype = binarytype
511 self.fingerprint = fingerprint
515 return self.binary_id
517 def properties(self):
518 return ['package', 'version', 'maintainer', 'source', 'architecture', \
519 'poolfile', 'binarytype', 'fingerprint', 'install_date', \
520 'suites_count', 'binary_id', 'contents_count', 'extra_sources']
522 def not_null_constraints(self):
523 return ['package', 'version', 'maintainer', 'source', 'poolfile', \
526 metadata = association_proxy('key', 'value')
528 def scan_contents(self):
530 Yields the contents of the package. Only regular files are yielded and
531 the path names are normalized after converting them from either utf-8
532 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the
533 package does not contain any regular file.
535 fullpath = self.poolfile.fullpath
536 dpkg_cmd = ('dpkg-deb', '--fsys-tarfile', fullpath)
537 dpkg = daklib.daksubprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE)
538 tar = TarFile.open(fileobj = dpkg.stdout, mode = 'r|')
539 for member in tar.getmembers():
540 if not member.isdir():
541 name = normpath(member.name)
542 # enforce proper utf-8 encoding
545 except UnicodeDecodeError:
546 name = name.decode('iso8859-1').encode('utf-8')
552 def read_control(self):
554 Reads the control information from a binary.
557 @return: stanza text of the control section.
560 fullpath = self.poolfile.fullpath
561 with open(fullpath, 'r') as deb_file:
562 return utils.deb_extract_control(deb_file)
564 def read_control_fields(self):
566 Reads the control information from a binary and return
570 @return: fields of the control section as a dictionary.
572 stanza = self.read_control()
573 return apt_pkg.TagSection(stanza)
575 __all__.append('DBBinary')
578 def get_suites_binary_in(package, session=None):
580 Returns list of Suite objects which given C{package} name is in
583 @param package: DBBinary package name to search for
586 @return: list of Suite objects for the given package
589 return session.query(Suite).filter(Suite.binaries.any(DBBinary.package == package)).all()
591 __all__.append('get_suites_binary_in')
594 def get_component_by_package_suite(package, suite_list, arch_list=[], session=None):
596 Returns the component name of the newest binary package in suite_list or
597 None if no package is found. The result can be optionally filtered by a list
598 of architecture names.
601 @param package: DBBinary package name to search for
603 @type suite_list: list of str
604 @param suite_list: list of suite_name items
606 @type arch_list: list of str
607 @param arch_list: optional list of arch_string items that defaults to []
609 @rtype: str or NoneType
610 @return: name of component or None
613 q = session.query(DBBinary).filter_by(package = package). \
614 join(DBBinary.suites).filter(Suite.suite_name.in_(suite_list))
615 if len(arch_list) > 0:
616 q = q.join(DBBinary.architecture). \
617 filter(Architecture.arch_string.in_(arch_list))
618 binary = q.order_by(desc(DBBinary.version)).first()
622 return binary.poolfile.component.component_name
624 __all__.append('get_component_by_package_suite')
626 ################################################################################
628 class BuildQueue(object):
629 def __init__(self, *args, **kwargs):
633 return '<BuildQueue %s>' % self.queue_name
635 __all__.append('BuildQueue')
637 ################################################################################
class Component(ORMObject):
    """ORM class for one row of the component table (e.g. main, contrib).

    Instances compare equal to plain strings matching their component_name.
    """

    def __init__(self, component_name = None):
        self.component_name = component_name

    def __eq__(self, val):
        # Support comparison with a bare component name string.
        if not isinstance(val, str):
            # Fall back to the default comparison machinery.
            return NotImplemented
        return self.component_name == val

    def __ne__(self, val):
        if not isinstance(val, str):
            return NotImplemented
        return self.component_name != val

    def properties(self):
        # 'component_name' comes first: ORMObject.__repr__() uses it.
        return ['component_name', 'component_id', 'description',
            'meets_dfsg', 'overrides_count']

    def not_null_constraints(self):
        return ['component_name']
663 __all__.append('Component')
666 def get_component(component, session=None):
668 Returns database id for given C{component}.
670 @type component: string
671 @param component: The name of the override type
674 @return: the database id for the given component
677 component = component.lower()
679 q = session.query(Component).filter_by(component_name=component)
683 except NoResultFound:
686 __all__.append('get_component')
689 def get_mapped_component(component_name, session=None):
690 """get component after mappings
692 Evaluate component mappings from ComponentMappings in dak.conf for the
693 given component name.
695 @todo: ansgar wants to get rid of this. It's currently only used for
698 @type component_name: str
699 @param component_name: component name
701 @param session: database session
703 @rtype: L{daklib.dbconn.Component} or C{None}
704 @return: component after applying maps or C{None}
707 for m in cnf.value_list("ComponentMappings"):
708 (src, dst) = m.split()
709 if component_name == src:
711 component = session.query(Component).filter_by(component_name=component_name).first()
714 __all__.append('get_mapped_component')
717 def get_component_names(session=None):
719 Returns list of strings of component names.
722 @return: list of strings of component names
725 return [ x.component_name for x in session.query(Component).all() ]
727 __all__.append('get_component_names')
729 ################################################################################
731 class DBConfig(object):
732 def __init__(self, *args, **kwargs):
736 return '<DBConfig %s>' % self.name
738 __all__.append('DBConfig')
740 ################################################################################
743 def get_or_set_contents_file_id(filename, session=None):
745 Returns database id for given filename.
747 If no matching file is found, a row is inserted.
749 @type filename: string
750 @param filename: The filename
751 @type session: SQLAlchemy
752 @param session: Optional SQL session object (a temporary one will be
753 generated if not supplied). If not passed, a commit will be performed at
754     the end of the function, otherwise the caller is responsible for committing.
757 @return: the database id for the given component
760 q = session.query(ContentFilename).filter_by(filename=filename)
763 ret = q.one().cafilename_id
764 except NoResultFound:
765 cf = ContentFilename()
766 cf.filename = filename
768 session.commit_or_flush()
769 ret = cf.cafilename_id
773 __all__.append('get_or_set_contents_file_id')
776 def get_contents(suite, overridetype, section=None, session=None):
778 Returns contents for a suite / overridetype combination, limiting
779 to a section if not None.
782 @param suite: Suite object
784 @type overridetype: OverrideType
785 @param overridetype: OverrideType object
787 @type section: Section
788 @param section: Optional section object to limit results to
790 @type session: SQLAlchemy
791 @param session: Optional SQL session object (a temporary one will be
792 generated if not supplied)
795 @return: ResultsProxy object set up to return tuples of (filename, section,
799 # find me all of the contents for a given suite
800 contents_q = """SELECT (p.path||'/'||n.file) AS fn,
804 FROM content_associations c join content_file_paths p ON (c.filepath=p.id)
805 JOIN content_file_names n ON (c.filename=n.id)
806 JOIN binaries b ON (b.id=c.binary_pkg)
807 JOIN override o ON (o.package=b.package)
808 JOIN section s ON (s.id=o.section)
809 WHERE o.suite = :suiteid AND o.type = :overridetypeid
810 AND b.type=:overridetypename"""
812 vals = {'suiteid': suite.suite_id,
813 'overridetypeid': overridetype.overridetype_id,
814 'overridetypename': overridetype.overridetype}
816 if section is not None:
817 contents_q += " AND s.id = :sectionid"
818 vals['sectionid'] = section.section_id
820 contents_q += " ORDER BY fn"
822 return session.execute(contents_q, vals)
824 __all__.append('get_contents')
826 ################################################################################
828 class ContentFilepath(object):
829 def __init__(self, *args, **kwargs):
833 return '<ContentFilepath %s>' % self.filepath
835 __all__.append('ContentFilepath')
838 def get_or_set_contents_path_id(filepath, session=None):
840 Returns database id for given path.
842 If no matching file is found, a row is inserted.
844 @type filepath: string
845 @param filepath: The filepath
847 @type session: SQLAlchemy
848 @param session: Optional SQL session object (a temporary one will be
849 generated if not supplied). If not passed, a commit will be performed at
850     the end of the function, otherwise the caller is responsible for committing.
853 @return: the database id for the given path
856 q = session.query(ContentFilepath).filter_by(filepath=filepath)
859 ret = q.one().cafilepath_id
860 except NoResultFound:
861 cf = ContentFilepath()
862 cf.filepath = filepath
864 session.commit_or_flush()
865 ret = cf.cafilepath_id
869 __all__.append('get_or_set_contents_path_id')
871 ################################################################################
873 class ContentAssociation(object):
874 def __init__(self, *args, **kwargs):
878 return '<ContentAssociation %s>' % self.ca_id
880 __all__.append('ContentAssociation')
882 def insert_content_paths(binary_id, fullpaths, session=None):
884 Make sure given path is associated with given binary id
887 @param binary_id: the id of the binary
888 @type fullpaths: list
889 @param fullpaths: the list of paths of the file being associated with the binary
890 @type session: SQLAlchemy session
891 @param session: Optional SQLAlchemy session. If this is passed, the caller
892 is responsible for ensuring a transaction has begun and committing the
893 results or rolling back based on the result code. If not passed, a commit
894 will be performed at the end of the function, otherwise the caller is
895     responsible for committing.
897 @return: True upon success
902 session = DBConn().session()
907 def generate_path_dicts():
908 for fullpath in fullpaths:
909 if fullpath.startswith( './' ):
910 fullpath = fullpath[2:]
912 yield {'filename':fullpath, 'id': binary_id }
914 for d in generate_path_dicts():
915 session.execute( "INSERT INTO bin_contents ( file, binary_id ) VALUES ( :filename, :id )",
924 traceback.print_exc()
926 # Only rollback if we set up the session ourself
933 __all__.append('insert_content_paths')
935 ################################################################################
937 class DSCFile(object):
938 def __init__(self, *args, **kwargs):
942 return '<DSCFile %s>' % self.dscfile_id
944 __all__.append('DSCFile')
947 def get_dscfiles(dscfile_id=None, source_id=None, poolfile_id=None, session=None):
949 Returns a list of DSCFiles which may be empty
951 @type dscfile_id: int (optional)
952 @param dscfile_id: the dscfile_id of the DSCFiles to find
954 @type source_id: int (optional)
955 @param source_id: the source id related to the DSCFiles to find
957 @type poolfile_id: int (optional)
958 @param poolfile_id: the poolfile id related to the DSCFiles to find
961 @return: Possibly empty list of DSCFiles
964 q = session.query(DSCFile)
966 if dscfile_id is not None:
967 q = q.filter_by(dscfile_id=dscfile_id)
969 if source_id is not None:
970 q = q.filter_by(source_id=source_id)
972 if poolfile_id is not None:
973 q = q.filter_by(poolfile_id=poolfile_id)
977 __all__.append('get_dscfiles')
979 ################################################################################
981 class ExternalOverride(ORMObject):
982 def __init__(self, *args, **kwargs):
986 return '<ExternalOverride %s = %s: %s>' % (self.package, self.key, self.value)
988 __all__.append('ExternalOverride')
990 ################################################################################
992 class PoolFile(ORMObject):
993 def __init__(self, filename = None, filesize = -1, \
995 self.filename = filename
996 self.filesize = filesize
1001 session = DBConn().session().object_session(self)
1002 af = session.query(ArchiveFile).join(Archive) \
1003 .filter(ArchiveFile.file == self) \
1004 .order_by(Archive.tainted.desc()).first()
1008 def component(self):
1009 session = DBConn().session().object_session(self)
1010 component_id = session.query(ArchiveFile.component_id).filter(ArchiveFile.file == self) \
1011 .group_by(ArchiveFile.component_id).one()
1012 return session.query(Component).get(component_id)
1016 return os.path.basename(self.filename)
1018 def is_valid(self, filesize = -1, md5sum = None):
1019 return self.filesize == long(filesize) and self.md5sum == md5sum
1021 def properties(self):
1022 return ['filename', 'file_id', 'filesize', 'md5sum', 'sha1sum', \
1023 'sha256sum', 'source', 'binary', 'last_used']
1025 def not_null_constraints(self):
1026 return ['filename', 'md5sum']
1028 def identical_to(self, filename):
1030 compare size and hash with the given file
1033 @return: true if the given file has the same size and hash as this object; false otherwise
1035 st = os.stat(filename)
1036 if self.filesize != st.st_size:
1039 f = open(filename, "r")
1040 sha256sum = apt_pkg.sha256sum(f)
1041 if sha256sum != self.sha256sum:
1046 __all__.append('PoolFile')
1049 def get_poolfile_like_name(filename, session=None):
1051 Returns an array of PoolFile objects which are like the given name
1053 @type filename: string
1054 @param filename: the filename of the file to check against the DB
1057 @return: array of PoolFile objects
1060 # TODO: There must be a way of properly using bind parameters with %FOO%
1061 q = session.query(PoolFile).filter(PoolFile.filename.like('%%/%s' % filename))
1065 __all__.append('get_poolfile_like_name')
1067 ################################################################################
1069 class Fingerprint(ORMObject):
1070 def __init__(self, fingerprint = None):
1071 self.fingerprint = fingerprint
1073 def properties(self):
1074 return ['fingerprint', 'fingerprint_id', 'keyring', 'uid', \
1077 def not_null_constraints(self):
1078 return ['fingerprint']
1080 __all__.append('Fingerprint')
1083 def get_fingerprint(fpr, session=None):
1085 Returns Fingerprint object for given fpr.
1088 @param fpr: The fpr to find / add
1090 @type session: SQLAlchemy
1091 @param session: Optional SQL session object (a temporary one will be
1092 generated if not supplied).
1095 @return: the Fingerprint object for the given fpr or None
1098 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1102 except NoResultFound:
1107 __all__.append('get_fingerprint')
1110 def get_or_set_fingerprint(fpr, session=None):
1112 Returns Fingerprint object for given fpr.
1114 If no matching fpr is found, a row is inserted.
1117 @param fpr: The fpr to find / add
1119 @type session: SQLAlchemy
1120 @param session: Optional SQL session object (a temporary one will be
1121 generated if not supplied). If not passed, a commit will be performed at
1122    the end of the function, otherwise the caller is responsible for committing.
1123 A flush will be performed either way.
1126 @return: the Fingerprint object for the given fpr
1129 q = session.query(Fingerprint).filter_by(fingerprint=fpr)
1133 except NoResultFound:
1134 fingerprint = Fingerprint()
1135 fingerprint.fingerprint = fpr
1136 session.add(fingerprint)
1137 session.commit_or_flush()
1142 __all__.append('get_or_set_fingerprint')
1144 ################################################################################
1146 # Helper routine for Keyring class
1147 def get_ldap_name(entry):
1149 for k in ["cn", "mn", "sn"]:
1151 if ret and ret[0] != "" and ret[0] != "-":
1153 return " ".join(name)
1155 ################################################################################
1157 class Keyring(object):
1158 gpg_invocation = "gpg --no-default-keyring --keyring %s" +\
1159 " --with-colons --fingerprint --fingerprint"
1164 def __init__(self, *args, **kwargs):
1168 return '<Keyring %s>' % self.keyring_name
1170 def de_escape_gpg_str(self, txt):
1171 esclist = re.split(r'(\\x..)', txt)
1172 for x in range(1,len(esclist),2):
1173 esclist[x] = "%c" % (int(esclist[x][2:],16))
1174 return "".join(esclist)
1176 def parse_address(self, uid):
1177 """parses uid and returns a tuple of real name and email address"""
1179 (name, address) = email.Utils.parseaddr(uid)
1180 name = re.sub(r"\s*[(].*[)]", "", name)
1181 name = self.de_escape_gpg_str(name)
1184 return (name, address)
1186 def load_keys(self, keyring):
1187 if not self.keyring_id:
1188 raise Exception('Must be initialized with database information')
1190 k = os.popen(self.gpg_invocation % keyring, "r")
1192 need_fingerprint = False
1195 field = line.split(":")
1196 if field[0] == "pub":
1199 (name, addr) = self.parse_address(field[9])
1201 self.keys[key]["email"] = addr
1202 self.keys[key]["name"] = name
1203 need_fingerprint = True
1204 elif key and field[0] == "uid":
1205 (name, addr) = self.parse_address(field[9])
1206 if "email" not in self.keys[key] and "@" in addr:
1207 self.keys[key]["email"] = addr
1208 self.keys[key]["name"] = name
1209 elif need_fingerprint and field[0] == "fpr":
1210 self.keys[key]["fingerprints"] = [field[9]]
1211 self.fpr_lookup[field[9]] = key
1212 need_fingerprint = False
def import_users_from_ldap(self, session):
    """Look up Debian uids for this keyring's fingerprints in LDAP.

    Records the uid both in self.keys and in the database (via
    get_or_set_uid).

    @rtype: tuple
    @return: (byname, byuid) mapping uid -> (uid_id, name) and
        uid_id -> (uid, name)
    """
    import ldap
    cnf = Config()

    LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
    LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
    ca_cert_file = cnf.get('Import-LDAP-Fingerprints::CACertFile')

    l = ldap.open(LDAPServer)

    if ca_cert_file:
        # TODO: This should request a new context and use
        # connection-specific options (i.e. "l.set_option(...)")

        # Request a new TLS context. If there was already one, libldap
        # would not change the TLS options (like which CAs to trust).
        #l.set_option(ldap.OPT_X_TLS_NEWCTX, True)
        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_HARD)
        #ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, None)
        ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, ca_cert_file)
        l.start_tls_s()

    # Anonymous bind; the fingerprint attributes are public.
    l.simple_bind_s("","")
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
           "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
           ["uid", "keyfingerprint", "cn", "mn", "sn"])

    # Fix: removed the unused local "ldap_fin_uid_id" which was
    # initialised here but never read or written afterwards.
    byuid = {}
    byname = {}

    for i in Attrs:
        entry = i[1]
        uid = entry["uid"][0]
        name = get_ldap_name(entry)
        fingerprints = entry["keyFingerPrint"]
        keyid = None
        for f in fingerprints:
            key = self.fpr_lookup.get(f, None)
            if key not in self.keys:
                continue
            self.keys[key]["uid"] = uid

            # Only register the uid in the database once per entry.
            if keyid is not None:
                continue
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, name)
            byname[uid] = (keyid, name)

    return (byname, byuid)
def generate_users_from_keyring(self, format, session):
    """Create database uids for keys that have no LDAP mapping.

    @type format: string
    @param format: "%s" pattern used to build the uid from the key's
        email address (or from "invalid-uid" when none is usable)

    @rtype: tuple
    @return: (byname, byuid) mapping uid -> (uid_id, name) and
        uid_id -> (uid, name)
    """
    byuid = {}
    byname = {}
    any_invalid = False

    for x in self.keys.keys():
        if "email" not in self.keys[x]:
            # No usable address: map this key to the shared invalid uid.
            any_invalid = True
            self.keys[x]["uid"] = format % "invalid-uid"
        else:
            uid = format % self.keys[x]["email"]
            keyid = get_or_set_uid(uid, session).uid_id
            byuid[keyid] = (uid, self.keys[x]["name"])
            byname[uid] = (keyid, self.keys[x]["name"])
            self.keys[x]["uid"] = uid

    if any_invalid:
        # Register the catch-all invalid uid exactly once.
        uid = format % "invalid-uid"
        keyid = get_or_set_uid(uid, session).uid_id
        byuid[keyid] = (uid, "ungeneratable user id")
        byname[uid] = (keyid, "ungeneratable user id")

    return (byname, byuid)
1289 __all__.append('Keyring')
def get_keyring(keyring, session=None):
    """
    If C{keyring} does not have an entry in the C{keyrings} table yet, return None
    If C{keyring} already has an entry, simply return the existing Keyring

    @type keyring: string
    @param keyring: the keyring name

    @rtype: Keyring
    @return: the Keyring object for this keyring
    """
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    try:
        return q.one()
    except NoResultFound:
        return None
1311 __all__.append('get_keyring')
def get_active_keyring_paths(session=None):
    """
    @rtype: list of strings
    @return: list of active keyring paths, highest priority first
    """
    q = session.query(Keyring) \
        .filter(Keyring.active == True) \
        .order_by(desc(Keyring.priority))
    return [k.keyring_name for k in q.all()]
1321 __all__.append('get_active_keyring_paths')
def get_primary_keyring_path(session=None):
    """
    Get the full path to the highest priority active keyring

    @rtype: str or None
    @return: path to the active keyring with the highest priority or None if no
             keyring is configured
    """
    # get_active_keyring_paths() already sorts highest priority first.
    keyrings = get_active_keyring_paths()
    return keyrings[0] if keyrings else None
1339 __all__.append('get_primary_keyring_path')
1341 ################################################################################
class DBChange(object):
    """A row of the C{changes} table (mapped by SQLAlchemy)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<DBChange %s>' % self.changesname
1350 __all__.append('DBChange')
def get_dbchange(filename, session=None):
    """
    returns DBChange object for given C{filename}.

    @type filename: string
    @param filename: the name of the file

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: DBChange
    @return: DBChange object for the given filename (C{None} if not present)
    """
    q = session.query(DBChange).filter_by(changesname=filename)
    try:
        return q.one()
    except NoResultFound:
        return None
1375 __all__.append('get_dbchange')
1377 ################################################################################
class Maintainer(ORMObject):
    """A maintainer name/address as stored in the C{maintainer} table."""
    def __init__(self, name = None):
        self.name = name

    def properties(self):
        return ['name', 'maintainer_id']

    def not_null_constraints(self):
        return ['name']

    def get_split_maintainer(self):
        """Split self.name via fix_maintainer().

        @rtype: tuple
        @return: (rfc822, rfc2047, name, email); empty strings when no
            name is set
        """
        if not hasattr(self, 'name') or self.name is None:
            return ('', '', '', '')
        return fix_maintainer(self.name.strip())
1395 __all__.append('Maintainer')
def get_or_set_maintainer(name, session=None):
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    @type name: string
    @param name: The maintainer name to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.
    A flush will be performed either way.

    @rtype: Maintainer
    @return: the Maintainer object for the given maintainer
    """
    q = session.query(Maintainer).filter_by(name=name)
    try:
        return q.one()
    except NoResultFound:
        # Not there yet: insert and flush/commit so the id is assigned.
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()
        return maintainer
1429 __all__.append('get_or_set_maintainer')
def get_maintainer(maintainer_id, session=None):
    """
    Return the name of the maintainer behind C{maintainer_id} or None if that
    maintainer_id is invalid.

    @type maintainer_id: int
    @param maintainer_id: the id of the maintainer

    @rtype: Maintainer
    @return: the Maintainer with this C{maintainer_id}
    """
    # Primary-key lookup; yields None for an unknown id.
    return session.query(Maintainer).get(maintainer_id)
1446 __all__.append('get_maintainer')
1448 ################################################################################
class NewComment(object):
    """A reviewer comment attached to a NEW-queue entry."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '''<NewComment for '%s %s' (%s)>''' % (self.package, self.version, self.comment_id)
1457 __all__.append('NewComment')
def has_new_comment(policy_queue, package, version, session=None):
    """
    Returns true if the given combination of C{package}, C{version} has a comment.

    @type package: string
    @param package: name of the package

    @type version: string
    @param version: package version

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: boolean
    @return: true/false
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    # Fix: fetch at most one row instead of counting every match;
    # bool() around "count > 0" was also redundant.
    return q.first() is not None
1484 __all__.append('has_new_comment')
def get_new_comments(policy_queue, package=None, version=None, comment_id=None, session=None):
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    @type package: string (optional)
    @param package: name of the package

    @type version: string (optional)
    @param version: package version

    @type comment_id: int (optional)
    @param comment_id: An id of a comment

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of NewComment objects will be returned
    """
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    # Narrow the query only by the filters the caller actually supplied.
    for column, value in (('package', package),
                          ('version', version),
                          ('comment_id', comment_id)):
        if value is not None:
            q = q.filter_by(**{column: value})

    return q.all()
1516 __all__.append('get_new_comments')
1518 ################################################################################
class Override(ORMObject):
    """A section/priority override for one package in one suite."""
    def __init__(self, package = None, suite = None, component = None, overridetype = None, \
        section = None, priority = None):
        self.package = package
        self.suite = suite
        self.component = component
        self.overridetype = overridetype
        self.section = section
        self.priority = priority

    def properties(self):
        return ['package', 'suite', 'component', 'overridetype', 'section',
                'priority']

    def not_null_constraints(self):
        return ['package', 'suite', 'component', 'overridetype', 'section']
1537 __all__.append('Override')
def get_override(package, suite=None, component=None, overridetype=None, session=None):
    """
    Returns Override object for the given parameters

    @type package: string
    @param package: The name of the package

    @type suite: string, list or None
    @param suite: The name of the suite (or suites if a list) to limit to.  If
                  None, don't limit.  Defaults to None.

    @type component: string, list or None
    @param component: The name of the component (or components if a list) to
                      limit to.  If None, don't limit.  Defaults to None.

    @type overridetype: string, list or None
    @param overridetype: The name of the overridetype (or overridetypes if a list) to
                         limit to.  If None, don't limit.  Defaults to None.

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: A (possibly empty) list of Override objects will be returned
    """
    def as_list(value):
        # Accept either a single name or a list of names.
        return value if isinstance(value, list) else [value]

    q = session.query(Override).filter_by(package=package)

    if suite is not None:
        q = q.join(Suite).filter(Suite.suite_name.in_(as_list(suite)))

    if component is not None:
        q = q.join(Component).filter(Component.component_name.in_(as_list(component)))

    if overridetype is not None:
        q = q.join(OverrideType).filter(OverrideType.overridetype.in_(as_list(overridetype)))

    return q.all()
1584 __all__.append('get_override')
1587 ################################################################################
class OverrideType(ORMObject):
    """A type of override: dsc, deb or udeb."""
    def __init__(self, overridetype = None):
        self.overridetype = overridetype

    def properties(self):
        return ['overridetype', 'overridetype_id', 'overrides_count']

    def not_null_constraints(self):
        return ['overridetype']
1599 __all__.append('OverrideType')
def get_override_type(override_type, session=None):
    """
    Returns OverrideType object for given C{override type}.

    @type override_type: string
    @param override_type: The name of the override type

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: OverrideType
    @return: the OverrideType object for the given override type
        (C{None} if not present)
    """
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    try:
        return q.one()
    except NoResultFound:
        return None
1624 __all__.append('get_override_type')
1626 ################################################################################
class PolicyQueue(object):
    """A policy queue (e.g. NEW) as stored in the database."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<PolicyQueue %s>' % self.queue_name
1635 __all__.append('PolicyQueue')
def get_policy_queue(queuename, session=None):
    """
    Returns PolicyQueue object for given C{queue name}

    @type queuename: string
    @param queuename: The name of the queue

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: PolicyQueue
    @return: PolicyQueue object for the given queue
    """
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    try:
        return q.one()
    except NoResultFound:
        return None
1660 __all__.append('get_policy_queue')
1662 ################################################################################
class PolicyQueueUpload(object):
    """An upload sitting in a policy queue."""
    def __cmp__(self, other):
        # Order by source name, then version, then "has source before
        # binary-only", and finally by .changes filename.
        ret = cmp(self.changes.source, other.changes.source)
        if ret == 0:
            ret = apt_pkg.version_compare(self.changes.version, other.changes.version)
        if ret == 0:
            if self.source is not None and other.source is None:
                ret = -1
            elif self.source is None and other.source is not None:
                ret = 1
        if ret == 0:
            ret = cmp(self.changes.changesname, other.changes.changesname)
        return ret
1678 __all__.append('PolicyQueueUpload')
1680 ################################################################################
class PolicyQueueByhandFile(object):
    """A BYHAND file belonging to a policy-queue upload (mapped by SQLAlchemy)."""
    pass
1685 __all__.append('PolicyQueueByhandFile')
1687 ################################################################################
class Priority(ORMObject):
    """A package priority (required, important, ...) with its sort level."""
    def __init__(self, priority = None, level = None):
        self.priority = priority
        self.level = level

    def properties(self):
        return ['priority', 'priority_id', 'level', 'overrides_count']

    def not_null_constraints(self):
        return ['priority', 'level']

    def __eq__(self, val):
        # Allow comparing a Priority directly against a priority name.
        if isinstance(val, str):
            return (self.priority == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.priority != val)
        # This signals to use the normal comparison operator
        return NotImplemented
1712 __all__.append('Priority')
def get_priority(priority, session=None):
    """
    Returns Priority object for given C{priority name}.

    @type priority: string
    @param priority: The name of the priority

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Priority
    @return: Priority object for the given priority
    """
    q = session.query(Priority).filter_by(priority=priority)
    try:
        return q.one()
    except NoResultFound:
        return None
1737 __all__.append('get_priority')
def get_priorities(session=None):
    """
    Returns dictionary of priority names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of priority names -> id mappings
    """
    return dict((p.priority, p.priority_id)
                for p in session.query(Priority).all())
1759 __all__.append('get_priorities')
1761 ################################################################################
class Section(ORMObject):
    """An archive section (admin, devel, ...)."""
    def __init__(self, section = None):
        self.section = section

    def properties(self):
        return ['section', 'section_id', 'overrides_count']

    def not_null_constraints(self):
        return ['section']

    def __eq__(self, val):
        # Allow comparing a Section directly against a section name.
        if isinstance(val, str):
            return (self.section == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.section != val)
        # This signals to use the normal comparison operator
        return NotImplemented
1785 __all__.append('Section')
def get_section(section, session=None):
    """
    Returns Section object for given C{section name}.

    @type section: string
    @param section: The name of the section

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Section
    @return: Section object for the given section name
    """
    q = session.query(Section).filter_by(section=section)
    try:
        return q.one()
    except NoResultFound:
        return None
1810 __all__.append('get_section')
def get_sections(session=None):
    """
    Returns dictionary of section names -> id mappings

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: dictionary
    @return: dictionary of section names -> id mappings
    """
    return dict((s.section, s.section_id)
                for s in session.query(Section).all())
1832 __all__.append('get_sections')
1834 ################################################################################
class SignatureHistory(ORMObject):
    """Record of a seen signature (fingerprint, timestamp, content hash)."""
    # NOTE(review): the (cls, signed_file) signature implies this is a
    # @classmethod alternate constructor -- confirm against VCS.
    @classmethod
    def from_signed_file(cls, signed_file):
        """signature history entry from signed file

        @type signed_file: L{daklib.gpg.SignedFile}
        @param signed_file: signed file

        @rtype: L{SignatureHistory}
        """
        self = cls()
        self.fingerprint = signed_file.primary_fingerprint
        self.signature_timestamp = signed_file.signature_timestamp
        self.contents_sha1 = signed_file.contents_sha1()
        return self
1852 __all__.append('SignatureHistory')
1854 ################################################################################
class SrcContents(ORMObject):
    """One file name shipped by a source package (src_contents table)."""
    def __init__(self, file = None, source = None):
        self.file = file
        self.source = source

    def properties(self):
        return ['file', 'source']
1864 __all__.append('SrcContents')
1866 ################################################################################
1868 from debian.debfile import Deb822
# Temporary Deb822 subclass to fix bugs with : handling; see #597249
class Dak822(Deb822):
    def _internal_parser(self, sequence, fields=None):
        """Parse an RFC822-style paragraph into this mapping.

        @param sequence: a string or sequence of lines
        @param fields: optional whitelist of field names to keep
        """
        # The key is non-whitespace, non-colon characters before any colon.
        key_part = r"^(?P<key>[^: \t\n\r\f\v]+)\s*:\s*"
        single = re.compile(key_part + r"(?P<data>\S.*?)\s*$")
        multi = re.compile(key_part + r"$")
        multidata = re.compile(r"^\s(?P<data>.+?)\s*$")

        wanted_field = lambda f: fields is None or f in fields

        if isinstance(sequence, basestring):
            sequence = sequence.splitlines()

        curkey = None
        content = ""
        for line in self.gpg_stripped_paragraph(sequence):
            m = single.match(line)
            if m:
                # Flush the previous field before starting a new one.
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = m.group('data')
                continue

            m = multi.match(line)
            if m:
                if curkey:
                    self[curkey] = content

                if not wanted_field(m.group('key')):
                    curkey = None
                    continue

                curkey = m.group('key')
                content = ""
                continue

            m = multidata.match(line)
            if m:
                content += '\n' + line # XXX not m.group('data')?
                continue

        # Flush the final field.
        if curkey:
            self[curkey] = content
class DBSource(ORMObject):
    """A source package version as stored in the C{source} table."""
    def __init__(self, source = None, version = None, maintainer = None, \
        changedby = None, poolfile = None, install_date = None, fingerprint = None):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    @property
    def pkid(self):
        # Generic primary-key accessor used by metadata import code.
        return self.source_id

    def properties(self):
        return ['source', 'source_id', 'maintainer', 'changedby', \
            'fingerprint', 'poolfile', 'version', 'suites_count', \
            'install_date', 'binaries_count', 'uploaders_count']

    def not_null_constraints(self):
        return ['source', 'version', 'install_date', 'maintainer', \
            'changedby', 'poolfile']

    def read_control_fields(self):
        '''
        Reads the control information from a dsc

        @rtype: tuple
        @return: fields is the dsc information in a dictionary form
        '''
        # Fix: the local "fullpath" was computed but then ignored, and
        # the file handle was never closed; Dak822 parses eagerly so the
        # file can be closed as soon as the constructor returns.
        fullpath = self.poolfile.fullpath
        fd = open(fullpath, 'r')
        try:
            fields = Dak822(fd)
        finally:
            fd.close()
        return fields

    metadata = association_proxy('key', 'value')

    def scan_contents(self):
        '''
        Returns a set of names for non directories. The path names are
        normalized after converting them from either utf-8 or iso8859-1
        encoding.
        '''
        fullpath = self.poolfile.fullpath
        from daklib.contents import UnpackedSource
        unpacked = UnpackedSource(fullpath)
        fileset = set()
        for name in unpacked.get_all_filenames():
            # enforce proper utf-8 encoding
            try:
                name.decode('utf-8')
            except UnicodeDecodeError:
                name = name.decode('iso8859-1').encode('utf-8')
            fileset.add(name)
        return fileset
1978 __all__.append('DBSource')
def source_exists(source, source_version, suites = ["any"], session=None):
    """
    Ensure that source exists somewhere in the archive for the binary
    upload being processed.
      1. exact match     => 1.0-3
      2. bin-only NMU    => 1.0-3+b1 , 1.0-3.1+b1

    @type source: string
    @param source: source name

    @type source_version: string
    @param source_version: expected source version

    @type suites: list
    @param suites: list of suites to check in, default I{any}

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: int
    @return: returns 1 if a source with expected version is found, otherwise 0
    """
    # Strip a binNMU suffix to also match the original source version.
    from daklib.regexes import re_bin_only_nmu
    orig_source_version = re_bin_only_nmu.sub('', source_version)

    ret = True
    for suite in suites:
        q = session.query(DBSource).filter_by(source=source). \
            filter(DBSource.version.in_([source_version, orig_source_version]))
        if suite != "any":
            # source must exist in 'suite' or a suite that is enhanced by 'suite'
            s = get_suite(suite, session)
            if s:
                enhances_vcs = session.query(VersionCheck).filter(VersionCheck.suite==s).filter_by(check='Enhances')
                considered_suites = [ vc.reference for vc in enhances_vcs ]
                considered_suites.append(s)

                q = q.filter(DBSource.suites.any(Suite.suite_id.in_([s.suite_id for s in considered_suites])))

        if q.count() > 0:
            continue

        # No source found so return not ok
        ret = False

    return ret
2033 __all__.append('source_exists')
def get_suites_source_in(source, session=None):
    """
    Returns list of Suite objects which given C{source} name is in

    @type source: str
    @param source: DBSource package name to search for

    @rtype: list
    @return: list of Suite objects for the given source
    """
    q = session.query(Suite).filter(Suite.sources.any(source=source))
    return q.all()
2049 __all__.append('get_suites_source_in')
def get_sources_from_name(source, version=None, dm_upload_allowed=None, session=None):
    """
    Returns list of DBSource objects for given C{source} name and other parameters

    @type source: str
    @param source: DBSource package name to search for

    @type version: str or None
    @param version: DBSource version name to search for or None if not applicable

    @type dm_upload_allowed: bool
    @param dm_upload_allowed: If None, no effect.  If True or False, only
    return packages with that dm_upload_allowed setting

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of DBSource objects for the given name (may be empty)
    """
    q = session.query(DBSource).filter_by(source=source)

    # Optional narrowing filters.
    if version is not None:
        q = q.filter_by(version=version)
    if dm_upload_allowed is not None:
        q = q.filter_by(dm_upload_allowed=dm_upload_allowed)

    return q.all()
2084 __all__.append('get_sources_from_name')
2086 # FIXME: This function fails badly if it finds more than 1 source package and
2087 # its implementation is trivial enough to be inlined.
def get_source_in_suite(source, suite_name, session=None):
    """
    Returns a DBSource object for a combination of C{source} and C{suite_name}.

      - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}
      - B{suite_name} - a suite name, eg. I{unstable}

    @type source: string
    @param source: source package name

    @type suite_name: string
    @param suite_name: the suite name

    @rtype: DBSource or None
    @return: the DBSource for I{source} in I{suite} (None if not found)
    """
    suite = get_suite(suite_name, session)
    if suite is None:
        return None
    try:
        return suite.get_sources(source).one()
    except NoResultFound:
        return None
2114 __all__.append('get_source_in_suite')
def import_metadata_into_db(obj, session=None):
    """
    This routine works on either DBBinary or DBSource objects and imports
    their metadata into the database
    """
    fields = obj.read_control_fields()
    for k in fields.keys():
        try:
            # Try plain ASCII first.
            val = str(fields[k])
        except UnicodeEncodeError:
            # Fall back to UTF-8
            try:
                val = fields[k].encode('utf-8')
            except UnicodeEncodeError:
                # Finally try iso8859-1
                val = fields[k].encode('iso8859-1')
                # Otherwise we allow the exception to percolate up and we cause
                # a reject as someone is playing silly buggers

        obj.metadata[get_or_set_metadatakey(k, session)] = val

    session.commit_or_flush()
2141 __all__.append('import_metadata_into_db')
2143 ################################################################################
class SrcFormat(object):
    """A source package format, e.g. 3.0 (quilt)."""
    def __init__(self, *args, **kwargs):
        pass

    def __repr__(self):
        return '<SrcFormat %s>' % (self.format_name)
2152 __all__.append('SrcFormat')
2154 ################################################################################
# (display name, Suite attribute) pairs used by Suite.details() to
# render a suite's settings, one "Name: value" line per entry.
# NOTE(review): this list may be missing entries relative to the suite
# table (e.g. a 'Label' field) -- confirm against VCS.
SUITE_FIELDS = [ ('SuiteName', 'suite_name'),
                 ('SuiteID', 'suite_id'),
                 ('Version', 'version'),
                 ('Origin', 'origin'),
                 ('Description', 'description'),
                 ('Untouchable', 'untouchable'),
                 ('Announce', 'announce'),
                 ('Codename', 'codename'),
                 ('OverrideCodename', 'overridecodename'),
                 ('ValidTime', 'validtime'),
                 ('Priority', 'priority'),
                 ('NotAutomatic', 'notautomatic'),
                 ('CopyChanges', 'copychanges'),
                 ('OverrideSuite', 'overridesuite')]
2172 # Why the heck don't we have any UNIQUE constraints in table suite?
2173 # TODO: Add UNIQUE constraints for appropriate columns.
class Suite(ORMObject):
    """A suite (unstable, testing, ...) as stored in the C{suite} table."""
    def __init__(self, suite_name = None, version = None):
        self.suite_name = suite_name
        self.version = version

    def properties(self):
        return ['suite_name', 'version', 'sources_count', 'binaries_count',
                'overrides_count']

    def not_null_constraints(self):
        return ['suite_name']

    def __eq__(self, val):
        # Allow comparing a Suite directly against a suite name string.
        if isinstance(val, str):
            return (self.suite_name == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.suite_name != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def details(self):
        """Render this suite's settings as "Name: value" lines.

        @rtype: string
        @return: one line per non-None SUITE_FIELDS attribute
        """
        ret = []
        for disp, field in SUITE_FIELDS:
            val = getattr(self, field, None)
            if val is not None:
                ret.append("%s: %s" % (disp, val))

        return "\n".join(ret)

    def get_architectures(self, skipsrc=False, skipall=False):
        """
        Returns list of Architecture objects

        @type skipsrc: boolean
        @param skipsrc: Whether to skip returning the 'source' architecture entry
        (Default False)

        @type skipall: boolean
        @param skipall: Whether to skip returning the 'all' architecture entry
        (Default False)

        @rtype: list
        @return: list of Architecture objects for the given name (may be empty)
        """
        q = object_session(self).query(Architecture).with_parent(self)
        if skipsrc:
            q = q.filter(Architecture.arch_string != 'source')
        if skipall:
            q = q.filter(Architecture.arch_string != 'all')
        return q.order_by(Architecture.arch_string).all()

    def get_sources(self, source):
        """
        Returns a query object representing DBSource that is part of C{suite}.

          - B{source} - source package name, eg. I{mailfilter}, I{bbdb}, I{glibc}

        @type source: string
        @param source: source package name

        @rtype: sqlalchemy.orm.query.Query
        @return: a query of DBSource
        """
        session = object_session(self)
        # NOTE(review): the restriction to this suite was elided in the
        # available text; .with_parent(self) matches the sources relation
        # -- confirm against VCS.
        return session.query(DBSource).filter_by(source = source). \
            with_parent(self)

    def get_overridesuite(self):
        # Fall back to this suite itself when no override suite is set.
        if self.overridesuite is None:
            return self
        else:
            return object_session(self).query(Suite).filter_by(suite_name=self.overridesuite).one()

    @property
    def path(self):
        return os.path.join(self.archive.path, 'dists', self.suite_name)
2258 __all__.append('Suite')
def get_suite(suite, session=None):
    """
    Returns Suite object for given C{suite name}.

    @type suite: string
    @param suite: The name of the suite

    @type session: Session
    @param session: Optional SQLA session object (a temporary one will be
    generated if not supplied)

    @rtype: Suite
    @return: Suite object for the requested suite name (None if not present)
    """
    q = session.query(Suite).filter_by(suite_name=suite)
    try:
        return q.one()
    except NoResultFound:
        return None
2283 __all__.append('get_suite')
2285 ################################################################################
def get_suite_architectures(suite, skipsrc=False, skipall=False, session=None):
    """
    Returns list of Architecture objects for given C{suite} name.  The list is
    empty if suite does not exist.

    @type suite: str
    @param suite: Suite name to search for

    @type skipsrc: boolean
    @param skipsrc: Whether to skip returning the 'source' architecture entry
    (Default False)

    @type skipall: boolean
    @param skipall: Whether to skip returning the 'all' architecture entry
    (Default False)

    @type session: Session
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied)

    @rtype: list
    @return: list of Architecture objects for the given name (may be empty)
    """
    try:
        # get_suite() returns None for an unknown suite; the resulting
        # AttributeError maps to "no architectures".
        return get_suite(suite, session).get_architectures(skipsrc, skipall)
    except AttributeError:
        return []
2317 __all__.append('get_suite_architectures')
2319 ################################################################################
class Uid(ORMObject):
    """A Debian account name (uid) as stored in the C{uid} table."""
    def __init__(self, uid = None, name = None):
        self.uid = uid
        self.name = name

    def __eq__(self, val):
        # Allow comparing a Uid directly against a uid string.
        if isinstance(val, str):
            return (self.uid == val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def __ne__(self, val):
        if isinstance(val, str):
            return (self.uid != val)
        # This signals to use the normal comparison operator
        return NotImplemented

    def properties(self):
        return ['uid', 'name', 'fingerprint']

    def not_null_constraints(self):
        return ['uid']
2344 __all__.append('Uid')
def get_or_set_uid(uidname, session=None):
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    @type uidname: string
    @param uidname: The uid to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: Uid
    @return: the uid object for the given uidname
    """
    q = session.query(Uid).filter_by(uid=uidname)
    try:
        return q.one()
    except NoResultFound:
        # Not there yet: insert and flush/commit so the id is assigned.
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()
        return uid
2378 __all__.append('get_or_set_uid')
def get_uid_from_fingerprint(fpr, session=None):
    """Return the Uid owning fingerprint C{fpr}, or None if unknown."""
    q = session.query(Uid).join(Fingerprint).filter_by(fingerprint=fpr)
    try:
        return q.one()
    except NoResultFound:
        return None
2390 __all__.append('get_uid_from_fingerprint')
2392 ################################################################################
class MetadataKey(ORMObject):
    """A metadata field name (e.g. Depends) shared by binary/source metadata."""
    def __init__(self, key = None):
        self.key = key

    def properties(self):
        return ['key']

    def not_null_constraints(self):
        return ['key']
2404 __all__.append('MetadataKey')
def get_or_set_metadatakey(keyname, session=None):
    """
    Returns MetadataKey object for given C{keyname}.

    If no matching keyname is found, a row is inserted.

    @type keyname: string
    @param keyname: The keyname to add

    @type session: SQLAlchemy
    @param session: Optional SQL session object (a temporary one will be
    generated if not supplied).  If not passed, a commit will be performed at
    the end of the function, otherwise the caller is responsible for commiting.

    @rtype: MetadataKey
    @return: the metadatakey object for the given keyname
    """
    q = session.query(MetadataKey).filter_by(key=keyname)
    try:
        return q.one()
    except NoResultFound:
        # Not there yet: insert and flush/commit so the id is assigned.
        ret = MetadataKey(keyname)
        session.add(ret)
        session.commit_or_flush()
        return ret
2436 __all__.append('get_or_set_metadatakey')
2438 ################################################################################
class BinaryMetadata(ORMObject):
    """One metadata key/value pair attached to a binary package."""
    def __init__(self, key = None, value = None, binary = None):
        self.key = key
        self.value = value
        self.binary = binary

    def properties(self):
        return ['binary', 'key', 'value']

    def not_null_constraints(self):
        return ['value']
2452 __all__.append('BinaryMetadata')
2454 ################################################################################
class SourceMetadata(ORMObject):
    """One metadata key/value pair attached to a source package."""
    def __init__(self, key = None, value = None, source = None):
        self.key = key
        self.value = value
        self.source = source

    def properties(self):
        return ['source', 'key', 'value']

    def not_null_constraints(self):
        return ['value']
2468 __all__.append('SourceMetadata')
2470 ################################################################################
class VersionCheck(ORMObject):
    """A version constraint between two suites (Enhances, MustBeNewerThan, ...)."""
    def __init__(self, *args, **kwargs):
        pass

    def properties(self):
        #return ['suite_id', 'check', 'reference_id']
        return ['check']

    def not_null_constraints(self):
        return ['suite', 'check', 'reference']
2483 __all__.append('VersionCheck')
def get_version_checks(suite_name, check = None, session = None):
    """Return the VersionCheck rows for C{suite_name}, optionally
    limited to one check type; [] when the suite is unknown."""
    suite = get_suite(suite_name, session)
    if not suite:
        # Make sure that what we return is iterable so that list comprehensions
        # involving this don't cause a traceback
        return []

    q = session.query(VersionCheck).filter_by(suite=suite)
    if check:
        q = q.filter_by(check=check)
    return q.all()
2497 __all__.append('get_version_checks')
2499 ################################################################################
2501 class DBConn(object):
2503 database module init.
2507 def __init__(self, *args, **kwargs):
2508 self.__dict__ = self.__shared_state
2510 if not getattr(self, 'initialised', False):
2511 self.initialised = True
2512 self.debug = kwargs.has_key('debug')
2515 def __setuptables(self):
# Reflect the archive database schema via SQLAlchemy autoload and bind
# each table as self.tbl_<name> and each view as self.view_<name>.
# NOTE(review): the 'tables' and 'views' list literals are heavily elided
# in this excerpt (the name runs below are fragments of much longer
# lists) -- consult the full source for the complete set.
2518 'acl_architecture_map',
2519 'acl_fingerprint_map',
2526 'binaries_metadata',
2534 'external_overrides',
2535 'extra_src_references',
2537 'files_archive_map',
2543 # TODO: the maintainer column in table override should be removed.
2547 'policy_queue_upload',
2548 'policy_queue_upload_binaries_map',
2549 'policy_queue_byhand_file',
2552 'signature_history',
2561 'suite_architectures',
2562 'suite_build_queue_copy',
2563 'suite_src_formats',
# From here on the names appear to belong to the database *views* list
# (association/obsolescence reporting views) -- confirm against the
# full source.
2569 'almost_obsolete_all_associations',
2570 'almost_obsolete_src_associations',
2571 'any_associations_source',
2572 'bin_associations_binaries',
2573 'binaries_suite_arch',
2576 'newest_all_associations',
2577 'newest_any_associations',
2579 'newest_src_association',
2580 'obsolete_all_associations',
2581 'obsolete_any_associations',
2582 'obsolete_any_by_all_associations',
2583 'obsolete_src_associations',
2585 'src_associations_bin',
2586 'src_associations_src',
2587 'suite_arch_by_name',
# Autoload each table's columns from the live database; useexisting
# makes repeated reflection (shared metadata) harmless.
2590 for table_name in tables:
2591 table = Table(table_name, self.db_meta, \
2592 autoload=True, useexisting=True)
2593 setattr(self, 'tbl_%s' % table_name, table)
# Views are reflected the same way but exposed under the view_ prefix.
2595 for view_name in views:
2596 view = Table(view_name, self.db_meta, autoload=True)
2597 setattr(self, 'view_%s' % view_name, view)
2599 def __setupmappers(self):
# Wire every ORM class in this module to its reflected table with
# classic (non-declarative) SQLAlchemy mapper() calls.  'extension =
# validator' attaches the module's MapperExtension so the
# not_null_constraints of each class are enforced on flush.
# NOTE(review): many original lines are elided in this excerpt, so
# several mapper() calls below are visibly truncated (missing
# 'properties = dict(' openers and closing parentheses) -- do not read
# this fragment as complete; confirm against the full source.
2600 mapper(Architecture, self.tbl_architecture,
2601 properties = dict(arch_id = self.tbl_architecture.c.id,
2602 suites = relation(Suite, secondary=self.tbl_suite_architectures,
2603 order_by=self.tbl_suite.c.suite_name,
2604 backref=backref('architectures', order_by=self.tbl_architecture.c.arch_string))),
2605 extension = validator)
2607 mapper(ACL, self.tbl_acl,
2609 architectures = relation(Architecture, secondary=self.tbl_acl_architecture_map, collection_class=set),
2610 fingerprints = relation(Fingerprint, secondary=self.tbl_acl_fingerprint_map, collection_class=set),
2611 match_keyring = relation(Keyring, primaryjoin=(self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id)),
2612 per_source = relation(ACLPerSource, collection_class=set),
2615 mapper(ACLPerSource, self.tbl_acl_per_source,
2617 acl = relation(ACL),
2618 fingerprint = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.fingerprint_id == self.tbl_fingerprint.c.id)),
2619 created_by = relation(Fingerprint, primaryjoin=(self.tbl_acl_per_source.c.created_by_id == self.tbl_fingerprint.c.id)),
2622 mapper(Archive, self.tbl_archive,
2623 properties = dict(archive_id = self.tbl_archive.c.id,
2624 archive_name = self.tbl_archive.c.name))
2626 mapper(ArchiveFile, self.tbl_files_archive_map,
2627 properties = dict(archive = relation(Archive, backref='files'),
2628 component = relation(Component),
2629 file = relation(PoolFile, backref='archives')))
2631 mapper(BuildQueue, self.tbl_build_queue,
2632 properties = dict(queue_id = self.tbl_build_queue.c.id,
2633 suite = relation(Suite, primaryjoin=(self.tbl_build_queue.c.suite_id==self.tbl_suite.c.id))))
# DBBinary: renames several raw FK columns (maintainer, source, file,
# sig_fpr) to *_id attributes and layers relations on top of them.
2635 mapper(DBBinary, self.tbl_binaries,
2636 properties = dict(binary_id = self.tbl_binaries.c.id,
2637 package = self.tbl_binaries.c.package,
2638 version = self.tbl_binaries.c.version,
2639 maintainer_id = self.tbl_binaries.c.maintainer,
2640 maintainer = relation(Maintainer),
2641 source_id = self.tbl_binaries.c.source,
2642 source = relation(DBSource, backref='binaries'),
2643 arch_id = self.tbl_binaries.c.architecture,
2644 architecture = relation(Architecture),
2645 poolfile_id = self.tbl_binaries.c.file,
2646 poolfile = relation(PoolFile),
2647 binarytype = self.tbl_binaries.c.type,
2648 fingerprint_id = self.tbl_binaries.c.sig_fpr,
2649 fingerprint = relation(Fingerprint),
2650 install_date = self.tbl_binaries.c.install_date,
2651 suites = relation(Suite, secondary=self.tbl_bin_associations,
2652 backref=backref('binaries', lazy='dynamic')),
2653 extra_sources = relation(DBSource, secondary=self.tbl_extra_src_references,
2654 backref=backref('extra_binary_references', lazy='dynamic')),
2655 key = relation(BinaryMetadata, cascade='all',
2656 collection_class=attribute_mapped_collection('key'))),
2657 extension = validator)
2659 mapper(Component, self.tbl_component,
2660 properties = dict(component_id = self.tbl_component.c.id,
2661 component_name = self.tbl_component.c.name),
2662 extension = validator)
2664 mapper(DBConfig, self.tbl_config,
2665 properties = dict(config_id = self.tbl_config.c.id))
2667 mapper(DSCFile, self.tbl_dsc_files,
2668 properties = dict(dscfile_id = self.tbl_dsc_files.c.id,
2669 source_id = self.tbl_dsc_files.c.source,
2670 source = relation(DBSource),
2671 poolfile_id = self.tbl_dsc_files.c.file,
2672 poolfile = relation(PoolFile)))
2674 mapper(ExternalOverride, self.tbl_external_overrides,
2676 suite_id = self.tbl_external_overrides.c.suite,
2677 suite = relation(Suite),
2678 component_id = self.tbl_external_overrides.c.component,
2679 component = relation(Component)))
2681 mapper(PoolFile, self.tbl_files,
2682 properties = dict(file_id = self.tbl_files.c.id,
2683 filesize = self.tbl_files.c.size),
2684 extension = validator)
2686 mapper(Fingerprint, self.tbl_fingerprint,
2687 properties = dict(fingerprint_id = self.tbl_fingerprint.c.id,
2688 uid_id = self.tbl_fingerprint.c.uid,
2689 uid = relation(Uid),
2690 keyring_id = self.tbl_fingerprint.c.keyring,
2691 keyring = relation(Keyring),
2692 acl = relation(ACL)),
2693 extension = validator)
2695 mapper(Keyring, self.tbl_keyrings,
2696 properties = dict(keyring_name = self.tbl_keyrings.c.name,
2697 keyring_id = self.tbl_keyrings.c.id,
2698 acl = relation(ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id)))),
2700 mapper(DBChange, self.tbl_changes,
2701 properties = dict(change_id = self.tbl_changes.c.id,
2702 seen = self.tbl_changes.c.seen,
2703 source = self.tbl_changes.c.source,
2704 binaries = self.tbl_changes.c.binaries,
2705 architecture = self.tbl_changes.c.architecture,
2706 distribution = self.tbl_changes.c.distribution,
2707 urgency = self.tbl_changes.c.urgency,
2708 maintainer = self.tbl_changes.c.maintainer,
2709 changedby = self.tbl_changes.c.changedby,
2710 date = self.tbl_changes.c.date,
2711 version = self.tbl_changes.c.version))
# Maintainer is joined to DBSource twice, once as uploader and once as
# the changed-by person, hence the two explicit primaryjoins.
2713 mapper(Maintainer, self.tbl_maintainer,
2714 properties = dict(maintainer_id = self.tbl_maintainer.c.id,
2715 maintains_sources = relation(DBSource, backref='maintainer',
2716 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.maintainer)),
2717 changed_sources = relation(DBSource, backref='changedby',
2718 primaryjoin=(self.tbl_maintainer.c.id==self.tbl_source.c.changedby))),
2719 extension = validator)
2721 mapper(NewComment, self.tbl_new_comments,
2722 properties = dict(comment_id = self.tbl_new_comments.c.id,
2723 policy_queue = relation(PolicyQueue)))
2725 mapper(Override, self.tbl_override,
2726 properties = dict(suite_id = self.tbl_override.c.suite,
2727 suite = relation(Suite, \
2728 backref=backref('overrides', lazy='dynamic')),
2729 package = self.tbl_override.c.package,
2730 component_id = self.tbl_override.c.component,
2731 component = relation(Component, \
2732 backref=backref('overrides', lazy='dynamic')),
2733 priority_id = self.tbl_override.c.priority,
2734 priority = relation(Priority, \
2735 backref=backref('overrides', lazy='dynamic')),
2736 section_id = self.tbl_override.c.section,
2737 section = relation(Section, \
2738 backref=backref('overrides', lazy='dynamic')),
2739 overridetype_id = self.tbl_override.c.type,
2740 overridetype = relation(OverrideType, \
2741 backref=backref('overrides', lazy='dynamic'))))
2743 mapper(OverrideType, self.tbl_override_type,
2744 properties = dict(overridetype = self.tbl_override_type.c.type,
2745 overridetype_id = self.tbl_override_type.c.id))
2747 mapper(PolicyQueue, self.tbl_policy_queue,
2748 properties = dict(policy_queue_id = self.tbl_policy_queue.c.id,
2749 suite = relation(Suite, primaryjoin=(self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id))))
2751 mapper(PolicyQueueUpload, self.tbl_policy_queue_upload,
2753 changes = relation(DBChange),
2754 policy_queue = relation(PolicyQueue, backref='uploads'),
2755 target_suite = relation(Suite),
2756 source = relation(DBSource),
2757 binaries = relation(DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map),
2760 mapper(PolicyQueueByhandFile, self.tbl_policy_queue_byhand_file,
2762 upload = relation(PolicyQueueUpload, backref='byhand'),
2766 mapper(Priority, self.tbl_priority,
2767 properties = dict(priority_id = self.tbl_priority.c.id))
2769 mapper(Section, self.tbl_section,
2770 properties = dict(section_id = self.tbl_section.c.id,
2771 section=self.tbl_section.c.section))
2773 mapper(SignatureHistory, self.tbl_signature_history)
2775 mapper(DBSource, self.tbl_source,
2776 properties = dict(source_id = self.tbl_source.c.id,
2777 version = self.tbl_source.c.version,
2778 maintainer_id = self.tbl_source.c.maintainer,
2779 poolfile_id = self.tbl_source.c.file,
2780 poolfile = relation(PoolFile),
2781 fingerprint_id = self.tbl_source.c.sig_fpr,
2782 fingerprint = relation(Fingerprint),
2783 changedby_id = self.tbl_source.c.changedby,
2784 srcfiles = relation(DSCFile,
2785 primaryjoin=(self.tbl_source.c.id==self.tbl_dsc_files.c.source)),
2786 suites = relation(Suite, secondary=self.tbl_src_associations,
2787 backref=backref('sources', lazy='dynamic')),
2788 uploaders = relation(Maintainer,
2789 secondary=self.tbl_src_uploaders),
2790 key = relation(SourceMetadata, cascade='all',
2791 collection_class=attribute_mapped_collection('key'))),
2792 extension = validator)
2794 mapper(SrcFormat, self.tbl_src_format,
2795 properties = dict(src_format_id = self.tbl_src_format.c.id,
2796 format_name = self.tbl_src_format.c.format_name))
# Suite joins PolicyQueue twice (policy queue vs. NEW queue), so both
# relations need explicit primaryjoins.
2798 mapper(Suite, self.tbl_suite,
2799 properties = dict(suite_id = self.tbl_suite.c.id,
2800 policy_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id)),
2801 new_queue = relation(PolicyQueue, primaryjoin=(self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id)),
2802 copy_queues = relation(BuildQueue,
2803 secondary=self.tbl_suite_build_queue_copy),
2804 srcformats = relation(SrcFormat, secondary=self.tbl_suite_src_formats,
2805 backref=backref('suites', lazy='dynamic')),
2806 archive = relation(Archive, backref='suites'),
2807 acls = relation(ACL, secondary=self.tbl_suite_acl_map, collection_class=set),
2808 components = relation(Component, secondary=self.tbl_component_suite,
2809 order_by=self.tbl_component.c.ordering,
2810 backref=backref('suites'))),
2811 extension = validator)
2813 mapper(Uid, self.tbl_uid,
2814 properties = dict(uid_id = self.tbl_uid.c.id,
2815 fingerprint = relation(Fingerprint)),
2816 extension = validator)
2818 mapper(BinContents, self.tbl_bin_contents,
2820 binary = relation(DBBinary,
2821 backref=backref('contents', lazy='dynamic', cascade='all')),
2822 file = self.tbl_bin_contents.c.file))
2824 mapper(SrcContents, self.tbl_src_contents,
2826 source = relation(DBSource,
2827 backref=backref('contents', lazy='dynamic', cascade='all')),
2828 file = self.tbl_src_contents.c.file))
2830 mapper(MetadataKey, self.tbl_metadata_keys,
2832 key_id = self.tbl_metadata_keys.c.key_id,
2833 key = self.tbl_metadata_keys.c.key))
2835 mapper(BinaryMetadata, self.tbl_binaries_metadata,
2837 binary_id = self.tbl_binaries_metadata.c.bin_id,
2838 binary = relation(DBBinary),
2839 key_id = self.tbl_binaries_metadata.c.key_id,
2840 key = relation(MetadataKey),
2841 value = self.tbl_binaries_metadata.c.value))
2843 mapper(SourceMetadata, self.tbl_source_metadata,
2845 source_id = self.tbl_source_metadata.c.src_id,
2846 source = relation(DBSource),
2847 key_id = self.tbl_source_metadata.c.key_id,
2848 key = relation(MetadataKey),
2849 value = self.tbl_source_metadata.c.value))
# reference is eager-loaded (lazy='joined') since version checks are
# typically consumed immediately after the query.
2851 mapper(VersionCheck, self.tbl_version_check,
2853 suite_id = self.tbl_version_check.c.suite,
2854 suite = relation(Suite, primaryjoin=self.tbl_version_check.c.suite==self.tbl_suite.c.id),
2855 reference_id = self.tbl_version_check.c.reference,
2856 reference = relation(Suite, primaryjoin=self.tbl_version_check.c.reference==self.tbl_suite.c.id, lazy='joined')))
2858 ## Connection functions
2859 def __createconn(self):
# Build a PostgreSQL connection string from the dak configuration,
# create the SQLAlchemy engine/metadata/session factory, then reflect
# tables and set up mappers.  NOTE(review): several lines are elided in
# this excerpt (presumably 'cnf = Config()', the final 'else:' branch
# header and the surrounding 'try:') -- confirm against the full source.
2860 from config import Config
# Three connection styles: a pg_service.conf service name, an explicit
# host (with optional port), or a local socket with the port as a URL
# query parameter.  "-1" means "use the default port".
2862 if cnf.has_key("DB::Service"):
2863 connstr = "postgresql://service=%s" % cnf["DB::Service"]
2864 elif cnf.has_key("DB::Host"):
2866 connstr = "postgresql://%s" % cnf["DB::Host"]
2867 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2868 connstr += ":%s" % cnf["DB::Port"]
2869 connstr += "/%s" % cnf["DB::Name"]
2872 connstr = "postgresql:///%s" % cnf["DB::Name"]
2873 if cnf.has_key("DB::Port") and cnf["DB::Port"] != "-1":
2874 connstr += "?port=%s" % cnf["DB::Port"]
# Optional engine tuning from configuration; echo mirrors self.debug.
2876 engine_args = { 'echo': self.debug }
2877 if cnf.has_key('DB::PoolSize'):
2878 engine_args['pool_size'] = int(cnf['DB::PoolSize'])
2879 if cnf.has_key('DB::MaxOverflow'):
2880 engine_args['max_overflow'] = int(cnf['DB::MaxOverflow'])
# use_native_unicode only exists on SQLAlchemy > 0.5, hence the version
# gate.
2881 if sa_major_version != '0.5' and cnf.has_key('DB::Unicode') and \
2882 cnf['DB::Unicode'] == 'false':
2883 engine_args['use_native_unicode'] = False
2885 # Monkey patch a new dialect in in order to support service= syntax
2886 import sqlalchemy.dialects.postgresql
2887 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2
2888 class PGDialect_psycopg2_dak(PGDialect_psycopg2):
2889 def create_connect_args(self, url):
# 21 == len('postgresql://service='); everything after it is the
# pg_service.conf service name passed straight to libpq.
2890 if str(url).startswith('postgresql://service='):
2892 servicename = str(url)[21:]
2893 return (['service=%s' % servicename], {})
2895 return PGDialect_psycopg2.create_connect_args(self, url)
2897 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak
2900 self.db_pg = create_engine(connstr, **engine_args)
2901 self.db_meta = MetaData()
2902 self.db_meta.bind = self.db_pg
2903 self.db_smaker = sessionmaker(bind=self.db_pg,
2907 self.__setuptables()
2908 self.__setupmappers()
# A failed connection is fatal for dak; fubar logs and exits.
2910 except OperationalError as e:
2912 utils.fubar("Cannot connect to database (%s)" % str(e))
# Remember the creating pid so session() can detect use after fork.
2914 self.pid = os.getpid()
2916 def session(self, work_mem = 0):
2918 Returns a new session object. If a work_mem parameter is provided a new
2919 transaction is started and the work_mem parameter is set for this
2920 transaction. The work_mem parameter is measured in MB. A default value
2921 will be used if the parameter is not set.
2923 # reinitialize DBConn in new processes
# Engines/connections must not be shared across fork(); when the pid
# differs from the one recorded at creation time, the connection is
# rebuilt (the rebuild call itself is on a line elided from this
# excerpt -- presumably self.__createconn(); confirm).
2924 if self.pid != os.getpid():
2927 session = self.db_smaker()
# SET LOCAL scopes work_mem to the current transaction only; the guard
# checking work_mem > 0 and the final 'return session' are elided here.
2929 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem)
2932 __all__.append('DBConn')