Diffstat (limited to 'mediagoblin/db')
-rw-r--r-- | mediagoblin/db/mixin.py            |  11
-rw-r--r-- | mediagoblin/db/mongo/migrations.py |  30
-rw-r--r-- | mediagoblin/db/mongo/open.py       |   4
-rw-r--r-- | mediagoblin/db/open.py             |   6
-rw-r--r-- | mediagoblin/db/sql/convert.py      | 131
-rw-r--r-- | mediagoblin/db/sql/extratypes.py   |   4
-rw-r--r-- | mediagoblin/db/sql/migrations.py   |  20
-rw-r--r-- | mediagoblin/db/sql/models.py       |  16
-rw-r--r-- | mediagoblin/db/sql/models_v0.py    | 320
-rw-r--r-- | mediagoblin/db/sql/open.py         |  22
-rw-r--r-- | mediagoblin/db/sql_switch.py       |   1
11 files changed, 510 insertions, 55 deletions
diff --git a/mediagoblin/db/mixin.py b/mediagoblin/db/mixin.py
index 4f9e1b11..a5aded02 100644
--- a/mediagoblin/db/mixin.py
+++ b/mediagoblin/db/mixin.py
@@ -123,6 +123,17 @@ class MediaEntryMixin(object):
         """Return license dict for requested license"""
         return licenses.SUPPORTED_LICENSES[self.license or ""]
 
+    def exif_display_iter(self):
+        from mediagoblin.tools.exif import USEFUL_TAGS
+
+        if not self.media_data:
+            return
+        exif_all = self.media_data.get("exif_all")
+
+        for key in USEFUL_TAGS:
+            if key in exif_all:
+                yield key, exif_all[key]
+
 
 class MediaCommentMixin(object):
     @property
diff --git a/mediagoblin/db/mongo/migrations.py b/mediagoblin/db/mongo/migrations.py
index 1984ecc4..569dec88 100644
--- a/mediagoblin/db/mongo/migrations.py
+++ b/mediagoblin/db/mongo/migrations.py
@@ -119,6 +119,7 @@ def media_type_image_to_multimedia_type_image(database):
         {'$set': {'media_type': 'mediagoblin.media_types.image'}},
         multi=True)
 
+
 @RegisterMigration(8)
 def mediaentry_add_license(database):
     """
@@ -140,6 +141,7 @@ def remove_calculated_html(database):
     drop_table_field(database, 'media_entries', 'description_html')
     drop_table_field(database, 'media_comments', 'content_html')
 
+
 @RegisterMigration(10)
 def convert_video_media_data(database):
     """
@@ -154,6 +156,7 @@ def convert_video_media_data(database):
         document['media_data'] = document['media_data']['video']
         collection.save(document)
 
+
 @RegisterMigration(11)
 def convert_gps_media_data(database):
     """
@@ -170,7 +173,34 @@ def convert_gps_media_data(database):
         del document['media_data']['gps']
         collection.save(document)
 
+
 @RegisterMigration(12)
+def convert_exif_media_data(database):
+    """
+    Move media_data["exif"]["clean"] to media_data["exif_all"].
+    Drop media_data["exif"]["useful"]
+    In preparation for media_data.exif_all
+    """
+    collection = database['media_entries']
+    target = collection.find(
+        {'media_data.exif.clean': {'$exists': True}})
+
+    for document in target:
+        media_data = document['media_data']
+
+        exif_all = media_data['exif'].pop('clean')
+        if len(exif_all):
+            media_data['exif_all'] = exif_all
+
+        del media_data['exif']['useful']
+
+        assert len(media_data['exif']) == 0
+        del media_data['exif']
+
+        collection.save(document)
+
+
+@RegisterMigration(13)
 def user_add_wants_comment_notification(database):
     """
     Add wants_comment_notification to user model
diff --git a/mediagoblin/db/mongo/open.py b/mediagoblin/db/mongo/open.py
index bedc497b..c4f37b42 100644
--- a/mediagoblin/db/mongo/open.py
+++ b/mediagoblin/db/mongo/open.py
@@ -21,6 +21,10 @@ from mediagoblin.db.mongo import models
 from mediagoblin.db.mongo.util import MigrationManager
 
 
+def load_models(app_config):
+    pass
+
+
 def connect_database_from_config(app_config, use_pymongo=False):
     """
     Connect to the main database, take config from app_config
diff --git a/mediagoblin/db/open.py b/mediagoblin/db/open.py
index 0163469f..f4c38511 100644
--- a/mediagoblin/db/open.py
+++ b/mediagoblin/db/open.py
@@ -21,7 +21,9 @@ except ImportError:
 
 if use_sql:
     from mediagoblin.db.sql.open import \
-        setup_connection_and_db_from_config, check_db_migrations_current
+        setup_connection_and_db_from_config, check_db_migrations_current, \
+        load_models
 else:
     from mediagoblin.db.mongo.open import \
-        setup_connection_and_db_from_config, check_db_migrations_current
+        setup_connection_and_db_from_config, check_db_migrations_current, \
+        load_models
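The exif_display_iter() generator added to MediaEntryMixin above assumes media_data['exif_all'] is a flat dict keyed by tag name, which is exactly the shape Mongo migration 12 (convert_exif_media_data) produces from the old media_data['exif']['clean'] value. A minimal, dependency-free sketch of both halves using plain dicts instead of Mongo documents; the USEFUL_TAGS stand-in and the sample tag values are illustrative only:

# Sketch only: plain dicts instead of Mongo documents, made-up tag names.
USEFUL_TAGS = ['EXIF DateTimeOriginal', 'EXIF FNumber']   # stand-in list


def migrate_exif(media_data):
    """Mimic convert_exif_media_data() on a single media_data dict."""
    exif_all = media_data['exif'].pop('clean')
    if len(exif_all):
        media_data['exif_all'] = exif_all
    del media_data['exif']['useful']
    assert len(media_data['exif']) == 0
    del media_data['exif']


def exif_display_iter(media_data):
    """Mimic MediaEntryMixin.exif_display_iter() on the migrated dict."""
    exif_all = media_data.get('exif_all')
    for key in USEFUL_TAGS:
        if key in exif_all:
            yield key, exif_all[key]


media_data = {'exif': {'clean': {'EXIF FNumber': '2.8'}, 'useful': {}}}
migrate_exif(media_data)
print(list(exif_display_iter(media_data)))   # [('EXIF FNumber', '2.8')]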
diff --git a/mediagoblin/db/sql/convert.py b/mediagoblin/db/sql/convert.py
index d1492977..ac64cf8d 100644
--- a/mediagoblin/db/sql/convert.py
+++ b/mediagoblin/db/sql/convert.py
@@ -15,26 +15,27 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 from copy import copy
+from itertools import chain, imap
 
-from mediagoblin.init import setup_global_and_app_config, setup_database
-from mediagoblin.db.mongo.util import ObjectId
+from mediagoblin.init import setup_global_and_app_config
 
-from mediagoblin.db.sql.models import (Base, User, MediaEntry, MediaComment,
-    Tag, MediaTag, MediaFile, MediaAttachmentFile, MigrationData)
-from mediagoblin.media_types.image.models import ImageData
-from mediagoblin.media_types.video.models import VideoData
+from mediagoblin.db.sql.base import Session
+from mediagoblin.db.sql.models_v0 import Base_v0
+from mediagoblin.db.sql.models_v0 import (User, MediaEntry, MediaComment,
+    Tag, MediaTag, MediaFile, MediaAttachmentFile, MigrationData,
+    ImageData, VideoData, AsciiData, AudioData)
 from mediagoblin.db.sql.open import setup_connection_and_db_from_config as \
     sql_connect
 from mediagoblin.db.mongo.open import setup_connection_and_db_from_config as \
     mongo_connect
-from mediagoblin.db.sql.base import Session
 
 
 obj_id_table = dict()
 
+
 def add_obj_ids(entry, new_entry):
     global obj_id_table
-    print "%r -> %r" % (entry._id, new_entry.id)
+    print "\t%r -> SQL id %r" % (entry._id, new_entry.id)
     obj_id_table[entry._id] = new_entry.id
@@ -43,6 +44,7 @@ def copy_attrs(entry, new_entry, attr_list):
         val = entry[a]
         setattr(new_entry, a, val)
 
+
 def copy_reference_attr(entry, new_entry, ref_attr):
     val = entry[ref_attr]
     val = obj_id_table[val]
@@ -115,12 +117,9 @@ def convert_image(mk_db):
             {'media_type': 'mediagoblin.media_types.image'}).sort('created'):
         media_data = copy(media.media_data)
 
-        # TODO: Fix after exif is migrated
-        media_data.pop('exif', None)
-
         if len(media_data):
             media_data_row = ImageData(**media_data)
-            media_data_row.media_entry = obj_id_table[media._id]
+            media_data_row.media_entry = obj_id_table[media['_id']]
             session.add(media_data_row)
 
     session.commit()
@@ -133,7 +132,7 @@ def convert_video(mk_db):
     for media in mk_db.MediaEntry.find(
             {'media_type': 'mediagoblin.media_types.video'}).sort('created'):
         media_data_row = VideoData(**media.media_data)
-        media_data_row.media_entry = obj_id_table[media._id]
+        media_data_row.media_entry = obj_id_table[media['_id']]
         session.add(media_data_row)
 
     session.commit()
@@ -178,53 +177,105 @@ def convert_media_comments(mk_db):
         copy_attrs(entry, new_entry,
             ('created', 'content',))
-        copy_reference_attr(entry, new_entry, "media_entry")
-        copy_reference_attr(entry, new_entry, "author")
-        session.add(new_entry)
-        session.flush()
-        add_obj_ids(entry, new_entry)
+        try:
+            copy_reference_attr(entry, new_entry, "media_entry")
+            copy_reference_attr(entry, new_entry, "author")
+        except KeyError as e:
+            print('KeyError in convert_media_comments(): {0}'.format(e))
+        else:
+            session.add(new_entry)
+            session.flush()
+            add_obj_ids(entry, new_entry)
 
     session.commit()
     session.close()
 
 
-def convert_add_migration_versions():
+media_types_tables = (
+    ("mediagoblin.media_types.image", (ImageData,)),
+    ("mediagoblin.media_types.video", (VideoData,)),
+    ("mediagoblin.media_types.ascii", (AsciiData,)),
+    ("mediagoblin.media_types.audio", (AudioData,)),
+    )
+
+
+def convert_add_migration_versions(dummy_sql_db):
     session = Session()
 
-    for name in ("__main__",
-                 "mediagoblin.media_types.image",
-                 "mediagoblin.media_types.video",
-                 ):
-        m = MigrationData(name=name, version=0)
+    for name in chain(("__main__",),
+                      imap(lambda e: e[0], media_types_tables)):
+        print "\tAdding %s" % (name,)
+        m = MigrationData(name=unicode(name), version=0)
         session.add(m)
 
     session.commit()
    session.close()
 
 
+def cleanup_sql_tables(sql_db):
+    for mt, table_list in media_types_tables:
+        session = Session()
+
+        count = session.query(MediaEntry.media_type). \
+            filter_by(media_type=unicode(mt)).count()
+        print "  %s: %d entries" % (mt, count)
+
+        if count == 0:
+            print "\tAnalyzing tables"
+            for tab in table_list:
+                cnt2 = session.query(tab).count()
+                print "\t  %s: %d entries" % (tab.__tablename__, cnt2)
+                assert cnt2 == 0
+
+            print "\tRemoving migration info"
+            mi = session.query(MigrationData).filter_by(name=unicode(mt)).one()
+            session.delete(mi)
+            session.commit()
+            session.close()
+
+            print "\tDropping tables"
+            tables = [model.__table__ for model in table_list]
+            Base_v0.metadata.drop_all(sql_db.engine, tables=tables)
+
+        session.close()
+
+
+def print_header(title):
+    print "\n=== %s ===" % (title,)
+
+
+convert_call_list = (
+    ("Converting Users", convert_users),
+    ("Converting Media Entries", convert_media_entries),
+    ("Converting Media Data for Images", convert_image),
+    ("Converting Media Data for Videos", convert_video),
+    ("Converting Tags for Media", convert_media_tags),
+    ("Converting Media Comments", convert_media_comments),
+    )
+
+sql_call_list = (
+    ("Filling Migration Tables", convert_add_migration_versions),
+    ("Analyzing/Cleaning SQL Data", cleanup_sql_tables),
+    )
+
+
 def run_conversion(config_name):
     global_config, app_config = setup_global_and_app_config(config_name)
 
     sql_conn, sql_db = sql_connect(app_config)
     mk_conn, mk_db = mongo_connect(app_config)
 
-    Base.metadata.create_all(sql_db.engine)
-
-    convert_users(mk_db)
-    Session.remove()
-    convert_media_entries(mk_db)
-    Session.remove()
-    convert_image(mk_db)
-    Session.remove()
-    convert_video(mk_db)
-    Session.remove()
-    convert_media_tags(mk_db)
-    Session.remove()
-    convert_media_comments(mk_db)
-    Session.remove()
-    convert_add_migration_versions()
-    Session.remove()
+    Base_v0.metadata.create_all(sql_db.engine)
+
+    for title, func in convert_call_list:
+        print_header(title)
+        func(mk_db)
+        Session.remove()
+
+    for title, func in sql_call_list:
+        print_header(title)
+        func(sql_db)
+        Session.remove()
 
 
 if __name__ == '__main__':
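In the rewritten convert.py above, convert_add_migration_versions() seeds one MigrationData row per name and cleanup_sql_tables() later matches the same names against MediaEntry.media_type; both take their names from the first element of each media_types_tables row. A dependency-free sketch of that iteration (a generator expression stands in for imap so the snippet also runs on Python 3):

from itertools import chain

media_types_tables = (
    ("mediagoblin.media_types.image", ("ImageData",)),
    ("mediagoblin.media_types.video", ("VideoData",)),
    ("mediagoblin.media_types.ascii", ("AsciiData",)),
    ("mediagoblin.media_types.audio", ("AudioData",)),
    )

# "__main__" first, then one entry per media type, as in
# convert_add_migration_versions().
for name in chain(("__main__",), (entry[0] for entry in media_types_tables)):
    print("would insert MigrationData(name=%r, version=0)" % name)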
diff --git a/mediagoblin/db/sql/extratypes.py b/mediagoblin/db/sql/extratypes.py
index 8e078f14..f2304af0 100644
--- a/mediagoblin/db/sql/extratypes.py
+++ b/mediagoblin/db/sql/extratypes.py
@@ -15,7 +15,7 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 
-from sqlalchemy.types import TypeDecorator, Unicode, VARCHAR
+from sqlalchemy.types import TypeDecorator, Unicode, TEXT
 import json
 
 
@@ -50,7 +50,7 @@ class PathTupleWithSlashes(TypeDecorator):
 class JSONEncoded(TypeDecorator):
     "Represents an immutable structure as a json-encoded string."
 
-    impl = VARCHAR
+    impl = TEXT
 
     def process_bind_param(self, value, dialect):
         if value is not None:
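Switching JSONEncoded's impl from VARCHAR to TEXT matters for the JSON blobs it stores (media_data fields such as exif_all and fail_metadata): on some backends an unsized VARCHAR needs an explicit length or is length-limited, while TEXT is not. A self-contained sketch of the same TypeDecorator pattern against in-memory SQLite; the JSONText name and the demo table are made up for the example:

import json

from sqlalchemy import Column, Integer, MetaData, Table, create_engine
from sqlalchemy.types import TEXT, TypeDecorator


class JSONText(TypeDecorator):
    """JSON-encode on the way in, decode on the way out, stored as TEXT."""

    impl = TEXT

    def process_bind_param(self, value, dialect):
        return json.dumps(value) if value is not None else None

    def process_result_value(self, value, dialect):
        return json.loads(value) if value is not None else None


engine = create_engine('sqlite://')
metadata = MetaData()
demo = Table('demo', metadata,
             Column('id', Integer, primary_key=True),
             Column('payload', JSONText()))
metadata.create_all(engine)

conn = engine.connect()
conn.execute(demo.insert().values(payload={'EXIF FNumber': '2.8'}))
print(conn.execute(demo.select()).fetchall())  # [(1, {'EXIF FNumber': '2.8'})]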
diff --git a/mediagoblin/db/sql/migrations.py b/mediagoblin/db/sql/migrations.py
index 98d0d0aa..3b7ee8b4 100644
--- a/mediagoblin/db/sql/migrations.py
+++ b/mediagoblin/db/sql/migrations.py
@@ -1,5 +1,5 @@
 # GNU MediaGoblin -- federated, autonomous media hosting
-# Copyright (C) 2011 MediaGoblin contributors.  See AUTHORS.
+# Copyright (C) 2011, 2012 MediaGoblin contributors.  See AUTHORS.
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -14,4 +14,22 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
+from sqlalchemy import MetaData, Table
+
+from mediagoblin.db.sql.util import RegisterMigration
+
+
 MIGRATIONS = {}
+
+
+@RegisterMigration(1, MIGRATIONS)
+def ogg_to_webm_audio(db_conn):
+    metadata = MetaData(bind=db_conn.bind)
+
+    file_keynames = Table('core__file_keynames', metadata, autoload=True,
+                          autoload_with=db_conn.bind)
+
+    db_conn.execute(
+        file_keynames.update().where(file_keynames.c.name=='ogg').
+            values(name='webm_audio')
+    )
diff --git a/mediagoblin/db/sql/models.py b/mediagoblin/db/sql/models.py
index edf3eb0d..ba28ab7b 100644
--- a/mediagoblin/db/sql/models.py
+++ b/mediagoblin/db/sql/models.py
@@ -91,10 +91,11 @@ class MediaEntry(Base, MediaEntryMixin):
     __tablename__ = "core__media_entries"
 
     id = Column(Integer, primary_key=True)
-    uploader = Column(Integer, ForeignKey('core__users.id'), nullable=False)
+    uploader = Column(Integer, ForeignKey(User.id), nullable=False, index=True)
     title = Column(Unicode, nullable=False)
     slug = Column(Unicode)
-    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
+        index=True)
     description = Column(UnicodeText)  # ??
     media_type = Column(Unicode, nullable=False)
     state = Column(Unicode, default=u'unprocessed', nullable=False)
@@ -189,8 +190,9 @@ class MediaEntry(Base, MediaEntryMixin):
             media_entry=self.id).first()
 
         # No media data, so actually add a new one
-        if not media_data:
+        if media_data is None:
             media_data = self.media_data_table(
+                media_entry=self.id,
                 **kwargs)
             session.add(media_data)
         # Update old media data
@@ -292,8 +294,8 @@ class MediaTag(Base):
     id = Column(Integer, primary_key=True)
     media_entry = Column(
         Integer, ForeignKey(MediaEntry.id),
-        nullable=False)
-    tag = Column(Integer, ForeignKey('core__tags.id'), nullable=False)
+        nullable=False, index=True)
+    tag = Column(Integer, ForeignKey(Tag.id), nullable=False, index=True)
     name = Column(Unicode)
     # created = Column(DateTime, nullable=False, default=datetime.datetime.now)
 
@@ -324,8 +326,8 @@ class MediaComment(Base, MediaCommentMixin):
 
     id = Column(Integer, primary_key=True)
     media_entry = Column(
-        Integer, ForeignKey('core__media_entries.id'), nullable=False)
-    author = Column(Integer, ForeignKey('core__users.id'), nullable=False)
+        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
+    author = Column(Integer, ForeignKey(User.id), nullable=False)
     created = Column(DateTime, nullable=False, default=datetime.datetime.now)
     content = Column(UnicodeText, nullable=False)
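The ogg_to_webm_audio migration above reflects core__file_keynames at migration time instead of importing the model, so it keeps working even after the models change. The same reflect-and-update steps against a throwaway in-memory SQLite table of that shape (assuming a SQLAlchemy version where autoload_with alone triggers reflection):

from sqlalchemy import (Column, Integer, MetaData, Table, Unicode,
                        create_engine)

engine = create_engine('sqlite://')
metadata = MetaData()
file_keynames = Table('core__file_keynames', metadata,
                      Column('id', Integer, primary_key=True),
                      Column('name', Unicode, unique=True))
metadata.create_all(engine)

conn = engine.connect()
conn.execute(file_keynames.insert().values(name=u'ogg'))

# Reflect the table from the live connection, then rename the keyname,
# mirroring what the migration does through db_conn.bind.
reflected = Table('core__file_keynames', MetaData(), autoload_with=conn)
conn.execute(
    reflected.update().where(reflected.c.name == u'ogg').
    values(name=u'webm_audio'))

print(conn.execute(reflected.select()).fetchall())  # name is now 'webm_audio'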
diff --git a/mediagoblin/db/sql/models_v0.py b/mediagoblin/db/sql/models_v0.py
new file mode 100644
index 00000000..06f87d28
--- /dev/null
+++ b/mediagoblin/db/sql/models_v0.py
@@ -0,0 +1,320 @@
+# GNU MediaGoblin -- federated, autonomous media hosting
+# Copyright (C) 2011, 2012 MediaGoblin contributors.  See AUTHORS.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""
+TODO: indexes on foreignkeys, where useful.
+"""
+
+
+import datetime
+import sys
+
+from sqlalchemy import (
+    Column, Integer, Unicode, UnicodeText, DateTime, Boolean, ForeignKey,
+    UniqueConstraint, PrimaryKeyConstraint, SmallInteger, Float)
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import relationship, backref
+from sqlalchemy.orm.collections import attribute_mapped_collection
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.util import memoized_property
+
+from mediagoblin.db.sql.extratypes import PathTupleWithSlashes, JSONEncoded
+from mediagoblin.db.sql.base import GMGTableBase
+from mediagoblin.db.sql.base import Session
+
+
+Base_v0 = declarative_base(cls=GMGTableBase)
+
+
+class User(Base_v0):
+    """
+    TODO: We should consider moving some rarely used fields
+    into some sort of "shadow" table.
+    """
+    __tablename__ = "core__users"
+
+    id = Column(Integer, primary_key=True)
+    username = Column(Unicode, nullable=False, unique=True)
+    email = Column(Unicode, nullable=False)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+    pw_hash = Column(Unicode, nullable=False)
+    email_verified = Column(Boolean, default=False)
+    status = Column(Unicode, default=u"needs_email_verification", nullable=False)
+    verification_key = Column(Unicode)
+    is_admin = Column(Boolean, default=False, nullable=False)
+    url = Column(Unicode)
+    bio = Column(UnicodeText)  # ??
+    fp_verification_key = Column(Unicode)
+    fp_token_expire = Column(DateTime)
+
+    ## TODO
+    # plugin data would be in a separate model
+
+
+class MediaEntry(Base_v0):
+    """
+    TODO: Consider fetching the media_files using join
+    """
+    __tablename__ = "core__media_entries"
+
+    id = Column(Integer, primary_key=True)
+    uploader = Column(Integer, ForeignKey(User.id), nullable=False, index=True)
+    title = Column(Unicode, nullable=False)
+    slug = Column(Unicode)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now,
+        index=True)
+    description = Column(UnicodeText)  # ??
+    media_type = Column(Unicode, nullable=False)
+    state = Column(Unicode, default=u'unprocessed', nullable=False)
+        # or use sqlalchemy.types.Enum?
+    license = Column(Unicode)
+
+    fail_error = Column(Unicode)
+    fail_metadata = Column(JSONEncoded)
+
+    queued_media_file = Column(PathTupleWithSlashes)
+
+    queued_task_id = Column(Unicode)
+
+    __table_args__ = (
+        UniqueConstraint('uploader', 'slug'),
+        {})
+
+    get_uploader = relationship(User)
+
+    media_files_helper = relationship("MediaFile",
+        collection_class=attribute_mapped_collection("name"),
+        cascade="all, delete-orphan"
+        )
+
+    attachment_files_helper = relationship("MediaAttachmentFile",
+        cascade="all, delete-orphan",
+        order_by="MediaAttachmentFile.created"
+        )
+
+    tags_helper = relationship("MediaTag",
+        cascade="all, delete-orphan"
+        )
+
+    def media_data_init(self, **kwargs):
+        """
+        Initialize or update the contents of a media entry's media_data row
+        """
+        session = Session()
+
+        media_data = session.query(self.media_data_table).filter_by(
+            media_entry=self.id).first()
+
+        # No media data, so actually add a new one
+        if media_data is None:
+            media_data = self.media_data_table(
+                media_entry=self.id,
+                **kwargs)
+            session.add(media_data)
+        # Update old media data
+        else:
+            for field, value in kwargs.iteritems():
+                setattr(media_data, field, value)
+
+    @memoized_property
+    def media_data_table(self):
+        # TODO: memoize this
+        models_module = self.media_type + '.models'
+        __import__(models_module)
+        return sys.modules[models_module].DATA_MODEL
+
+
+class FileKeynames(Base_v0):
+    """
+    keywords for various places.
+    currently the MediaFile keys
+    """
+    __tablename__ = "core__file_keynames"
+    id = Column(Integer, primary_key=True)
+    name = Column(Unicode, unique=True)
+
+    def __repr__(self):
+        return "<FileKeyname %r: %r>" % (self.id, self.name)
+
+    @classmethod
+    def find_or_new(cls, name):
+        t = cls.query.filter_by(name=name).first()
+        if t is not None:
+            return t
+        return cls(name=name)
+
+
+class MediaFile(Base_v0):
+    """
+    TODO: Highly consider moving "name" into a new table.
+    TODO: Consider preloading said table in software
+    """
+    __tablename__ = "core__mediafiles"
+
+    media_entry = Column(
+        Integer, ForeignKey(MediaEntry.id),
+        nullable=False)
+    name_id = Column(SmallInteger, ForeignKey(FileKeynames.id), nullable=False)
+    file_path = Column(PathTupleWithSlashes)
+
+    __table_args__ = (
+        PrimaryKeyConstraint('media_entry', 'name_id'),
+        {})
+
+    def __repr__(self):
+        return "<MediaFile %s: %r>" % (self.name, self.file_path)
+
+    name_helper = relationship(FileKeynames, lazy="joined", innerjoin=True)
+    name = association_proxy('name_helper', 'name',
+        creator=FileKeynames.find_or_new
+        )
+
+
+class MediaAttachmentFile(Base_v0):
+    __tablename__ = "core__attachment_files"
+
+    id = Column(Integer, primary_key=True)
+    media_entry = Column(
+        Integer, ForeignKey(MediaEntry.id),
+        nullable=False)
+    name = Column(Unicode, nullable=False)
+    filepath = Column(PathTupleWithSlashes)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+
+class Tag(Base_v0):
+    __tablename__ = "core__tags"
+
+    id = Column(Integer, primary_key=True)
+    slug = Column(Unicode, nullable=False, unique=True)
+
+    def __repr__(self):
+        return "<Tag %r: %r>" % (self.id, self.slug)
+
+    @classmethod
+    def find_or_new(cls, slug):
+        t = cls.query.filter_by(slug=slug).first()
+        if t is not None:
+            return t
+        return cls(slug=slug)
+
+
+class MediaTag(Base_v0):
+    __tablename__ = "core__media_tags"
+
+    id = Column(Integer, primary_key=True)
+    media_entry = Column(
+        Integer, ForeignKey(MediaEntry.id),
+        nullable=False, index=True)
+    tag = Column(Integer, ForeignKey(Tag.id), nullable=False, index=True)
+    name = Column(Unicode)
+    # created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+    __table_args__ = (
+        UniqueConstraint('tag', 'media_entry'),
+        {})
+
+    tag_helper = relationship(Tag)
+    slug = association_proxy('tag_helper', 'slug',
+        creator=Tag.find_or_new
+        )
+
+    def __init__(self, name=None, slug=None):
+        Base_v0.__init__(self)
+        if name is not None:
+            self.name = name
+        if slug is not None:
+            self.tag_helper = Tag.find_or_new(slug)
+
+
+class MediaComment(Base_v0):
+    __tablename__ = "core__media_comments"
+
+    id = Column(Integer, primary_key=True)
+    media_entry = Column(
+        Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
+    author = Column(Integer, ForeignKey(User.id), nullable=False)
+    created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+    content = Column(UnicodeText, nullable=False)
+
+    get_author = relationship(User)
+
+
+class ImageData(Base_v0):
+    __tablename__ = "image__mediadata"
+
+    # The primary key *and* reference to the main media_entry
+    media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
+        primary_key=True)
+    get_media_entry = relationship("MediaEntry",
+        backref=backref("image__media_data", cascade="all, delete-orphan"))
+
+    width = Column(Integer)
+    height = Column(Integer)
+    exif_all = Column(JSONEncoded)
+    gps_longitude = Column(Float)
+    gps_latitude = Column(Float)
+    gps_altitude = Column(Float)
+    gps_direction = Column(Float)
+
+
+class VideoData(Base_v0):
+    __tablename__ = "video__mediadata"
+
+    # The primary key *and* reference to the main media_entry
+    media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
+        primary_key=True)
+    get_media_entry = relationship("MediaEntry",
+        backref=backref("video__media_data", cascade="all, delete-orphan"))
+
+    width = Column(SmallInteger)
+    height = Column(SmallInteger)
+
+
+class AsciiData(Base_v0):
+    __tablename__ = "ascii__mediadata"
+
+    # The primary key *and* reference to the main media_entry
+    media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
+        primary_key=True)
+    get_media_entry = relationship("MediaEntry",
+        backref=backref("ascii__media_data", cascade="all, delete-orphan"))
+
+
+class AudioData(Base_v0):
+    __tablename__ = "audio__mediadata"
+
+    # The primary key *and* reference to the main media_entry
+    media_entry = Column(Integer, ForeignKey('core__media_entries.id'),
+        primary_key=True)
+    get_media_entry = relationship("MediaEntry",
+        backref=backref("audio__media_data", cascade="all, delete-orphan"))
+
+
+######################################################
+# Special, migrations-tracking table
+#
+# Not listed in MODELS because this is special and not
+# really migrated, but used for migrations (for now)
+######################################################
+
+class MigrationData(Base_v0):
+    __tablename__ = "core__migrations"
+
+    name = Column(Unicode, primary_key=True)
+    version = Column(Integer, nullable=False, default=0)
+
+######################################################
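MediaEntry.media_data_table is the one dynamic piece of models_v0.py: it maps the media_type string to a model class by importing '<media_type>.models' and reading its DATA_MODEL attribute (presumably ImageData, VideoData and friends in the real media-type packages). The import mechanics in isolation, with a stdlib module standing in for a media-type plugin:

import sys


def resolve(dotted_module, attr):
    # __import__ returns the top-level package, so the submodule has to be
    # fetched from sys.modules afterwards -- the same trick media_data_table
    # uses for '<media_type>.models' / DATA_MODEL.
    __import__(dotted_module)
    return getattr(sys.modules[dotted_module], attr)


# Stand-in target: 'os.path'/'join' instead of
# 'mediagoblin.media_types.image.models'/'DATA_MODEL'.
print(resolve('os.path', 'join'))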
diff --git a/mediagoblin/db/sql/open.py b/mediagoblin/db/sql/open.py
index b1f389e8..ce5f0604 100644
--- a/mediagoblin/db/sql/open.py
+++ b/mediagoblin/db/sql/open.py
@@ -18,8 +18,9 @@
 from sqlalchemy import create_engine
 import logging
 
-from mediagoblin.db.sql.base import Session
-from mediagoblin.db.sql.models import Base
+from mediagoblin.db.sql.base import Base, Session
+
+_log = logging.getLogger(__name__)
 
 
 class DatabaseMaster(object):
@@ -36,14 +37,29 @@ class DatabaseMaster(object):
         Session.add(obj)
         Session.flush()
 
+    def check_session_clean(self):
+        for dummy in Session():
+            _log.warn("STRANGE: There are elements in the sql session. "
+                      "Please report this and help us track this down.")
+            break
+
     def reset_after_request(self):
         Session.rollback()
         Session.remove()
 
 
+def load_models(app_config):
+    import mediagoblin.db.sql.models
+
+    if True:
+        for media_type in app_config['media_types']:
+            _log.debug("Loading %s.models", media_type)
+            __import__(media_type + ".models")
+
+
 def setup_connection_and_db_from_config(app_config):
     engine = create_engine(app_config['sql_engine'])
-    logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
+    # logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
     Session.configure(bind=engine)
 
     return "dummy conn", DatabaseMaster(engine)
diff --git a/mediagoblin/db/sql_switch.py b/mediagoblin/db/sql_switch.py
new file mode 100644
index 00000000..571adbdb
--- /dev/null
+++ b/mediagoblin/db/sql_switch.py
@@ -0,0 +1 @@
+use_sql = True
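The one-line sql_switch.py is what flips mediagoblin.db.open from the Mongo backend to the SQL backend: open.py imports use_sql inside a try/except ImportError (visible as hunk context above) and then re-exports the chosen backend's functions. A sketch of that dispatch which runs with or without MediaGoblin installed; the fall-back to use_sql = False is an assumption, since the except branch's body is not part of this diff:

# Sketch of the switch in mediagoblin/db/open.py.  The ImportError fallback
# below is assumed, not shown in the diff.
try:
    from mediagoblin.db.sql_switch import use_sql
except ImportError:
    use_sql = False

if use_sql:
    backend = "mediagoblin.db.sql.open"
else:
    backend = "mediagoblin.db.mongo.open"

print("would re-export setup_connection_and_db_from_config, "
      "check_db_migrations_current and load_models from " + backend)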