Diffstat (limited to 'mediagoblin/db')
-rw-r--r--  mediagoblin/db/base.py             |  12
-rw-r--r--  mediagoblin/db/extratypes.py       |  30
-rw-r--r--  mediagoblin/db/migration_tools.py  |  55
-rw-r--r--  mediagoblin/db/migrations.py       | 439
-rw-r--r--  mediagoblin/db/mixin.py            |  86
-rw-r--r--  mediagoblin/db/models.py           | 463
-rw-r--r--  mediagoblin/db/models_v0.py        |  23
-rw-r--r--  mediagoblin/db/open.py             |   4
-rw-r--r--  mediagoblin/db/util.py             |   3
9 files changed, 1052 insertions, 63 deletions
diff --git a/mediagoblin/db/base.py b/mediagoblin/db/base.py
index 699a503a..c0cefdc2 100644
--- a/mediagoblin/db/base.py
+++ b/mediagoblin/db/base.py
@@ -24,18 +24,6 @@ Session = scoped_session(sessionmaker())
class GMGTableBase(object):
query = Session.query_property()
- @classmethod
- def find(cls, query_dict):
- return cls.query.filter_by(**query_dict)
-
- @classmethod
- def find_one(cls, query_dict):
- return cls.query.filter_by(**query_dict).first()
-
- @classmethod
- def one(cls, query_dict):
- return cls.find(query_dict).one()
-
def get(self, key):
return getattr(self, key)
diff --git a/mediagoblin/db/extratypes.py b/mediagoblin/db/extratypes.py
index f2304af0..8e04d58d 100644
--- a/mediagoblin/db/extratypes.py
+++ b/mediagoblin/db/extratypes.py
@@ -15,6 +15,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from sqlalchemy.ext.mutable import Mutable
from sqlalchemy.types import TypeDecorator, Unicode, TEXT
import json
@@ -38,7 +39,7 @@ class PathTupleWithSlashes(TypeDecorator):
return value
-# The following class and only this one class is in very
+# The following two classes and only these two classes are in very
# large parts based on example code from sqlalchemy.
#
# The original copyright notice and license follows:
@@ -61,3 +62,30 @@ class JSONEncoded(TypeDecorator):
if value is not None:
value = json.loads(value)
return value
+
+
+class MutationDict(Mutable, dict):
+ @classmethod
+ def coerce(cls, key, value):
+ "Convert plain dictionaries to MutationDict."
+
+ if not isinstance(value, MutationDict):
+ if isinstance(value, dict):
+ return MutationDict(value)
+
+ # this call will raise ValueError
+ return Mutable.coerce(key, value)
+ else:
+ return value
+
+ def __setitem__(self, key, value):
+ "Detect dictionary set events and emit change events."
+
+ dict.__setitem__(self, key, value)
+ self.changed()
+
+ def __delitem__(self, key):
+ "Detect dictionary del events and emit change events."
+
+ dict.__delitem__(self, key)
+ self.changed()
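The point of pairing MutationDict with JSONEncoded is change tracking: a plain dict stored in a JSON column is only persisted when the attribute is reassigned, whereas a column declared with MutationDict.as_mutable(JSONEncoded) also picks up in-place edits. A minimal standalone sketch of the pattern (the Example table and its columns are illustrative, not MediaGoblin models):

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from mediagoblin.db.extratypes import JSONEncoded, MutationDict

Base = declarative_base()

class Example(Base):
    __tablename__ = 'example'
    id = Column(Integer, primary_key=True)
    # Without MutationDict, meta['key'] = value would go unnoticed at flush.
    meta = Column(MutationDict.as_mutable(JSONEncoded))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

row = Example(meta={'width': 640})
session.add(row)
session.commit()

row.meta['height'] = 480  # in-place edit; MutationDict.changed() marks it dirty
session.commit()          # the updated JSON string is written back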
diff --git a/mediagoblin/db/migration_tools.py b/mediagoblin/db/migration_tools.py
index c0c7e998..e39070c3 100644
--- a/mediagoblin/db/migration_tools.py
+++ b/mediagoblin/db/migration_tools.py
@@ -16,6 +16,7 @@
from mediagoblin.tools.common import simple_printer
from sqlalchemy import Table
+from sqlalchemy.sql import select
class TableAlreadyExists(Exception):
pass
@@ -29,7 +30,7 @@ class MigrationManager(object):
to the latest migrations, etc.
"""
- def __init__(self, name, models, migration_registry, session,
+ def __init__(self, name, models, foundations, migration_registry, session,
printer=simple_printer):
"""
Args:
@@ -40,6 +41,7 @@ class MigrationManager(object):
"""
self.name = unicode(name)
self.models = models
+ self.foundations = foundations
self.session = session
self.migration_registry = migration_registry
self._sorted_migrations = None
@@ -140,6 +142,18 @@ class MigrationManager(object):
self.session.bind,
tables=[model.__table__ for model in self.models])
+ def populate_table_foundations(self):
+ """
+ Create the table foundations (default rows) as laid out in FOUNDATIONS
+ in mediagoblin.db.models
+ """
+ for Model, rows in self.foundations.items():
+ self.printer(u' + Laying foundations for %s table\n' %
+ (Model.__name__))
+ for parameters in rows:
+ new_row = Model(**parameters)
+ self.session.add(new_row)
+
def create_new_migration_record(self):
"""
Create a new migration record for this migration set
@@ -175,8 +189,7 @@ class MigrationManager(object):
if self.name == u'__main__':
return u"main mediagoblin tables"
else:
- # TODO: Use the friendlier media manager "human readable" name
- return u'media type "%s"' % self.name
+ return u'plugin "%s"' % self.name
def init_or_migrate(self):
"""
@@ -203,9 +216,9 @@ class MigrationManager(object):
self.init_tables()
# auto-set at latest migration number
- self.create_new_migration_record()
-
+ self.create_new_migration_record()
self.printer(u"done.\n")
+ self.populate_table_foundations()
self.set_current_migration()
return u'inited'
@@ -274,3 +287,35 @@ def inspect_table(metadata, table_name):
"""Simple helper to get a ref to an already existing table"""
return Table(table_name, metadata, autoload=True,
autoload_with=metadata.bind)
+
+def replace_table_hack(db, old_table, replacement_table):
+ """
+ A function to fully replace a current table with a new one for
+ migrations. This is necessary because some changes are tricky to make
+ in certain situations; for example, dropping a boolean column in
+ sqlite is impossible without this method.
+
+ :param old_table A ref to the old table, obtained through
+ inspect_table
+
+ :param replacement_table A ref to the new table, obtained through
+ inspect_table
+
+ Users are encouraged to use sqlalchemy-migrate's table replacement
+ solutions, unless that is not possible... in which case, this
+ solution works, at least for sqlite.
+ """
+ surviving_columns = replacement_table.columns.keys()
+ old_table_name = old_table.name
+ for row in db.execute(select(
+ [column for column in old_table.columns
+ if column.name in surviving_columns])):
+
+ db.execute(replacement_table.insert().values(**row))
+ db.commit()
+
+ old_table.drop()
+ db.commit()
+
+ replacement_table.rename(old_table_name)
+ db.commit()
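replace_table_hack is the workhorse of the sqlite branch of migration 18 below. The general shape of a migration that uses it looks roughly like the following sketch; the Widget_vR1 model and the core__widgets/rename__widgets tables are hypothetical stand-ins:

from sqlalchemy import Column, Integer, MetaData, Unicode
from sqlalchemy.ext.declarative import declarative_base
from migrate import changeset  # noqa: enables Column.drop() on reflected tables

from mediagoblin.db.migration_tools import inspect_table, replace_table_hack

class Widget_vR1(declarative_base()):
    """Hypothetical replacement table that omits the unwanted boolean."""
    __tablename__ = 'rename__widgets'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode, nullable=False)

def drop_widget_boolean(db):
    metadata = MetaData(bind=db.bind)
    old_table = inspect_table(metadata, 'core__widgets')

    if db.bind.url.drivername == 'sqlite':
        # sqlite cannot drop the column directly: create the slimmer table,
        # copy the surviving columns across, drop the old table and rename.
        Widget_vR1.__table__.create(db.bind)
        db.commit()
        new_table = inspect_table(metadata, 'rename__widgets')
        replace_table_hack(db, old_table, new_table)
    else:
        old_table.columns['is_shiny'].drop()
        db.commit()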
diff --git a/mediagoblin/db/migrations.py b/mediagoblin/db/migrations.py
index 2c553396..426080a2 100644
--- a/mediagoblin/db/migrations.py
+++ b/mediagoblin/db/migrations.py
@@ -19,14 +19,18 @@ import uuid
from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
Integer, Unicode, UnicodeText, DateTime,
- ForeignKey)
+ ForeignKey, Date)
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import and_
from migrate.changeset.constraint import UniqueConstraint
-from mediagoblin.db.migration_tools import RegisterMigration, inspect_table
-from mediagoblin.db.models import MediaEntry, Collection, User
+
+from mediagoblin.db.extratypes import JSONEncoded, MutationDict
+from mediagoblin.db.migration_tools import (
+ RegisterMigration, inspect_table, replace_table_hack)
+from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
+ Privilege)
MIGRATIONS = {}
@@ -287,3 +291,432 @@ def unique_collections_slug(db):
constraint.create()
db.commit()
+
+@RegisterMigration(11, MIGRATIONS)
+def drop_token_related_User_columns(db):
+ """
+ Drop unneeded columns from the User table after switching to using
+ itsdangerous tokens for email and forgot password verification.
+ """
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, 'core__users')
+
+ verification_key = user_table.columns['verification_key']
+ fp_verification_key = user_table.columns['fp_verification_key']
+ fp_token_expire = user_table.columns['fp_token_expire']
+
+ verification_key.drop()
+ fp_verification_key.drop()
+ fp_token_expire.drop()
+
+ db.commit()
+
+
+class CommentSubscription_v0(declarative_base()):
+ __tablename__ = 'core__comment_subscriptions'
+ id = Column(Integer, primary_key=True)
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)
+
+ user_id = Column(Integer, ForeignKey(User.id), nullable=False)
+
+ notify = Column(Boolean, nullable=False, default=True)
+ send_email = Column(Boolean, nullable=False, default=True)
+
+
+class Notification_v0(declarative_base()):
+ __tablename__ = 'core__notifications'
+ id = Column(Integer, primary_key=True)
+ type = Column(Unicode)
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ user_id = Column(Integer, ForeignKey(User.id), nullable=False,
+ index=True)
+ seen = Column(Boolean, default=lambda: False, index=True)
+
+
+class CommentNotification_v0(Notification_v0):
+ __tablename__ = 'core__comment_notifications'
+ id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)
+
+ subject_id = Column(Integer, ForeignKey(MediaComment.id))
+
+
+class ProcessingNotification_v0(Notification_v0):
+ __tablename__ = 'core__processing_notifications'
+
+ id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)
+
+ subject_id = Column(Integer, ForeignKey(MediaEntry.id))
+
+
+@RegisterMigration(12, MIGRATIONS)
+def add_new_notification_tables(db):
+ metadata = MetaData(bind=db.bind)
+
+ user_table = inspect_table(metadata, 'core__users')
+ mediaentry_table = inspect_table(metadata, 'core__media_entries')
+ mediacomment_table = inspect_table(metadata, 'core__media_comments')
+
+ CommentSubscription_v0.__table__.create(db.bind)
+
+ Notification_v0.__table__.create(db.bind)
+ CommentNotification_v0.__table__.create(db.bind)
+ ProcessingNotification_v0.__table__.create(db.bind)
+
+ db.commit()
+
+
+@RegisterMigration(13, MIGRATIONS)
+def pw_hash_nullable(db):
+ """Make pw_hash column nullable"""
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+
+ user_table.c.pw_hash.alter(nullable=True)
+
+ # sqlite+sqlalchemy seems to drop this constraint during the
+ # migration, so we manually add it back here for now.
+ if db.bind.url.drivername == 'sqlite':
+ constraint = UniqueConstraint('username', table=user_table)
+ constraint.create()
+
+ db.commit()
+
+
+# oauth1 migrations
+class Client_v0(declarative_base()):
+ """
+ Model representing a client - Used for API Auth
+ """
+ __tablename__ = "core__clients"
+
+ id = Column(Unicode, nullable=True, primary_key=True)
+ secret = Column(Unicode, nullable=False)
+ expirey = Column(DateTime, nullable=True)
+ application_type = Column(Unicode, nullable=False)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ # optional stuff
+ redirect_uri = Column(JSONEncoded, nullable=True)
+ logo_url = Column(Unicode, nullable=True)
+ application_name = Column(Unicode, nullable=True)
+ contacts = Column(JSONEncoded, nullable=True)
+
+ def __repr__(self):
+ if self.application_name:
+ return "<Client {0} - {1}>".format(self.application_name, self.id)
+ else:
+ return "<Client {0}>".format(self.id)
+
+class RequestToken_v0(declarative_base()):
+ """
+ Model for representing the request tokens
+ """
+ __tablename__ = "core__request_tokens"
+
+ token = Column(Unicode, primary_key=True)
+ secret = Column(Unicode, nullable=False)
+ client = Column(Unicode, ForeignKey(Client_v0.id))
+ user = Column(Integer, ForeignKey(User.id), nullable=True)
+ used = Column(Boolean, default=False)
+ authenticated = Column(Boolean, default=False)
+ verifier = Column(Unicode, nullable=True)
+ callback = Column(Unicode, nullable=False, default=u"oob")
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+class AccessToken_v0(declarative_base()):
+ """
+ Model for representing the access tokens
+ """
+ __tablename__ = "core__access_tokens"
+
+ token = Column(Unicode, nullable=False, primary_key=True)
+ secret = Column(Unicode, nullable=False)
+ user = Column(Integer, ForeignKey(User.id))
+ request_token = Column(Unicode, ForeignKey(RequestToken_v0.token))
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+
+class NonceTimestamp_v0(declarative_base()):
+ """
+ A place the timestamp and nonce can be stored - this is for OAuth1
+ """
+ __tablename__ = "core__nonce_timestamps"
+
+ nonce = Column(Unicode, nullable=False, primary_key=True)
+ timestamp = Column(DateTime, nullable=False, primary_key=True)
+
+
+@RegisterMigration(14, MIGRATIONS)
+def create_oauth1_tables(db):
+ """ Creates the OAuth1 tables """
+
+ Client_v0.__table__.create(db.bind)
+ RequestToken_v0.__table__.create(db.bind)
+ AccessToken_v0.__table__.create(db.bind)
+ NonceTimestamp_v0.__table__.create(db.bind)
+
+ db.commit()
+
+
+@RegisterMigration(15, MIGRATIONS)
+def wants_notifications(db):
+ """Add a wants_notifications field to User model"""
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+ col = Column('wants_notifications', Boolean, default=True)
+ col.create(user_table)
+ db.commit()
+
+
+
+@RegisterMigration(16, MIGRATIONS)
+def upload_limits(db):
+ """Add user upload limit columns"""
+ metadata = MetaData(bind=db.bind)
+
+ user_table = inspect_table(metadata, 'core__users')
+ media_entry_table = inspect_table(metadata, 'core__media_entries')
+
+ col = Column('uploaded', Integer, default=0)
+ col.create(user_table)
+
+ col = Column('upload_limit', Integer)
+ col.create(user_table)
+
+ col = Column('file_size', Integer, default=0)
+ col.create(media_entry_table)
+
+ db.commit()
+
+
+@RegisterMigration(17, MIGRATIONS)
+def add_file_metadata(db):
+ """Add file_metadata to MediaFile"""
+ metadata = MetaData(bind=db.bind)
+ media_file_table = inspect_table(metadata, "core__mediafiles")
+
+ col = Column('file_metadata', MutationDict.as_mutable(JSONEncoded))
+ col.create(media_file_table)
+
+ db.commit()
+
+###################
+# Moderation tables
+###################
+
+class ReportBase_v0(declarative_base()):
+ __tablename__ = 'core__reports'
+ id = Column(Integer, primary_key=True)
+ reporter_id = Column(Integer, ForeignKey(User.id), nullable=False)
+ report_content = Column(UnicodeText)
+ reported_user_id = Column(Integer, ForeignKey(User.id), nullable=False)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ discriminator = Column('type', Unicode(50))
+ resolver_id = Column(Integer, ForeignKey(User.id))
+ resolved = Column(DateTime)
+ result = Column(UnicodeText)
+ __mapper_args__ = {'polymorphic_on': discriminator}
+
+
+class CommentReport_v0(ReportBase_v0):
+ __tablename__ = 'core__reports_on_comments'
+ __mapper_args__ = {'polymorphic_identity': 'comment_report'}
+
+ id = Column('id',Integer, ForeignKey('core__reports.id'),
+ primary_key=True)
+ comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
+
+
+class MediaReport_v0(ReportBase_v0):
+ __tablename__ = 'core__reports_on_media'
+ __mapper_args__ = {'polymorphic_identity': 'media_report'}
+
+ id = Column('id',Integer, ForeignKey('core__reports.id'), primary_key=True)
+ media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=True)
+
+
+class UserBan_v0(declarative_base()):
+ __tablename__ = 'core__user_bans'
+ user_id = Column(Integer, ForeignKey(User.id), nullable=False,
+ primary_key=True)
+ expiration_date = Column(Date)
+ reason = Column(UnicodeText, nullable=False)
+
+
+class Privilege_v0(declarative_base()):
+ __tablename__ = 'core__privileges'
+ id = Column(Integer, nullable=False, primary_key=True, unique=True)
+ privilege_name = Column(Unicode, nullable=False, unique=True)
+
+
+class PrivilegeUserAssociation_v0(declarative_base()):
+ __tablename__ = 'core__privileges_users'
+ privilege_id = Column(
+ 'core__privilege_id',
+ Integer,
+ ForeignKey(User.id),
+ primary_key=True)
+ user_id = Column(
+ 'core__user_id',
+ Integer,
+ ForeignKey(Privilege.id),
+ primary_key=True)
+
+
+PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':u'admin'},
+ {'privilege_name':u'moderator'},
+ {'privilege_name':u'uploader'},
+ {'privilege_name':u'reporter'},
+ {'privilege_name':u'commenter'},
+ {'privilege_name':u'active'}]
+
+
+# vR1 stands for "version Rename 1". This only exists because we need
+# to deal with dropping some booleans and it's otherwise impossible
+# with sqlite.
+
+class User_vR1(declarative_base()):
+ __tablename__ = 'rename__users'
+ id = Column(Integer, primary_key=True)
+ username = Column(Unicode, nullable=False, unique=True)
+ email = Column(Unicode, nullable=False)
+ pw_hash = Column(Unicode)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ wants_comment_notification = Column(Boolean, default=True)
+ wants_notifications = Column(Boolean, default=True)
+ license_preference = Column(Unicode)
+ url = Column(Unicode)
+ bio = Column(UnicodeText) # ??
+ uploaded = Column(Integer, default=0)
+ upload_limit = Column(Integer)
+
+
+@RegisterMigration(18, MIGRATIONS)
+def create_moderation_tables(db):
+
+ # First, we will create the new tables in the database.
+ #--------------------------------------------------------------------------
+ ReportBase_v0.__table__.create(db.bind)
+ CommentReport_v0.__table__.create(db.bind)
+ MediaReport_v0.__table__.create(db.bind)
+ UserBan_v0.__table__.create(db.bind)
+ Privilege_v0.__table__.create(db.bind)
+ PrivilegeUserAssociation_v0.__table__.create(db.bind)
+
+ db.commit()
+
+ # Then initialize the tables that we will later use
+ #--------------------------------------------------------------------------
+ metadata = MetaData(bind=db.bind)
+ privileges_table= inspect_table(metadata, "core__privileges")
+ user_table = inspect_table(metadata, 'core__users')
+ user_privilege_assoc = inspect_table(
+ metadata, 'core__privileges_users')
+
+ # This section initializes the default Privilege foundations, that
+ # would be created through the FOUNDATIONS system in a new instance
+ #--------------------------------------------------------------------------
+ for parameters in PRIVILEGE_FOUNDATIONS_v0:
+ db.execute(privileges_table.insert().values(**parameters))
+
+ db.commit()
+
+ # This next section takes the information from the old is_admin and status
+ # columns and converts those to the new privilege system
+ #--------------------------------------------------------------------------
+ admin_users_ids, active_users_ids, inactive_users_ids = (
+ db.execute(
+ user_table.select().where(
+ user_table.c.is_admin==True)).fetchall(),
+ db.execute(
+ user_table.select().where(
+ user_table.c.is_admin==False).where(
+ user_table.c.status==u"active")).fetchall(),
+ db.execute(
+ user_table.select().where(
+ user_table.c.is_admin==False).where(
+ user_table.c.status!=u"active")).fetchall())
+
+ # Get the ids for each of the privileges so we can reference them ~~~~~~~~~
+ (admin_privilege_id, uploader_privilege_id,
+ reporter_privilege_id, commenter_privilege_id,
+ active_privilege_id) = [
+ db.execute(privileges_table.select().where(
+ privileges_table.c.privilege_name==privilege_name)).first()['id']
+ for privilege_name in
+ [u"admin",u"uploader",u"reporter",u"commenter",u"active"]
+ ]
+
+ # Give each user the appropriate privileges depending on whether they are
+ # an admin, an active user or an inactive user ~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ for admin_user in admin_users_ids:
+ admin_user_id = admin_user['id']
+ for privilege_id in [admin_privilege_id, uploader_privilege_id,
+ reporter_privilege_id, commenter_privilege_id,
+ active_privilege_id]:
+ db.execute(user_privilege_assoc.insert().values(
+ core__privilege_id=admin_user_id,
+ core__user_id=privilege_id))
+
+ for active_user in active_users_ids:
+ active_user_id = active_user['id']
+ for privilege_id in [uploader_privilege_id, reporter_privilege_id,
+ commenter_privilege_id, active_privilege_id]:
+ db.execute(user_privilege_assoc.insert().values(
+ core__privilege_id=active_user_id,
+ core__user_id=privilege_id))
+
+ for inactive_user in inactive_users_ids:
+ inactive_user_id = inactive_user['id']
+ for privilege_id in [uploader_privilege_id, reporter_privilege_id,
+ commenter_privilege_id]:
+ db.execute(user_privilege_assoc.insert().values(
+ core__privilege_id=inactive_user_id,
+ core__user_id=privilege_id))
+
+ db.commit()
+
+ # And then, once the information is taken from is_admin & status columns
+ # we drop all of the vestigial columns from the User table.
+ #--------------------------------------------------------------------------
+ if db.bind.url.drivername == 'sqlite':
+ # SQLite has some issues that make it *impossible* to drop boolean
+ # columns. So, the following code is a very hacky workaround which
+ # makes it possible. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ User_vR1.__table__.create(db.bind)
+ db.commit()
+ new_user_table = inspect_table(metadata, 'rename__users')
+ replace_table_hack(db, user_table, new_user_table)
+ else:
+ # If the db is not run using SQLite, this process is much simpler ~~~~~
+
+ status = user_table.columns['status']
+ email_verified = user_table.columns['email_verified']
+ is_admin = user_table.columns['is_admin']
+ status.drop()
+ email_verified.drop()
+ is_admin.drop()
+
+ db.commit()
+
+
+@RegisterMigration(19, MIGRATIONS)
+def drop_MediaEntry_collected(db):
+ """
+ Drop unused MediaEntry.collected column
+ """
+ metadata = MetaData(bind=db.bind)
+
+ media_collected = inspect_table(metadata, 'core__media_entries')
+ media_collected = media_collected.columns['collected']
+
+ media_collected.drop()
+
+ db.commit()
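Every migration added in this file follows the same shape: a function decorated with RegisterMigration, which records it in the MIGRATIONS registry under its version number so MigrationManager can run it in order. A hedged sketch of what a hypothetical follow-up migration would look like (the version number and the favorite_color column are illustrative only):

from sqlalchemy import Column, MetaData, Unicode
from migrate import changeset  # noqa: provides col.create() / col.drop()

from mediagoblin.db.migration_tools import RegisterMigration, inspect_table

MIGRATIONS = {}  # in MediaGoblin this is the registry defined in migrations.py

@RegisterMigration(20, MIGRATIONS)
def add_favorite_color(db):
    """Hypothetical migration: add a favorite_color column to User."""
    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, 'core__users')

    col = Column('favorite_color', Unicode)
    col.create(user_table)

    db.commit()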
diff --git a/mediagoblin/db/mixin.py b/mediagoblin/db/mixin.py
index 388bac89..25ce6642 100644
--- a/mediagoblin/db/mixin.py
+++ b/mediagoblin/db/mixin.py
@@ -28,30 +28,24 @@ real objects.
"""
import uuid
+import re
+from datetime import datetime
from werkzeug.utils import cached_property
from mediagoblin import mg_globals
-from mediagoblin.auth import lib as auth_lib
-from mediagoblin.media_types import get_media_managers, FileTypeNotSupported
+from mediagoblin.media_types import FileTypeNotSupported
from mediagoblin.tools import common, licenses
+from mediagoblin.tools.pluginapi import hook_handle
from mediagoblin.tools.text import cleaned_markdown_conversion
from mediagoblin.tools.url import slugify
class UserMixin(object):
- def check_login(self, password):
- """
- See if a user can login with this password
- """
- return auth_lib.bcrypt_check_password(
- password, self.pw_hash)
-
@property
def bio_html(self):
return cleaned_markdown_conversion(self.bio)
-
class GenerateSlugMixin(object):
"""
Mixin to add a generate_slug method to objects.
@@ -208,14 +202,14 @@ class MediaEntryMixin(GenerateSlugMixin):
Raises FileTypeNotSupported in case no such manager is enabled
"""
- # TODO, we should be able to make this a simple lookup rather
- # than iterating through all media managers.
- for media_type, manager in get_media_managers():
- if media_type == self.media_type:
- return manager(self)
+ manager = hook_handle(('media_manager', self.media_type))
+ if manager:
+ return manager(self)
+
# Not found? Then raise an error
raise FileTypeNotSupported(
- "MediaManager not in enabled types. Check media_types in config?")
+ "MediaManager not in enabled types. Check media_type plugins are"
+ " enabled in config?")
def get_fail_exception(self):
"""
@@ -229,15 +223,60 @@ class MediaEntryMixin(GenerateSlugMixin):
return licenses.get_license_by_url(self.license or "")
def exif_display_iter(self):
- from mediagoblin.tools.exif import USEFUL_TAGS
+ if not self.media_data:
+ return
+ exif_all = self.media_data.get("exif_all")
+ for key in exif_all:
+ label = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', key)
+ yield label.replace('EXIF', '').replace('Image', ''), exif_all[key]
+
+ def exif_display_data_short(self):
+ """Display a very short practical version of exif info"""
if not self.media_data:
return
+
exif_all = self.media_data.get("exif_all")
- for key in USEFUL_TAGS:
- if key in exif_all:
- yield key, exif_all[key]
+ exif_short = {}
+
+ if 'Image DateTimeOriginal' in exif_all:
+ # format date taken
+ takendate = datetime.strptime(
+ exif_all['Image DateTimeOriginal']['printable'],
+ '%Y:%m:%d %H:%M:%S').date()
+ taken = takendate.strftime('%B %d %Y')
+
+ exif_short.update({'Date Taken': taken})
+
+ aperture = None
+ if 'EXIF FNumber' in exif_all:
+ fnum = str(exif_all['EXIF FNumber']['printable']).split('/')
+
+ # calculate aperture
+ if len(fnum) == 2:
+ aperture = "f/%.1f" % (float(fnum[0])/float(fnum[1]))
+ elif fnum[0] != 'None':
+ aperture = "f/%s" % (fnum[0])
+
+ if aperture:
+ exif_short.update({'Aperture': aperture})
+
+ short_keys = [
+ ('Camera', 'Image Model', None),
+ ('Exposure', 'EXIF ExposureTime', lambda x: '%s sec' % x),
+ ('ISO Speed', 'EXIF ISOSpeedRatings', None),
+ ('Focal Length', 'EXIF FocalLength', lambda x: '%s mm' % x)]
+
+ for label, key, fmt_func in short_keys:
+ try:
+ val = fmt_func(exif_all[key]['printable']) if fmt_func \
+ else exif_all[key]['printable']
+ exif_short.update({label: val})
+ except KeyError:
+ pass
+
+ return exif_short
class MediaCommentMixin(object):
@@ -249,6 +288,13 @@ class MediaCommentMixin(object):
"""
return cleaned_markdown_conversion(self.content)
+ def __repr__(self):
+ return '<{klass} #{id} {author} "{comment}">'.format(
+ klass=self.__class__.__name__,
+ id=self.id,
+ author=self.get_author,
+ comment=self.content)
+
class CollectionMixin(GenerateSlugMixin):
def check_slug_used(self, slug):
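To make the FNumber handling in exif_display_data_short above concrete: EXIF usually stores the f-number as a rational 'printable' string, so the code splits on '/' and divides. A small worked example of that logic in isolation:

def format_aperture(printable):
    """Same arithmetic as the aperture branch above, pulled out for clarity."""
    fnum = str(printable).split('/')
    if len(fnum) == 2:
        return "f/%.1f" % (float(fnum[0]) / float(fnum[1]))
    elif fnum[0] != 'None':
        return "f/%s" % fnum[0]
    return None

print(format_aperture('28/10'))  # -> f/2.8
print(format_aperture('4'))      # -> f/4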
diff --git a/mediagoblin/db/models.py b/mediagoblin/db/models.py
index 2412706e..b750375d 100644
--- a/mediagoblin/db/models.py
+++ b/mediagoblin/db/models.py
@@ -23,16 +23,18 @@ import datetime
from sqlalchemy import Column, Integer, Unicode, UnicodeText, DateTime, \
Boolean, ForeignKey, UniqueConstraint, PrimaryKeyConstraint, \
- SmallInteger
-from sqlalchemy.orm import relationship, backref
+ SmallInteger, Date
+from sqlalchemy.orm import relationship, backref, with_polymorphic
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.sql.expression import desc
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.util import memoized_property
-from mediagoblin.db.extratypes import PathTupleWithSlashes, JSONEncoded
+from mediagoblin.db.extratypes import (PathTupleWithSlashes, JSONEncoded,
+ MutationDict)
from mediagoblin.db.base import Base, DictReadAttrProxy
-from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, MediaCommentMixin, CollectionMixin, CollectionItemMixin
+from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, \
+ MediaCommentMixin, CollectionMixin, CollectionItemMixin
from mediagoblin.tools.files import delete_media_files
from mediagoblin.tools.common import import_component
@@ -46,6 +48,7 @@ from migrate import changeset
_log = logging.getLogger(__name__)
+
class User(Base, UserMixin):
"""
TODO: We should consider moving some rarely used fields
@@ -55,21 +58,22 @@ class User(Base, UserMixin):
id = Column(Integer, primary_key=True)
username = Column(Unicode, nullable=False, unique=True)
+ # Note: no db uniqueness constraint on email because it's not
+ # reliable (many email systems are case insensitive despite the
+ # RFC) and because it would be a mess to implement at this
+ # point.
email = Column(Unicode, nullable=False)
+ pw_hash = Column(Unicode)
created = Column(DateTime, nullable=False, default=datetime.datetime.now)
- pw_hash = Column(Unicode, nullable=False)
- email_verified = Column(Boolean, default=False)
- status = Column(Unicode, default=u"needs_email_verification", nullable=False)
# Intended to be nullable=False, but migrations would not work for it
# set to nullable=True implicitly.
wants_comment_notification = Column(Boolean, default=True)
+ wants_notifications = Column(Boolean, default=True)
license_preference = Column(Unicode)
- verification_key = Column(Unicode)
- is_admin = Column(Boolean, default=False, nullable=False)
url = Column(Unicode)
bio = Column(UnicodeText) # ??
- fp_verification_key = Column(Unicode)
- fp_token_expire = Column(DateTime)
+ uploaded = Column(Integer, default=0)
+ upload_limit = Column(Integer)
## TODO
# plugin data would be in a separate model
@@ -78,8 +82,8 @@ class User(Base, UserMixin):
return '<{0} #{1} {2} {3} "{4}">'.format(
self.__class__.__name__,
self.id,
- 'verified' if self.email_verified else 'non-verified',
- 'admin' if self.is_admin else 'user',
+ 'verified' if self.has_privilege(u'active') else 'non-verified',
+ 'admin' if self.has_privilege(u'admin') else 'user',
self.username)
def delete(self, **kwargs):
@@ -101,6 +105,102 @@ class User(Base, UserMixin):
super(User, self).delete(**kwargs)
_log.info('Deleted user "{0}" account'.format(self.username))
+ def has_privilege(self, *priv_names):
+ """
+ This method checks whether a user has the privileges needed to
+ access a piece of content.
+
+ :param priv_names A variable number of unicode objects which
+ represent the different privileges which may give
+ the user access to this content. If you pass
+ multiple arguments, the user will be granted
+ access if they have ANY of the privileges
+ passed.
+ """
+ if len(priv_names) == 1:
+ priv = Privilege.query.filter(
+ Privilege.privilege_name==priv_names[0]).one()
+ return (priv in self.all_privileges)
+ elif len(priv_names) > 1:
+ return self.has_privilege(priv_names[0]) or \
+ self.has_privilege(*priv_names[1:])
+ return False
+
+ def is_banned(self):
+ """
+ Checks if this user is banned.
+
+ :returns True if self is banned
+ :returns False if self is not
+ """
+ return UserBan.query.get(self.id) is not None
+
+
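Because has_privilege grants access when the user holds ANY of the named privileges, callers can express "admin or moderator" in a single call. A small illustrative helper (the function itself is hypothetical, not part of this changeset):

def may_moderate(user):
    # Admins and moderators both pass; banned accounts never do.
    return (user.has_privilege(u'admin', u'moderator')
            and not user.is_banned())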
+class Client(Base):
+ """
+ Model representing a client - Used for API Auth
+ """
+ __tablename__ = "core__clients"
+
+ id = Column(Unicode, nullable=True, primary_key=True)
+ secret = Column(Unicode, nullable=False)
+ expirey = Column(DateTime, nullable=True)
+ application_type = Column(Unicode, nullable=False)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ # optional stuff
+ redirect_uri = Column(JSONEncoded, nullable=True)
+ logo_url = Column(Unicode, nullable=True)
+ application_name = Column(Unicode, nullable=True)
+ contacts = Column(JSONEncoded, nullable=True)
+
+ def __repr__(self):
+ if self.application_name:
+ return "<Client {0} - {1}>".format(self.application_name, self.id)
+ else:
+ return "<Client {0}>".format(self.id)
+
+class RequestToken(Base):
+ """
+ Model for representing the request tokens
+ """
+ __tablename__ = "core__request_tokens"
+
+ token = Column(Unicode, primary_key=True)
+ secret = Column(Unicode, nullable=False)
+ client = Column(Unicode, ForeignKey(Client.id))
+ user = Column(Integer, ForeignKey(User.id), nullable=True)
+ used = Column(Boolean, default=False)
+ authenticated = Column(Boolean, default=False)
+ verifier = Column(Unicode, nullable=True)
+ callback = Column(Unicode, nullable=False, default=u"oob")
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+class AccessToken(Base):
+ """
+ Model for representing the access tokens
+ """
+ __tablename__ = "core__access_tokens"
+
+ token = Column(Unicode, nullable=False, primary_key=True)
+ secret = Column(Unicode, nullable=False)
+ user = Column(Integer, ForeignKey(User.id))
+ request_token = Column(Unicode, ForeignKey(RequestToken.token))
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+
+class NonceTimestamp(Base):
+ """
+ A place the timestamp and nonce can be stored - this is for OAuth1
+ """
+ __tablename__ = "core__nonce_timestamps"
+
+ nonce = Column(Unicode, nullable=False, primary_key=True)
+ timestamp = Column(DateTime, nullable=False, primary_key=True)
+
class MediaEntry(Base, MediaEntryMixin):
"""
@@ -119,7 +219,7 @@ class MediaEntry(Base, MediaEntryMixin):
state = Column(Unicode, default=u'unprocessed', nullable=False)
# or use sqlalchemy.types.Enum?
license = Column(Unicode)
- collected = Column(Integer, default=0)
+ file_size = Column(Integer, default=0)
fail_error = Column(Unicode)
fail_metadata = Column(JSONEncoded)
@@ -194,6 +294,35 @@ class MediaEntry(Base, MediaEntryMixin):
if media is not None:
return media.url_for_self(urlgen)
+ def get_file_metadata(self, file_key, metadata_key=None):
+ """
+ Return the file_metadata dict of a MediaFile. If metadata_key is given,
+ return the value of the key.
+ """
+ media_file = MediaFile.query.filter_by(media_entry=self.id,
+ name=unicode(file_key)).first()
+
+ if media_file:
+ if metadata_key:
+ return media_file.file_metadata.get(metadata_key, None)
+
+ return media_file.file_metadata
+
+ def set_file_metadata(self, file_key, **kwargs):
+ """
+ Update the file_metadata of a MediaFile.
+ """
+ media_file = MediaFile.query.filter_by(media_entry=self.id,
+ name=unicode(file_key)).first()
+
+ file_metadata = media_file.file_metadata or {}
+
+ for key, value in kwargs.iteritems():
+ file_metadata[key] = value
+
+ media_file.file_metadata = file_metadata
+ media_file.save()
+
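A short usage sketch for the two helpers above, assuming an existing MediaEntry instance named entry and a stored file keyed 'medium' (both names are illustrative):

# Store extra per-file information on the 'medium' MediaFile...
entry.set_file_metadata(u'medium', width=640, height=480)

# ...and read it back, either the whole dict or a single key.
all_meta = entry.get_file_metadata(u'medium')
width = entry.get_file_metadata(u'medium', 'width')  # -> 640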
@property
def media_data(self):
return getattr(self, self.media_data_ref)
@@ -290,6 +419,7 @@ class MediaFile(Base):
nullable=False)
name_id = Column(SmallInteger, ForeignKey(FileKeynames.id), nullable=False)
file_path = Column(PathTupleWithSlashes)
+ file_metadata = Column(MutationDict.as_mutable(JSONEncoded))
__table_args__ = (
PrimaryKeyConstraint('media_entry', 'name_id'),
@@ -388,6 +518,10 @@ class MediaComment(Base, MediaCommentMixin):
backref=backref("posted_comments",
lazy="dynamic",
cascade="all, delete-orphan"))
+ get_entry = relationship(MediaEntry,
+ backref=backref("comments",
+ lazy="dynamic",
+ cascade="all, delete-orphan"))
# Cascade: Comments are somewhat owned by their MediaEntry.
# So do the full thing.
@@ -480,10 +614,307 @@ class ProcessingMetaData(Base):
return DictReadAttrProxy(self)
+class CommentSubscription(Base):
+ __tablename__ = 'core__comment_subscriptions'
+ id = Column(Integer, primary_key=True)
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)
+ media_entry = relationship(MediaEntry,
+ backref=backref('comment_subscriptions',
+ cascade='all, delete-orphan'))
+
+ user_id = Column(Integer, ForeignKey(User.id), nullable=False)
+ user = relationship(User,
+ backref=backref('comment_subscriptions',
+ cascade='all, delete-orphan'))
+
+ notify = Column(Boolean, nullable=False, default=True)
+ send_email = Column(Boolean, nullable=False, default=True)
+
+ def __repr__(self):
+ return ('<{classname} #{id}: {user} {media} notify: '
+ '{notify} email: {email}>').format(
+ id=self.id,
+ classname=self.__class__.__name__,
+ user=self.user,
+ media=self.media_entry,
+ notify=self.notify,
+ email=self.send_email)
+
+
+class Notification(Base):
+ __tablename__ = 'core__notifications'
+ id = Column(Integer, primary_key=True)
+ type = Column(Unicode)
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ user_id = Column(Integer, ForeignKey('core__users.id'), nullable=False,
+ index=True)
+ seen = Column(Boolean, default=lambda: False, index=True)
+ user = relationship(
+ User,
+ backref=backref('notifications', cascade='all, delete-orphan'))
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'notification',
+ 'polymorphic_on': type
+ }
+
+ def __repr__(self):
+ return '<{klass} #{id}: {user}: {subject} ({seen})>'.format(
+ id=self.id,
+ klass=self.__class__.__name__,
+ user=self.user,
+ subject=getattr(self, 'subject', None),
+ seen='unseen' if not self.seen else 'seen')
+
+
+class CommentNotification(Notification):
+ __tablename__ = 'core__comment_notifications'
+ id = Column(Integer, ForeignKey(Notification.id), primary_key=True)
+
+ subject_id = Column(Integer, ForeignKey(MediaComment.id))
+ subject = relationship(
+ MediaComment,
+ backref=backref('comment_notifications', cascade='all, delete-orphan'))
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'comment_notification'
+ }
+
+
+class ProcessingNotification(Notification):
+ __tablename__ = 'core__processing_notifications'
+
+ id = Column(Integer, ForeignKey(Notification.id), primary_key=True)
+
+ subject_id = Column(Integer, ForeignKey(MediaEntry.id))
+ subject = relationship(
+ MediaEntry,
+ backref=backref('processing_notifications',
+ cascade='all, delete-orphan'))
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'processing_notification'
+ }
+
+with_polymorphic(
+ Notification,
+ [ProcessingNotification, CommentNotification])
+
+class ReportBase(Base):
+ """
+ This is the basic report object which the other reports are based on.
+
+ :keyword reporter_id Holds the id of the user who created
+ the report, as an Integer column.
+ :keyword report_content Holds the explanation left by the reporter
+ to indicate why they filed the report in
+ the first place, as a Unicode column.
+ :keyword reported_user_id Holds the id of the user who created
+ the content which was reported, as
+ an Integer column.
+ :keyword created Holds a datetime column of when the
+ report was filed.
+ :keyword discriminator This column distinguishes between the
+ different types of reports.
+ :keyword resolver_id Holds the id of the moderator/admin who
+ resolved the report.
+ :keyword resolved Holds the DateTime object which describes
+ when this report was resolved.
+ :keyword result Holds the UnicodeText column of the
+ resolver's reasons for resolving
+ the report this way. Some of this
+ is auto-generated.
+ """
+ __tablename__ = 'core__reports'
+ id = Column(Integer, primary_key=True)
+ reporter_id = Column(Integer, ForeignKey(User.id), nullable=False)
+ reporter = relationship(
+ User,
+ backref=backref("reports_filed_by",
+ lazy="dynamic",
+ cascade="all, delete-orphan"),
+ primaryjoin="User.id==ReportBase.reporter_id")
+ report_content = Column(UnicodeText)
+ reported_user_id = Column(Integer, ForeignKey(User.id), nullable=False)
+ reported_user = relationship(
+ User,
+ backref=backref("reports_filed_on",
+ lazy="dynamic",
+ cascade="all, delete-orphan"),
+ primaryjoin="User.id==ReportBase.reported_user_id")
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ discriminator = Column('type', Unicode(50))
+ resolver_id = Column(Integer, ForeignKey(User.id))
+ resolver = relationship(
+ User,
+ backref=backref("reports_resolved_by",
+ lazy="dynamic",
+ cascade="all, delete-orphan"),
+ primaryjoin="User.id==ReportBase.resolver_id")
+
+ resolved = Column(DateTime)
+ result = Column(UnicodeText)
+ __mapper_args__ = {'polymorphic_on': discriminator}
+
+ def is_comment_report(self):
+ return self.discriminator=='comment_report'
+
+ def is_media_entry_report(self):
+ return self.discriminator=='media_report'
+
+ def is_archived_report(self):
+ return self.resolved is not None
+
+ def archive(self,resolver_id, resolved, result):
+ self.resolver_id = resolver_id
+ self.resolved = resolved
+ self.result = result
+
+
+class CommentReport(ReportBase):
+ """
+ Reports that have been filed on comments.
+ :keyword comment_id Holds the integer value of the reported
+ comment's ID
+ """
+ __tablename__ = 'core__reports_on_comments'
+ __mapper_args__ = {'polymorphic_identity': 'comment_report'}
+
+ id = Column('id',Integer, ForeignKey('core__reports.id'),
+ primary_key=True)
+ comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
+ comment = relationship(
+ MediaComment, backref=backref("reports_filed_on",
+ lazy="dynamic"))
+
+
+class MediaReport(ReportBase):
+ """
+ Reports that have been filed on media entries
+ :keyword media_entry_id Holds the integer value of the reported
+ media entry's ID
+ """
+ __tablename__ = 'core__reports_on_media'
+ __mapper_args__ = {'polymorphic_identity': 'media_report'}
+
+ id = Column('id',Integer, ForeignKey('core__reports.id'),
+ primary_key=True)
+ media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=True)
+ media_entry = relationship(
+ MediaEntry,
+ backref=backref("reports_filed_on",
+ lazy="dynamic"))
+
+class UserBan(Base):
+ """
+ Holds the information on a specific user's ban-state. As long as one of
+ these is attached to a user, they are banned from accessing mediagoblin.
+ When they try to log in, they are greeted with a page that tells them
+ the reason why they are banned and when (if ever) the ban will be
+ lifted
+
+ :keyword user_id Holds the id of the user this object is
+ attached to. This is a one-to-one
+ relationship.
+ :keyword expiration_date Holds the date that the ban will be lifted.
+ If this is null, the ban is permanent
+ unless a moderator manually lifts it.
+ :keyword reason Holds the reason why the user was banned.
+ """
+ __tablename__ = 'core__user_bans'
+
+ user_id = Column(Integer, ForeignKey(User.id), nullable=False,
+ primary_key=True)
+ expiration_date = Column(Date)
+ reason = Column(UnicodeText, nullable=False)
+
+
+class Privilege(Base):
+ """
+ The Privilege table holds all of the different privileges a user can hold.
+ If a user 'has' a privilege, the User object is in a relationship with the
+ privilege object.
+
+ :keyword privilege_name Holds a unicode object that is the recognizable
+ name of this privilege. This is the column
+ used for identifying whether or not a user
+ has a necessary privilege or not.
+
+ """
+ __tablename__ = 'core__privileges'
+
+ id = Column(Integer, nullable=False, primary_key=True)
+ privilege_name = Column(Unicode, nullable=False, unique=True)
+ all_users = relationship(
+ User,
+ backref='all_privileges',
+ secondary="core__privileges_users")
+
+ def __init__(self, privilege_name):
+ '''
+ Currently constructors are required for tables that are initialized
+ through the FOUNDATIONS system. This is because they need to be able
+ to be constructed from a list object holding their *args.
+ '''
+ self.privilege_name = privilege_name
+
+ def __repr__(self):
+ return "<Privilege %s>" % (self.privilege_name)
+
+
+class PrivilegeUserAssociation(Base):
+ '''
+ This table holds the many-to-many relationship between User and Privilege
+ '''
+
+ __tablename__ = 'core__privileges_users'
+
+ privilege_id = Column(
+ 'core__privilege_id',
+ Integer,
+ ForeignKey(User.id),
+ primary_key=True)
+ user_id = Column(
+ 'core__user_id',
+ Integer,
+ ForeignKey(Privilege.id),
+ primary_key=True)
+
MODELS = [
- User, MediaEntry, Tag, MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames,
- MediaAttachmentFile, ProcessingMetaData]
+ User, MediaEntry, Tag, MediaTag, MediaComment, Collection, CollectionItem,
+ MediaFile, FileKeynames, MediaAttachmentFile, ProcessingMetaData,
+ Notification, CommentNotification, ProcessingNotification, Client,
+ CommentSubscription, ReportBase, CommentReport, MediaReport, UserBan,
+ Privilege, PrivilegeUserAssociation,
+ RequestToken, AccessToken, NonceTimestamp]
+"""
+ Foundations are the default rows that are created immediately after the tables
+ are initialized. Each entry in this dictionary should be in the format of:
+ ModelConstructorObject:List of Dictionaries
+ (Each Dictionary represents a row on the Table to be created, containing each
+ of the columns' names as a key string, and each of the columns' values as a
+ value)
+
+ ex. [NOTE THIS IS NOT BASED ON OUR ACTUAL USER TABLE]
+ user_foundations = [{'name':u'Joanna', 'age':24},
+ {'name':u'Andrea', 'age':41}]
+
+ FOUNDATIONS = {User:user_foundations}
+"""
+privilege_foundations = [{'privilege_name':u'admin'},
+ {'privilege_name':u'moderator'},
+ {'privilege_name':u'uploader'},
+ {'privilege_name':u'reporter'},
+ {'privilege_name':u'commenter'},
+ {'privilege_name':u'active'}]
+FOUNDATIONS = {Privilege:privilege_foundations}
######################################################
# Special, migrations-tracking table
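FOUNDATIONS ties back to MigrationManager.populate_table_foundations in migration_tools.py: on a brand-new install, each entry is instantiated and added to the session before the first migration record is written. Roughly (session setup elided; assume an open SQLAlchemy session named session):

for Model, rows in FOUNDATIONS.items():
    # e.g. Privilege(privilege_name=u'admin'), Privilege(privilege_name=u'moderator'), ...
    for parameters in rows:
        session.add(Model(**parameters))
session.commit()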
diff --git a/mediagoblin/db/models_v0.py b/mediagoblin/db/models_v0.py
index ec51a1f5..bdedec2e 100644
--- a/mediagoblin/db/models_v0.py
+++ b/mediagoblin/db/models_v0.py
@@ -18,6 +18,29 @@
TODO: indexes on foreignkeys, where useful.
"""
+###########################################################################
+# WHAT IS THIS FILE?
+# ------------------
+#
+# Upon occasion, someone runs into this file and wonders why we have
+# both a models.py and a models_v0.py.
+#
+# The short of it is: you can ignore this file.
+#
+# The long version is, in two parts:
+#
+# - We used to use MongoDB, then we switched to SQL and SQLAlchemy.
+# We needed to convert peoples' databases; the script we had would
+# switch them to the first version right after Mongo, convert over
+# all their tables, then run any migrations that were added after.
+#
+# - That script is now removed, but there is some discussion of
+# writing a test that would set us at the first SQL migration and
+# run everything after. If we wrote that, this file would still be
+# useful. But for now, it's legacy!
+#
+###########################################################################
+
import datetime
import sys
diff --git a/mediagoblin/db/open.py b/mediagoblin/db/open.py
index 0b1679fb..4ff0945f 100644
--- a/mediagoblin/db/open.py
+++ b/mediagoblin/db/open.py
@@ -52,10 +52,6 @@ class DatabaseMaster(object):
def load_models(app_config):
import mediagoblin.db.models
- for media_type in app_config['media_types']:
- _log.debug("Loading %s.models", media_type)
- __import__(media_type + ".models")
-
for plugin in mg_globals.global_config.get('plugins', {}).keys():
_log.debug("Loading %s.models", plugin)
try:
diff --git a/mediagoblin/db/util.py b/mediagoblin/db/util.py
index 6ffec44d..7a0a3a73 100644
--- a/mediagoblin/db/util.py
+++ b/mediagoblin/db/util.py
@@ -24,7 +24,7 @@ from mediagoblin.db.models import MediaEntry, Tag, MediaTag, Collection
def atomic_update(table, query_dict, update_values):
- table.find(query_dict).update(update_values,
+ table.query.filter_by(**query_dict).update(update_values,
synchronize_session=False)
Session.commit()
@@ -67,7 +67,6 @@ def check_collection_slug_used(creator_id, slug, ignore_c_id):
does_exist = Session.query(Collection.id).filter(filt).first() is not None
return does_exist
-
if __name__ == '__main__':
from mediagoblin.db.open import setup_connection_and_db_from_config
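With the find() helpers gone from GMGTableBase, atomic_update now goes straight through the model's query property and issues a single UPDATE without loading rows first. A typical call would look like this (the entry id and new state are illustrative):

from mediagoblin.db.models import MediaEntry
from mediagoblin.db.util import atomic_update

entry_id = 42  # hypothetical MediaEntry id

atomic_update(MediaEntry, {'id': entry_id},
              {'state': u'processed'})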