Diffstat (limited to 'mediagoblin/db')
-rw-r--r--  mediagoblin/db/base.py             |  12
-rw-r--r--  mediagoblin/db/migration_tools.py  |   3
-rw-r--r--  mediagoblin/db/migrations.py       |  94
-rw-r--r--  mediagoblin/db/mixin.py            |  85
-rw-r--r--  mediagoblin/db/models.py           | 115
-rw-r--r--  mediagoblin/db/models_v0.py        |  23
-rw-r--r--  mediagoblin/db/open.py             |   4
-rw-r--r--  mediagoblin/db/util.py             |   2
8 files changed, 290 insertions(+), 48 deletions(-)
diff --git a/mediagoblin/db/base.py b/mediagoblin/db/base.py
index 699a503a..c0cefdc2 100644
--- a/mediagoblin/db/base.py
+++ b/mediagoblin/db/base.py
@@ -24,18 +24,6 @@ Session = scoped_session(sessionmaker())
class GMGTableBase(object):
query = Session.query_property()
- @classmethod
- def find(cls, query_dict):
- return cls.query.filter_by(**query_dict)
-
- @classmethod
- def find_one(cls, query_dict):
- return cls.query.filter_by(**query_dict).first()
-
- @classmethod
- def one(cls, query_dict):
- return cls.find(query_dict).one()
-
def get(self, key):
return getattr(self, key)
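
With the find()/find_one()/one() helpers removed, call sites use the SQLAlchemy
query property directly. A minimal usage sketch of the equivalent calls (the
model, field names and values below are purely illustrative):

    from mediagoblin.db.models import User

    user = User.query.filter_by(username=u'alice').first()  # was User.find_one({...})
    admins = User.query.filter_by(is_admin=True)            # was User.find({...})
    only_one = User.query.filter_by(id=1).one()             # was User.one({...})
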
diff --git a/mediagoblin/db/migration_tools.py b/mediagoblin/db/migration_tools.py
index c0c7e998..aa22ef94 100644
--- a/mediagoblin/db/migration_tools.py
+++ b/mediagoblin/db/migration_tools.py
@@ -175,8 +175,7 @@ class MigrationManager(object):
if self.name == u'__main__':
return u"main mediagoblin tables"
else:
- # TODO: Use the friendlier media manager "human readable" name
- return u'media type "%s"' % self.name
+ return u'plugin "%s"' % self.name
def init_or_migrate(self):
"""
diff --git a/mediagoblin/db/migrations.py b/mediagoblin/db/migrations.py
index 2c553396..fe4ffb3e 100644
--- a/mediagoblin/db/migrations.py
+++ b/mediagoblin/db/migrations.py
@@ -26,7 +26,7 @@ from sqlalchemy.sql import and_
from migrate.changeset.constraint import UniqueConstraint
from mediagoblin.db.migration_tools import RegisterMigration, inspect_table
-from mediagoblin.db.models import MediaEntry, Collection, User
+from mediagoblin.db.models import MediaEntry, Collection, User, MediaComment
MIGRATIONS = {}
@@ -287,3 +287,95 @@ def unique_collections_slug(db):
constraint.create()
db.commit()
+
+@RegisterMigration(11, MIGRATIONS)
+def drop_token_related_User_columns(db):
+ """
+ Drop unneeded columns from the User table after switching to using
+ itsdangerous tokens for email and forgot password verification.
+ """
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, 'core__users')
+
+ verification_key = user_table.columns['verification_key']
+ fp_verification_key = user_table.columns['fp_verification_key']
+ fp_token_expire = user_table.columns['fp_token_expire']
+
+ verification_key.drop()
+ fp_verification_key.drop()
+ fp_token_expire.drop()
+
+ db.commit()
+
+
+class CommentSubscription_v0(declarative_base()):
+ __tablename__ = 'core__comment_subscriptions'
+ id = Column(Integer, primary_key=True)
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)
+
+ user_id = Column(Integer, ForeignKey(User.id), nullable=False)
+
+ notify = Column(Boolean, nullable=False, default=True)
+ send_email = Column(Boolean, nullable=False, default=True)
+
+
+class Notification_v0(declarative_base()):
+ __tablename__ = 'core__notifications'
+ id = Column(Integer, primary_key=True)
+ type = Column(Unicode)
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ user_id = Column(Integer, ForeignKey(User.id), nullable=False,
+ index=True)
+ seen = Column(Boolean, default=lambda: False, index=True)
+
+
+class CommentNotification_v0(Notification_v0):
+ __tablename__ = 'core__comment_notifications'
+ id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)
+
+ subject_id = Column(Integer, ForeignKey(MediaComment.id))
+
+
+class ProcessingNotification_v0(Notification_v0):
+ __tablename__ = 'core__processing_notifications'
+
+ id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)
+
+ subject_id = Column(Integer, ForeignKey(MediaEntry.id))
+
+
+@RegisterMigration(12, MIGRATIONS)
+def add_new_notification_tables(db):
+ metadata = MetaData(bind=db.bind)
+
+ user_table = inspect_table(metadata, 'core__users')
+ mediaentry_table = inspect_table(metadata, 'core__media_entries')
+ mediacomment_table = inspect_table(metadata, 'core__media_comments')
+
+ CommentSubscription_v0.__table__.create(db.bind)
+
+ Notification_v0.__table__.create(db.bind)
+ CommentNotification_v0.__table__.create(db.bind)
+ ProcessingNotification_v0.__table__.create(db.bind)
+
+
+@RegisterMigration(13, MIGRATIONS)
+def pw_hash_nullable(db):
+ """Make pw_hash column nullable"""
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+
+ user_table.c.pw_hash.alter(nullable=True)
+
+ # sqlite+sqlalchemy seems to drop this constraint during the
+ # migration, so for now we re-add it here manually.
+ if db.bind.url.drivername == 'sqlite':
+ constraint = UniqueConstraint('username', table=user_table)
+ constraint.create()
+
+ db.commit()
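
New migrations in this file keep following the pattern above: a function
registered with the next free version number that reflects the live tables
through inspect_table() before altering them. A rough sketch of what a
follow-up migration could look like (the migration number, column name and
use of sqlalchemy-migrate's Column.create() helper are illustrative
assumptions, not part of this commit):

    @RegisterMigration(14, MIGRATIONS)
    def add_example_column(db):
        """Hypothetical sketch only: add a nullable column to core__users."""
        metadata = MetaData(bind=db.bind)
        user_table = inspect_table(metadata, 'core__users')

        example_col = Column('example_field', Unicode)  # illustrative name
        example_col.create(user_table)                  # sqlalchemy-migrate helper

        db.commit()
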
diff --git a/mediagoblin/db/mixin.py b/mediagoblin/db/mixin.py
index 388bac89..57b27d83 100644
--- a/mediagoblin/db/mixin.py
+++ b/mediagoblin/db/mixin.py
@@ -28,25 +28,20 @@ real objects.
"""
import uuid
+import re
+from datetime import datetime
from werkzeug.utils import cached_property
from mediagoblin import mg_globals
-from mediagoblin.auth import lib as auth_lib
-from mediagoblin.media_types import get_media_managers, FileTypeNotSupported
+from mediagoblin.media_types import FileTypeNotSupported
from mediagoblin.tools import common, licenses
+from mediagoblin.tools.pluginapi import hook_handle
from mediagoblin.tools.text import cleaned_markdown_conversion
from mediagoblin.tools.url import slugify
class UserMixin(object):
- def check_login(self, password):
- """
- See if a user can login with this password
- """
- return auth_lib.bcrypt_check_password(
- password, self.pw_hash)
-
@property
def bio_html(self):
return cleaned_markdown_conversion(self.bio)
@@ -208,14 +203,14 @@ class MediaEntryMixin(GenerateSlugMixin):
Raises FileTypeNotSupported in case no such manager is enabled
"""
- # TODO, we should be able to make this a simple lookup rather
- # than iterating through all media managers.
- for media_type, manager in get_media_managers():
- if media_type == self.media_type:
- return manager(self)
+ manager = hook_handle(('media_manager', self.media_type))
+ if manager:
+ return manager(self)
+
# Not found? Then raise an error
raise FileTypeNotSupported(
- "MediaManager not in enabled types. Check media_types in config?")
+ "MediaManager not in enabled types. Check media_type plugins are"
+ " enabled in config?")
def get_fail_exception(self):
"""
@@ -229,15 +224,60 @@ class MediaEntryMixin(GenerateSlugMixin):
return licenses.get_license_by_url(self.license or "")
def exif_display_iter(self):
- from mediagoblin.tools.exif import USEFUL_TAGS
+ if not self.media_data:
+ return
+ exif_all = self.media_data.get("exif_all")
+ for key in exif_all:
+ label = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', key)
+ yield label.replace('EXIF', '').replace('Image', ''), exif_all[key]
+
+ def exif_display_data_short(self):
+ """Display a very short practical version of exif info"""
if not self.media_data:
return
+
exif_all = self.media_data.get("exif_all")
- for key in USEFUL_TAGS:
- if key in exif_all:
- yield key, exif_all[key]
+ exif_short = {}
+
+ if 'Image DateTimeOriginal' in exif_all:
+ # format date taken
+ takendate = datetime.strptime(
+ exif_all['Image DateTimeOriginal']['printable'],
+ '%Y:%m:%d %H:%M:%S').date()
+ taken = takendate.strftime('%B %d %Y')
+
+ exif_short.update({'Date Taken': taken})
+
+ aperture = None
+ if 'EXIF FNumber' in exif_all:
+ fnum = str(exif_all['EXIF FNumber']['printable']).split('/')
+
+ # calculate aperture
+ if len(fnum) == 2:
+ aperture = "f/%.1f" % (float(fnum[0])/float(fnum[1]))
+ elif fnum[0] != 'None':
+ aperture = "f/%s" % (fnum[0])
+
+ if aperture:
+ exif_short.update({'Aperture': aperture})
+
+ short_keys = [
+ ('Camera', 'Image Model', None),
+ ('Exposure', 'EXIF ExposureTime', lambda x: '%s sec' % x),
+ ('ISO Speed', 'EXIF ISOSpeedRatings', None),
+ ('Focal Length', 'EXIF FocalLength', lambda x: '%s mm' % x)]
+
+ for label, key, fmt_func in short_keys:
+ try:
+ val = fmt_func(exif_all[key]['printable']) if fmt_func \
+ else exif_all[key]['printable']
+ exif_short.update({label: val})
+ except KeyError:
+ pass
+
+ return exif_short
class MediaCommentMixin(object):
@@ -249,6 +289,13 @@ class MediaCommentMixin(object):
"""
return cleaned_markdown_conversion(self.content)
+ def __repr__(self):
+ return '<{klass} #{id} {author} "{comment}">'.format(
+ klass=self.__class__.__name__,
+ id=self.id,
+ author=self.get_author,
+ comment=self.content)
+
class CollectionMixin(GenerateSlugMixin):
def check_slug_used(self, slug):
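
The media_manager lookup above now goes through the plugin hook system instead
of iterating over every registered media manager. A rough sketch of the plugin
side, assuming the hooks-dict convention from mediagoblin.tools.pluginapi (the
module path and class name are illustrative):

    # In a media type plugin's __init__.py (illustrative names):
    from mediagoblin.media_types.image.processing import ImageMediaManager

    MEDIA_TYPE = 'mediagoblin.media_types.image'

    hooks = {
        # hook_handle(('media_manager', media_type)) calls this and hands
        # the returned class to manager(self) in the mixin above.
        ('media_manager', MEDIA_TYPE): lambda: ImageMediaManager,
    }
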
diff --git a/mediagoblin/db/models.py b/mediagoblin/db/models.py
index 2b925983..826d47ba 100644
--- a/mediagoblin/db/models.py
+++ b/mediagoblin/db/models.py
@@ -24,15 +24,17 @@ import datetime
from sqlalchemy import Column, Integer, Unicode, UnicodeText, DateTime, \
Boolean, ForeignKey, UniqueConstraint, PrimaryKeyConstraint, \
SmallInteger
-from sqlalchemy.orm import relationship, backref
+from sqlalchemy.orm import relationship, backref, with_polymorphic
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.sql.expression import desc
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.util import memoized_property
+
from mediagoblin.db.extratypes import PathTupleWithSlashes, JSONEncoded
from mediagoblin.db.base import Base, DictReadAttrProxy
-from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, MediaCommentMixin, CollectionMixin, CollectionItemMixin
+from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, \
+ MediaCommentMixin, CollectionMixin, CollectionItemMixin
from mediagoblin.tools.files import delete_media_files
from mediagoblin.tools.common import import_component
@@ -60,20 +62,17 @@ class User(Base, UserMixin):
# the RFC) and because it would be a mess to implement at this
# point.
email = Column(Unicode, nullable=False)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
- pw_hash = Column(Unicode, nullable=False)
+ pw_hash = Column(Unicode)
email_verified = Column(Boolean, default=False)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
status = Column(Unicode, default=u"needs_email_verification", nullable=False)
# Intended to be nullable=False, but migrations would not work for it,
# so it is set to nullable=True implicitly.
wants_comment_notification = Column(Boolean, default=True)
license_preference = Column(Unicode)
- verification_key = Column(Unicode)
is_admin = Column(Boolean, default=False, nullable=False)
url = Column(Unicode)
bio = Column(UnicodeText) # ??
- fp_verification_key = Column(Unicode)
- fp_token_expire = Column(DateTime)
## TODO
# plugin data would be in a separate model
@@ -392,6 +391,10 @@ class MediaComment(Base, MediaCommentMixin):
backref=backref("posted_comments",
lazy="dynamic",
cascade="all, delete-orphan"))
+ get_entry = relationship(MediaEntry,
+ backref=backref("comments",
+ lazy="dynamic",
+ cascade="all, delete-orphan"))
# Cascade: Comments are somewhat owned by their MediaEntry.
# So do the full thing.
@@ -484,9 +487,103 @@ class ProcessingMetaData(Base):
return DictReadAttrProxy(self)
+class CommentSubscription(Base):
+ __tablename__ = 'core__comment_subscriptions'
+ id = Column(Integer, primary_key=True)
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)
+ media_entry = relationship(MediaEntry,
+ backref=backref('comment_subscriptions',
+ cascade='all, delete-orphan'))
+
+ user_id = Column(Integer, ForeignKey(User.id), nullable=False)
+ user = relationship(User,
+ backref=backref('comment_subscriptions',
+ cascade='all, delete-orphan'))
+
+ notify = Column(Boolean, nullable=False, default=True)
+ send_email = Column(Boolean, nullable=False, default=True)
+
+ def __repr__(self):
+ return ('<{classname} #{id}: {user} {media} notify: '
+ '{notify} email: {email}>').format(
+ id=self.id,
+ classname=self.__class__.__name__,
+ user=self.user,
+ media=self.media_entry,
+ notify=self.notify,
+ email=self.send_email)
+
+
+class Notification(Base):
+ __tablename__ = 'core__notifications'
+ id = Column(Integer, primary_key=True)
+ type = Column(Unicode)
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+
+ user_id = Column(Integer, ForeignKey('core__users.id'), nullable=False,
+ index=True)
+ seen = Column(Boolean, default=lambda: False, index=True)
+ user = relationship(
+ User,
+ backref=backref('notifications', cascade='all, delete-orphan'))
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'notification',
+ 'polymorphic_on': type
+ }
+
+ def __repr__(self):
+ return '<{klass} #{id}: {user}: {subject} ({seen})>'.format(
+ id=self.id,
+ klass=self.__class__.__name__,
+ user=self.user,
+ subject=getattr(self, 'subject', None),
+ seen='unseen' if not self.seen else 'seen')
+
+
+class CommentNotification(Notification):
+ __tablename__ = 'core__comment_notifications'
+ id = Column(Integer, ForeignKey(Notification.id), primary_key=True)
+
+ subject_id = Column(Integer, ForeignKey(MediaComment.id))
+ subject = relationship(
+ MediaComment,
+ backref=backref('comment_notifications', cascade='all, delete-orphan'))
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'comment_notification'
+ }
+
+
+class ProcessingNotification(Notification):
+ __tablename__ = 'core__processing_notifications'
+
+ id = Column(Integer, ForeignKey(Notification.id), primary_key=True)
+
+ subject_id = Column(Integer, ForeignKey(MediaEntry.id))
+ subject = relationship(
+ MediaEntry,
+ backref=backref('processing_notifications',
+ cascade='all, delete-orphan'))
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'processing_notification'
+ }
+
+
+with_polymorphic(
+ Notification,
+ [ProcessingNotification, CommentNotification])
+
MODELS = [
- User, MediaEntry, Tag, MediaTag, MediaComment, Collection, CollectionItem, MediaFile, FileKeynames,
- MediaAttachmentFile, ProcessingMetaData]
+ User, MediaEntry, Tag, MediaTag, MediaComment, Collection, CollectionItem,
+ MediaFile, FileKeynames, MediaAttachmentFile, ProcessingMetaData,
+ Notification, CommentNotification, ProcessingNotification,
+ CommentSubscription]
######################################################
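
Because Notification is polymorphic on its type column and with_polymorphic()
is applied above, one query can return CommentNotification and
ProcessingNotification rows together. An illustrative query sketch (the user
object and ordering are just examples):

    # Fetch a user's unseen notifications, newest first (illustrative).
    unseen = Notification.query.filter_by(
        user_id=some_user.id, seen=False).order_by(
        Notification.created.desc()).all()

    for notification in unseen:
        # .subject resolves to a MediaComment or a MediaEntry depending on
        # which concrete subclass the row belongs to.
        print(notification)
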
diff --git a/mediagoblin/db/models_v0.py b/mediagoblin/db/models_v0.py
index ec51a1f5..bdedec2e 100644
--- a/mediagoblin/db/models_v0.py
+++ b/mediagoblin/db/models_v0.py
@@ -18,6 +18,29 @@
TODO: indexes on foreignkeys, where useful.
"""
+###########################################################################
+# WHAT IS THIS FILE?
+# ------------------
+#
+# Upon occasion, someone runs into this file and wonders why we have
+# both a models.py and a models_v0.py.
+#
+# The short of it is: you can ignore this file.
+#
+# The long version is, in two parts:
+#
+# - We used to use MongoDB, then we switched to SQL and SQLAlchemy.
+# We needed to convert peoples' databases; the script we had would
+# switch them to the first version right after Mongo, convert over
+# all their tables, then run any migrations that were added after.
+#
+# - That script is now removed, but there is some discussion of
+# writing a test that would set us at the first SQL migration and
+# run everything after. If we wrote that, this file would still be
+# useful. But for now, it's legacy!
+#
+###########################################################################
+
import datetime
import sys
diff --git a/mediagoblin/db/open.py b/mediagoblin/db/open.py
index 0b1679fb..4ff0945f 100644
--- a/mediagoblin/db/open.py
+++ b/mediagoblin/db/open.py
@@ -52,10 +52,6 @@ class DatabaseMaster(object):
def load_models(app_config):
import mediagoblin.db.models
- for media_type in app_config['media_types']:
- _log.debug("Loading %s.models", media_type)
- __import__(media_type + ".models")
-
for plugin in mg_globals.global_config.get('plugins', {}).keys():
_log.debug("Loading %s.models", plugin)
try:
diff --git a/mediagoblin/db/util.py b/mediagoblin/db/util.py
index 6ffec44d..8431361a 100644
--- a/mediagoblin/db/util.py
+++ b/mediagoblin/db/util.py
@@ -24,7 +24,7 @@ from mediagoblin.db.models import MediaEntry, Tag, MediaTag, Collection
def atomic_update(table, query_dict, update_values):
- table.find(query_dict).update(update_values,
+ table.query.filter_by(**query_dict).update(update_values,
synchronize_session=False)
Session.commit()
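
With Model.find() gone from the base class, atomic_update() now builds its
query from the standard query property; its call signature is unchanged. A
short usage sketch (the entry id and field values are illustrative):

    from mediagoblin.db.models import MediaEntry
    from mediagoblin.db.util import atomic_update

    # Flip a media entry's state in a single UPDATE without loading the row.
    atomic_update(MediaEntry,
                  {'id': 42},                  # illustrative entry id
                  {'state': u'processed'})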