Diffstat (limited to 'mediagoblin/db')
-rw-r--r--   mediagoblin/db/__init__.py     33
-rw-r--r--   mediagoblin/db/migrations.py  188
-rw-r--r--   mediagoblin/db/mixin.py        20
-rw-r--r--   mediagoblin/db/models.py      220
-rw-r--r--   mediagoblin/db/util.py         15
5 files changed, 409 insertions, 67 deletions
diff --git a/mediagoblin/db/__init__.py b/mediagoblin/db/__init__.py
index 27ca4b06..719b56e7 100644
--- a/mediagoblin/db/__init__.py
+++ b/mediagoblin/db/__init__.py
@@ -14,36 +14,3 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""
-Database Abstraction/Wrapper Layer
-==================================
-
-This submodule is for most of the db specific stuff.
-
-There are two main ideas here:
-
-1. Open up a small possibility to replace mongo by another
- db. This means, that all direct mongo accesses should
- happen in the db submodule. While all the rest uses an
- API defined by this submodule.
-
- Currently this API happens to be basicly mongo.
- Which means, that the abstraction/wrapper layer is
- extremely thin.
-
-2. Give the rest of the app a simple and easy way to get most of
- their db needs. Which often means some simple import
- from db.util.
-
-What does that mean?
-
-* Never import mongo directly outside of this submodule.
-
-* Inside this submodule you can do whatever is needed. The
- API border is exactly at the submodule layer. Nowhere
- else.
-
-* helper functions can be moved in here. They become part
- of the db.* API
-
-"""
diff --git a/mediagoblin/db/migrations.py b/mediagoblin/db/migrations.py
index b3cea871..349d16d5 100644
--- a/mediagoblin/db/migrations.py
+++ b/mediagoblin/db/migrations.py
@@ -21,18 +21,19 @@ import six
from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
Integer, Unicode, UnicodeText, DateTime,
- ForeignKey, Date)
+ ForeignKey, Date, Index)
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import and_
from sqlalchemy.schema import UniqueConstraint
-
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
from mediagoblin.db.migration_tools import (
RegisterMigration, inspect_table, replace_table_hack)
-from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
- Privilege)
+from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
+ Privilege)
+from mediagoblin.db.extratypes import JSONEncoded, MutationDict
+
MIGRATIONS = {}
@@ -467,7 +468,6 @@ def create_oauth1_tables(db):
db.commit()
-
@RegisterMigration(15, MIGRATIONS)
def wants_notifications(db):
"""Add a wants_notifications field to User model"""
@@ -661,8 +661,8 @@ def create_moderation_tables(db):
# admin, an active user or an inactive user ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
for admin_user in admin_users_ids:
admin_user_id = admin_user['id']
- for privilege_id in [admin_privilege_id, uploader_privilege_id,
- reporter_privilege_id, commenter_privilege_id,
+ for privilege_id in [admin_privilege_id, uploader_privilege_id,
+ reporter_privilege_id, commenter_privilege_id,
active_privilege_id]:
db.execute(user_privilege_assoc.insert().values(
core__privilege_id=admin_user_id,
@@ -670,7 +670,7 @@ def create_moderation_tables(db):
for active_user in active_users_ids:
active_user_id = active_user['id']
- for privilege_id in [uploader_privilege_id, reporter_privilege_id,
+ for privilege_id in [uploader_privilege_id, reporter_privilege_id,
commenter_privilege_id, active_privilege_id]:
db.execute(user_privilege_assoc.insert().values(
core__privilege_id=active_user_id,
@@ -678,7 +678,7 @@ def create_moderation_tables(db):
for inactive_user in inactive_users_ids:
inactive_user_id = inactive_user['id']
- for privilege_id in [uploader_privilege_id, reporter_privilege_id,
+ for privilege_id in [uploader_privilege_id, reporter_privilege_id,
commenter_privilege_id]:
db.execute(user_privilege_assoc.insert().values(
core__privilege_id=inactive_user_id,
@@ -709,6 +709,8 @@ def create_moderation_tables(db):
is_admin.drop()
db.commit()
+
+
@RegisterMigration(19, MIGRATIONS)
def drop_MediaEntry_collected(db):
"""
@@ -722,3 +724,171 @@ def drop_MediaEntry_collected(db):
media_collected.drop()
db.commit()
+
+
+@RegisterMigration(20, MIGRATIONS)
+def add_metadata_column(db):
+ metadata = MetaData(bind=db.bind)
+
+ media_entry = inspect_table(metadata, 'core__media_entries')
+
+ col = Column('media_metadata', MutationDict.as_mutable(JSONEncoded),
+ default=MutationDict())
+ col.create(media_entry)
+
+ db.commit()
+
+
+class PrivilegeUserAssociation_R1(declarative_base()):
+ __tablename__ = 'rename__privileges_users'
+ user = Column(
+ "user",
+ Integer,
+ ForeignKey(User.id),
+ primary_key=True)
+ privilege = Column(
+ "privilege",
+ Integer,
+ ForeignKey(Privilege.id),
+ primary_key=True)
+
+@RegisterMigration(21, MIGRATIONS)
+def fix_privilege_user_association_table(db):
+ """
+ There was an error in the PrivilegeUserAssociation table that allowed for a
+    dangerous sql error. We need to change the names of the columns so they
+    are unique and properly referenced.
+ """
+ metadata = MetaData(bind=db.bind)
+
+ privilege_user_assoc = inspect_table(
+ metadata, 'core__privileges_users')
+
+ # This whole process is more complex if we're dealing with sqlite
+ if db.bind.url.drivername == 'sqlite':
+ PrivilegeUserAssociation_R1.__table__.create(db.bind)
+ db.commit()
+
+ new_privilege_user_assoc = inspect_table(
+ metadata, 'rename__privileges_users')
+ result = db.execute(privilege_user_assoc.select())
+ for row in result:
+ # The columns were improperly named before, so we switch the columns
+ user_id, priv_id = row['core__privilege_id'], row['core__user_id']
+ db.execute(new_privilege_user_assoc.insert().values(
+ user=user_id,
+ privilege=priv_id))
+
+ db.commit()
+
+ privilege_user_assoc.drop()
+ new_privilege_user_assoc.rename('core__privileges_users')
+
+ # much simpler if postgres though!
+ else:
+ privilege_user_assoc.c.core__user_id.alter(name="privilege")
+ privilege_user_assoc.c.core__privilege_id.alter(name="user")
+
+ db.commit()
+
+
+@RegisterMigration(22, MIGRATIONS)
+def add_index_username_field(db):
+ """
+ This migration has been found to be doing the wrong thing. See
+ the documentation in migration 23 (revert_username_index) below
+ which undoes this for those databases that did run this migration.
+
+ Old description:
+    This indexes the User.username field, which is frequently queried,
+    for example when a user logs in. This solves issue #894.
+ """
+ ## This code is left commented out *on purpose!*
+ ##
+ ## We do not normally allow commented out code like this in
+ ## MediaGoblin but this is a special case: since this migration has
+ ## been nullified but with great work to set things back below,
+ ## this is commented out for historical clarity.
+ #
+ # metadata = MetaData(bind=db.bind)
+ # user_table = inspect_table(metadata, "core__users")
+ #
+ # new_index = Index("ix_core__users_uploader", user_table.c.username)
+ # new_index.create()
+ #
+ # db.commit()
+ pass
+
+
+@RegisterMigration(23, MIGRATIONS)
+def revert_username_index(db):
+ """
+ Revert the stuff we did in migration 22 above.
+
+ There were a couple of problems with what we did:
+ - There was never a need for this migration! The unique
+ constraint had an implicit b-tree index, so it wasn't really
+ needed. (This is my (Chris Webber's) fault for suggesting it
+ needed to happen without knowing what's going on... my bad!)
+ - On top of that, databases created after the models.py was
+ changed weren't the same as those that had been run through
+ migration 22 above.
+
+ As such, we're setting things back to the way they were before,
+ but as it turns out, that's tricky to do!
+ """
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+ indexes = dict(
+ [(index.name, index) for index in user_table.indexes])
+
+ # index from unnecessary migration
+ users_uploader_index = indexes.get(u'ix_core__users_uploader')
+ # index created from models.py after (unique=True, index=True)
+ # was set in models.py
+ users_username_index = indexes.get(u'ix_core__users_username')
+
+ if users_uploader_index is None and users_username_index is None:
+ # We don't need to do anything.
+ # The database isn't in a state where it needs fixing
+ #
+ # (ie, either went through the previous borked migration or
+ # was initialized with a models.py where core__users was both
+ # unique=True and index=True)
+ return
+
+ if db.bind.url.drivername == 'sqlite':
+ # Again, sqlite has problems. So this is tricky.
+
+ # Yes, this is correct to use User_vR1! Nothing has changed
+ # between the *correct* version of this table and migration 18.
+ User_vR1.__table__.create(db.bind)
+ db.commit()
+ new_user_table = inspect_table(metadata, 'rename__users')
+ replace_table_hack(db, user_table, new_user_table)
+
+ else:
+ # If the db is not run using SQLite, we don't need to do crazy
+ # table copying.
+
+ # Remove whichever of the not-used indexes are in place
+ if users_uploader_index is not None:
+ users_uploader_index.drop()
+ if users_username_index is not None:
+ users_username_index.drop()
+
+ # Given we're removing indexes then adding a unique constraint
+ # which *we know might fail*, thus probably rolling back the
+ # session, let's commit here.
+ db.commit()
+
+ try:
+ # Add the unique constraint
+ constraint = UniqueConstraint(
+ 'username', table=user_table)
+ constraint.create()
+ except ProgrammingError:
+ # constraint already exists, no need to add
+ db.rollback()
+
+ db.commit()
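
Every migration above is wired up through the @RegisterMigration(version, MIGRATIONS) decorator imported from mediagoblin.db.migration_tools. A rough, standalone sketch of that pattern (the real decorator may do extra bookkeeping, so treat this as an approximation; migration 24 below is purely hypothetical):

    MIGRATIONS = {}

    class RegisterMigration(object):
        """Records a migration function under its version number."""
        def __init__(self, version, migration_registry):
            self.version = version
            self.migration_registry = migration_registry

        def __call__(self, migration):
            # The migration manager later runs every registered version newer
            # than the one stored in the database, in ascending order.
            self.migration_registry[self.version] = migration
            return migration

    @RegisterMigration(24, MIGRATIONS)
    def example_noop_migration(db):
        """Hypothetical migration 24: changes nothing, just shows the shape."""
        db.commit()

    assert MIGRATIONS[24] is example_noop_migration
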
diff --git a/mediagoblin/db/mixin.py b/mediagoblin/db/mixin.py
index 048cc07c..1f2e7ec3 100644
--- a/mediagoblin/db/mixin.py
+++ b/mediagoblin/db/mixin.py
@@ -202,6 +202,17 @@ class MediaEntryMixin(GenerateSlugMixin):
thumb_url = mg_globals.app.staticdirector(manager[u'default_thumb'])
return thumb_url
+ @property
+ def original_url(self):
+        """ Returns the URL for the original image;
+        falls back to self.thumb_url if no original file exists """
+ if u"original" not in self.media_files:
+ return self.thumb_url
+
+ return mg_globals.app.public_store.file_url(
+ self.media_files[u"original"]
+ )
+
@cached_property
def media_manager(self):
"""Returns the MEDIA_MANAGER of the media's media_type
@@ -248,7 +259,7 @@ class MediaEntryMixin(GenerateSlugMixin):
if 'Image DateTimeOriginal' in exif_all:
# format date taken
- takendate = datetime.datetime.strptime(
+ takendate = datetime.strptime(
exif_all['Image DateTimeOriginal']['printable'],
'%Y:%m:%d %H:%M:%S').date()
taken = takendate.strftime('%B %d %Y')
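
The strptime fix above assumes mixin.py imports the class directly (from datetime import datetime) rather than the datetime module; with that import, an EXIF "Image DateTimeOriginal" value parses like this:

    from datetime import datetime

    # EXIF stores timestamps with colons in the date part.
    takendate = datetime.strptime('2013:07:21 18:32:10',
                                  '%Y:%m:%d %H:%M:%S').date()
    print(takendate.strftime('%B %d %Y'))   # "July 21 2013"
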
@@ -294,6 +305,13 @@ class MediaCommentMixin(object):
"""
return cleaned_markdown_conversion(self.content)
+ def __unicode__(self):
+ return u'<{klass} #{id} {author} "{comment}">'.format(
+ klass=self.__class__.__name__,
+ id=self.id,
+ author=self.get_author,
+ comment=self.content)
+
def __repr__(self):
return '<{klass} #{id} {author} "{comment}">'.format(
klass=self.__class__.__name__,
diff --git a/mediagoblin/db/models.py b/mediagoblin/db/models.py
index d3d1ec4b..5a07effe 100644
--- a/mediagoblin/db/models.py
+++ b/mediagoblin/db/models.py
@@ -101,25 +101,26 @@ class User(Base, UserMixin):
super(User, self).delete(**kwargs)
_log.info('Deleted user "{0}" account'.format(self.username))
- def has_privilege(self,*priv_names):
+ def has_privilege(self, privilege, allow_admin=True):
"""
This method checks to make sure a user has all the correct privileges
to access a piece of content.
- :param priv_names A variable number of unicode objects which rep-
- -resent the different privileges which may give
- the user access to this content. If you pass
- multiple arguments, the user will be granted
- access if they have ANY of the privileges
- passed.
+        :param privilege     A unicode object which represents the privilege
+                             which may give the user access to the content.
+
+        :param allow_admin   If this is set to True and the user is an admin,
+                             this will always return True even if the user
+                             hasn't explicitly been given the privilege.
+                             (defaults to True)
"""
- if len(priv_names) == 1:
- priv = Privilege.query.filter(
- Privilege.privilege_name==priv_names[0]).one()
- return (priv in self.all_privileges)
- elif len(priv_names) > 1:
- return self.has_privilege(priv_names[0]) or \
- self.has_privilege(*priv_names[1:])
+ priv = Privilege.query.filter_by(privilege_name=privilege).one()
+ if priv in self.all_privileges:
+ return True
+ elif allow_admin and self.has_privilege(u'admin', allow_admin=False):
+ return True
+
return False
def is_banned(self):
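
With this change, has_privilege() takes a single privilege name plus an allow_admin flag instead of a variadic list, so callers that used to pass several names now check them one at a time (or lean on the admin fallback). A minimal, database-free sketch of the new control flow, using a stand-in class rather than the real User model and Privilege query:

    class FakeUser(object):
        """Stand-in for User: all_privileges is just a set of names here."""
        def __init__(self, privileges):
            self.all_privileges = set(privileges)

        def has_privilege(self, privilege, allow_admin=True):
            if privilege in self.all_privileges:
                return True
            elif allow_admin and self.has_privilege(u'admin', allow_admin=False):
                # Admins pass every check unless the caller opts out.
                return True
            return False

    admin = FakeUser([u'admin', u'active'])
    assert admin.has_privilege(u'commenter')                       # via admin fallback
    assert not admin.has_privilege(u'commenter', allow_admin=False)
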
@@ -132,6 +133,48 @@ class User(Base, UserMixin):
return UserBan.query.get(self.id) is not None
+ def serialize(self, request):
+ user = {
+ "id": "acct:{0}@{1}".format(self.username, request.host),
+ "preferredUsername": self.username,
+ "displayName": "{0}@{1}".format(self.username, request.host),
+ "objectType": "person",
+ "pump_io": {
+ "shared": False,
+ "followed": False,
+ },
+ "links": {
+ "self": {
+ "href": request.urlgen(
+ "mediagoblin.federation.user.profile",
+ username=self.username,
+ qualified=True
+ ),
+ },
+ "activity-inbox": {
+ "href": request.urlgen(
+ "mediagoblin.federation.inbox",
+ username=self.username,
+ qualified=True
+ )
+ },
+ "activity-outbox": {
+ "href": request.urlgen(
+ "mediagoblin.federation.feed",
+ username=self.username,
+ qualified=True
+ )
+ },
+ },
+ }
+
+ if self.bio:
+ user.update({"summary": self.bio})
+ if self.url:
+ user.update({"url": self.url})
+
+ return user
+
class Client(Base):
"""
Model representing a client - Used for API Auth
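
User.serialize() builds a pump.io-style "person" object. Roughly what the returned dictionary looks like for a hypothetical user "alice" on example.org; the href values come from request.urlgen for the mediagoblin.federation.* routes, so the placeholders below only indicate which route fills each slot:

    person = {
        "id": "acct:alice@example.org",
        "preferredUsername": "alice",
        "displayName": "alice@example.org",
        "objectType": "person",
        "pump_io": {"shared": False, "followed": False},
        "links": {
            "self": {"href": "<mediagoblin.federation.user.profile URL>"},
            "activity-inbox": {"href": "<mediagoblin.federation.inbox URL>"},
            "activity-outbox": {"href": "<mediagoblin.federation.feed URL>"},
        },
        # "summary" (the bio) and "url" are only added when the user set them.
    }
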
@@ -197,7 +240,6 @@ class NonceTimestamp(Base):
nonce = Column(Unicode, nullable=False, primary_key=True)
timestamp = Column(DateTime, nullable=False, primary_key=True)
-
class MediaEntry(Base, MediaEntryMixin):
"""
TODO: Consider fetching the media_files using join
@@ -260,6 +302,8 @@ class MediaEntry(Base, MediaEntryMixin):
cascade="all, delete-orphan"
)
collections = association_proxy("collections_helper", "in_collection")
+ media_metadata = Column(MutationDict.as_mutable(JSONEncoded),
+ default=MutationDict())
## TODO
# fail_error
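
The new media_metadata column stores a JSON-encoded dict wrapped in mutation tracking, so in-place edits such as entry.media_metadata[key] = value mark the row as dirty. MediaGoblin's own MutationDict and JSONEncoded types live in mediagoblin.db.extratypes; the sketch below shows the same idea with stock SQLAlchemy, for illustration only:

    import json

    from sqlalchemy.ext.mutable import MutableDict
    from sqlalchemy.types import TypeDecorator, UnicodeText

    class JSONEncodedDict(TypeDecorator):
        """Stores a dict as a JSON string in a text column."""
        impl = UnicodeText

        def process_bind_param(self, value, dialect):
            return json.dumps(value) if value is not None else None

        def process_result_value(self, value, dialect):
            return json.loads(value) if value is not None else None

    # On a model this would be declared roughly as:
    #   media_metadata = Column(MutableDict.as_mutable(JSONEncodedDict),
    #                           default=dict)
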
@@ -382,6 +426,80 @@ class MediaEntry(Base, MediaEntryMixin):
# pass through commit=False/True in kwargs
super(MediaEntry, self).delete(**kwargs)
+ @property
+ def objectType(self):
+ """ Converts media_type to pump-like type - don't use internally """
+ return self.media_type.split(".")[-1]
+
+ def serialize(self, request, show_comments=True):
+        """ Serialize the MediaEntry into a dictionary for the API """
+ author = self.get_uploader
+ context = {
+ "id": self.id,
+ "author": author.serialize(request),
+ "objectType": self.objectType,
+ "url": self.url_for_self(request.urlgen),
+ "image": {
+ "url": request.host_url + self.thumb_url[1:],
+ },
+ "fullImage":{
+ "url": request.host_url + self.original_url[1:],
+ },
+ "published": self.created.isoformat(),
+ "updated": self.created.isoformat(),
+ "pump_io": {
+ "shared": False,
+ },
+ "links": {
+ "self": {
+ "href": request.urlgen(
+ "mediagoblin.federation.object",
+ objectType=self.objectType,
+ id=self.id,
+ qualified=True
+ ),
+ },
+
+ }
+ }
+
+ if self.title:
+ context["displayName"] = self.title
+
+ if self.description:
+ context["content"] = self.description
+
+ if self.license:
+ context["license"] = self.license
+
+ if show_comments:
+ comments = [comment.serialize(request) for comment in self.get_comments()]
+ total = len(comments)
+ context["replies"] = {
+ "totalItems": total,
+ "items": comments,
+ "url": request.urlgen(
+ "mediagoblin.federation.object.comments",
+ objectType=self.objectType,
+ id=self.id,
+ qualified=True
+ ),
+ }
+
+ return context
+
+ def unserialize(self, data):
+        """ Takes an API object and unserializes it onto this existing MediaEntry """
+ if "displayName" in data:
+ self.title = data["displayName"]
+
+ if "content" in data:
+ self.description = data["content"]
+
+ if "license" in data:
+ self.license = data["license"]
+
+ return True
class FileKeynames(Base):
"""
@@ -528,6 +646,47 @@ class MediaComment(Base, MediaCommentMixin):
lazy="dynamic",
cascade="all, delete-orphan"))
+ def serialize(self, request):
+        """ Serialize the comment into a dictionary for the API """
+ media = MediaEntry.query.filter_by(id=self.media_entry).first()
+ author = self.get_author
+ context = {
+ "id": self.id,
+ "objectType": "comment",
+ "content": self.content,
+ "inReplyTo": media.serialize(request, show_comments=False),
+ "author": author.serialize(request)
+ }
+
+ return context
+
+ def unserialize(self, data):
+        """ Takes an API object and unserializes it onto this existing comment """
+ # Do initial checks to verify the object is correct
+ required_attributes = ["content", "inReplyTo"]
+ for attr in required_attributes:
+ if attr not in data:
+ return False
+
+ # Validate inReplyTo has ID
+ if "id" not in data["inReplyTo"]:
+ return False
+
+ # Validate that the ID is correct
+ try:
+ media_id = int(data["inReplyTo"]["id"])
+ except ValueError:
+ return False
+
+ media = MediaEntry.query.filter_by(id=media_id).first()
+ if media is None:
+ return False
+
+ self.media_entry = media.id
+ self.content = data["content"]
+ return True
+
+
class Collection(Base, CollectionMixin):
"""An 'album' or 'set' of media by a user.
@@ -563,6 +722,14 @@ class Collection(Base, CollectionMixin):
return CollectionItem.query.filter_by(
collection=self.id).order_by(order_col)
+ def __repr__(self):
+ safe_title = self.title.encode('ascii', 'replace')
+ return '<{classname} #{id}: {title} by {creator}>'.format(
+ id=self.id,
+ classname=self.__class__.__name__,
+ creator=self.creator,
+ title=safe_title)
+
class CollectionItem(Base, CollectionItemMixin):
__tablename__ = "core__collection_items"
@@ -592,6 +759,13 @@ class CollectionItem(Base, CollectionItemMixin):
"""A dict like view on this object"""
return DictReadAttrProxy(self)
+ def __repr__(self):
+ return '<{classname} #{id}: Entry {entry} in {collection}>'.format(
+ id=self.id,
+ classname=self.__class__.__name__,
+ collection=self.collection,
+ entry=self.media_entry)
+
class ProcessingMetaData(Base):
__tablename__ = 'core__processing_metadata'
@@ -667,6 +841,14 @@ class Notification(Base):
subject=getattr(self, 'subject', None),
seen='unseen' if not self.seen else 'seen')
+ def __unicode__(self):
+ return u'<{klass} #{id}: {user}: {subject} ({seen})>'.format(
+ id=self.id,
+ klass=self.__class__.__name__,
+ user=self.user,
+ subject=getattr(self, 'subject', None),
+ seen='unseen' if not self.seen else 'seen')
+
class CommentNotification(Notification):
__tablename__ = 'core__comment_notifications'
@@ -871,13 +1053,13 @@ class PrivilegeUserAssociation(Base):
__tablename__ = 'core__privileges_users'
- privilege_id = Column(
- 'core__privilege_id',
+ user = Column(
+ "user",
Integer,
ForeignKey(User.id),
primary_key=True)
- user_id = Column(
- 'core__user_id',
+ privilege = Column(
+ "privilege",
Integer,
ForeignKey(Privilege.id),
primary_key=True)
diff --git a/mediagoblin/db/util.py b/mediagoblin/db/util.py
index aba9c59c..515fd6cd 100644
--- a/mediagoblin/db/util.py
+++ b/mediagoblin/db/util.py
@@ -76,11 +76,16 @@ def check_db_up_to_date():
dbdatas = gather_database_data(mgg.global_config.get('plugins', {}).keys())
for dbdata in dbdatas:
- migration_manager = dbdata.make_migration_manager(Session())
- if migration_manager.database_current_migration is None or \
- migration_manager.migrations_to_run():
- sys.exit("Your database is not up to date. Please run "
- "'gmg dbupdate' before starting MediaGoblin.")
+ session = Session()
+ try:
+ migration_manager = dbdata.make_migration_manager(session)
+ if migration_manager.database_current_migration is None or \
+ migration_manager.migrations_to_run():
+ sys.exit("Your database is not up to date. Please run "
+ "'gmg dbupdate' before starting MediaGoblin.")
+ finally:
+ Session.rollback()
+ Session.remove()
if __name__ == '__main__':
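
check_db_up_to_date() now creates the session explicitly and always rolls it back and removes it in a finally block, even when sys.exit() raises SystemExit. A generic sketch of that scoped-session cleanup pattern, with Session standing in for the sqlalchemy.orm scoped_session MediaGoblin uses:

    def run_startup_check(Session, check):
        """Run check(session), then always return the scoped session cleanly."""
        session = Session()
        try:
            return check(session)
        finally:
            # Roll back anything the check may have started and hand the
            # connection back to the scoped-session registry.
            Session.rollback()
            Session.remove()
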