path: root/mediagoblin/db
Diffstat (limited to 'mediagoblin/db')
-rw-r--r--  mediagoblin/db/__init__.py  1
-rw-r--r--  mediagoblin/db/base.py  164
-rw-r--r--  mediagoblin/db/migration_tools.py  115
-rw-r--r--  mediagoblin/db/migrations.py  1471
-rw-r--r--  mediagoblin/db/migrations/README  57
-rw-r--r--  mediagoblin/db/migrations/env.py  64
-rw-r--r--  mediagoblin/db/migrations/script.py.mako  24
-rw-r--r--  mediagoblin/db/migrations/versions/.gitkeep  0
-rw-r--r--  mediagoblin/db/migrations/versions/101510e3a713_removes_graveyard_items_from_.py  60
-rw-r--r--  mediagoblin/db/migrations/versions/228916769bd2_ensure_report_object_id_is_nullable.py  33
-rw-r--r--  mediagoblin/db/migrations/versions/3145accb8fe3_remove_tombstone_comment_wrappers.py  44
-rw-r--r--  mediagoblin/db/migrations/versions/4066b9f8b84a_use_comment_link_ids_notifications.py  103
-rw-r--r--  mediagoblin/db/migrations/versions/52bf0ccbedc1_initial_revision.py  422
-rw-r--r--  mediagoblin/db/migrations/versions/8429e33fdf7_remove_the_graveyard_objects_from_.py  62
-rw-r--r--  mediagoblin/db/mixin.py  352
-rw-r--r--  mediagoblin/db/models.py  1164
-rw-r--r--  mediagoblin/db/open.py  134
-rw-r--r--  mediagoblin/db/util.py  25
18 files changed, 3964 insertions, 331 deletions
diff --git a/mediagoblin/db/__init__.py b/mediagoblin/db/__init__.py
index 719b56e7..621845ba 100644
--- a/mediagoblin/db/__init__.py
+++ b/mediagoblin/db/__init__.py
@@ -13,4 +13,3 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
diff --git a/mediagoblin/db/base.py b/mediagoblin/db/base.py
index c0cefdc2..c59b0ebf 100644
--- a/mediagoblin/db/base.py
+++ b/mediagoblin/db/base.py
@@ -13,16 +13,60 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
+import six
+import copy
from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import scoped_session, sessionmaker, object_session
+from sqlalchemy import inspect
+
+from mediagoblin.tools.transition import DISABLE_GLOBALS
+
+if not DISABLE_GLOBALS:
+ from sqlalchemy.orm import scoped_session, sessionmaker
+ Session = scoped_session(sessionmaker())
+
-Session = scoped_session(sessionmaker())
+class FakeCursor(object):
+ def __init__(self, cursor, mapper, filter=None):
+ self.cursor = cursor
+ self.mapper = mapper
+ self.filter = filter
+ def count(self):
+ return self.cursor.count()
+
+ def __copy__(self):
+ # __copy__ is the protocol method that copy.copy() looks for.
+ return FakeCursor(copy.copy(self.cursor), self.mapper, self.filter)
+
+ def __iter__(self):
+ return six.moves.filter(self.filter, six.moves.map(self.mapper, self.cursor))
+
+ def __getitem__(self, key):
+ return self.mapper(self.cursor[key])
+
+ def slice(self, *args, **kwargs):
+ r = self.cursor.slice(*args, **kwargs)
+ return list(six.moves.filter(self.filter, six.moves.map(self.mapper, r)))
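FakeCursor wraps a result cursor so each row is passed through mapper, and optionally filter, lazily. A minimal sketch of that behaviour, assuming only the class added above (a plain list stands in for the cursor):

    from mediagoblin.db.base import FakeCursor  # added by this patch

    rows = [1, 2, 3, 4]
    cursor = FakeCursor(rows,
                        mapper=lambda row: row * 10,
                        filter=lambda value: value > 10)
    print(list(cursor))  # [20, 30, 40] -- rows are mapped, then filtered
    print(cursor[0])     # 10 -- __getitem__ maps but does not filter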
class GMGTableBase(object):
- query = Session.query_property()
+ # Deletion types
+ HARD_DELETE = "hard-deletion"
+ SOFT_DELETE = "soft-deletion"
+
+ deletion_mode = HARD_DELETE
+
+ @property
+ def _session(self):
+ return inspect(self).session
+
+ @property
+ def _app(self):
+ return self._session.bind.app
+
+ if not DISABLE_GLOBALS:
+ query = Session.query_property()
def get(self, key):
return getattr(self, key)
@@ -31,16 +75,116 @@ class GMGTableBase(object):
# The key *has* to exist on sql.
return getattr(self, key)
- def save(self):
- sess = object_session(self)
- if sess is None:
+ def save(self, commit=True):
+ sess = self._session
+ if sess is None and not DISABLE_GLOBALS:
sess = Session()
+ assert sess is not None, "Can't save, %r has a detached session" % self
sess.add(self)
- sess.commit()
+ if commit:
+ sess.commit()
+ else:
+ sess.flush()
+
+ def delete(self, commit=True, deletion=None):
+ """ Delete the object either using soft or hard deletion """
+ # Get the setting in the model args if none has been specified.
+ if deletion is None:
+ deletion = self.deletion_mode
+
+ # If the item is in any collection it must be removed from it first;
+ # leaving it in place causes issues. See #5382.
+ # Import here to prevent cyclic imports.
+ from mediagoblin.db.models import CollectionItem, GenericModelReference, \
+ Report, Notification, Comment
+
+ # Some models don't have an "id" field, which means they can't be used
+ # with GMR and therefore can never be in collections. We can skip all of
+ # this for them.
+ if hasattr(self, "id"):
+ # First find the GenericModelReference for this object
+ gmr = GenericModelReference.query.filter_by(
+ obj_pk=self.id,
+ model_type=self.__tablename__
+ ).first()
+
+ # If there is no GMR we're done: having a GMR is a prerequisite for
+ # being in a collection.
+ if gmr is not None:
+ # Delete any collection items referencing this object
+ items = CollectionItem.query.filter_by(
+ object_id=gmr.id
+ )
+ items.delete()
+
+ # Delete notifications found
+ notifications = Notification.query.filter_by(
+ object_id=gmr.id
+ )
+ notifications.delete()
+
+ # Delete this as a comment
+ comments = Comment.query.filter_by(
+ comment_id=gmr.id
+ )
+ comments.delete()
+
+ # Set None on reports found
+ reports = Report.query.filter_by(
+ object_id=gmr.id
+ )
+ for report in reports:
+ report.object_id = None
+ report.save(commit=commit)
+
+ # Hand off to the correct deletion function.
+ if deletion == self.HARD_DELETE:
+ return self.hard_delete(commit=commit)
+ elif deletion == self.SOFT_DELETE:
+ return self.soft_delete(commit=commit)
+ else:
+ raise ValueError(
+ "Invalid deletion mode {mode!r}".format(
+ mode=deletion
+ )
+ )
+
+ def soft_delete(self, commit):
+ # Create the graveyard version of this model
+ # Importing this here due to cyclic imports
+ from mediagoblin.db.models import User, Graveyard, GenericModelReference
+
+ tombstone = Graveyard()
+ if getattr(self, "public_id", None) is not None:
+ tombstone.public_id = self.public_id
+
+ # This is a special case: we don't want to save an actor if the thing
+ # being soft deleted is a User model, as that would create circular
+ # ForeignKeys
+ if not isinstance(self, User):
+ tombstone.actor = User.query.filter_by(
+ id=self.actor
+ ).first()
+ tombstone.object_type = self.object_type
+ tombstone.save(commit=False)
+
+ # Many places will have GenericForeignKeys still pointing at the model;
+ # we want to remap those to our tombstone.
+ gmrs = GenericModelReference.query.filter_by(
+ obj_pk=self.id,
+ model_type=self.__tablename__
+ ).update({
+ "obj_pk": tombstone.id,
+ "model_type": tombstone.__tablename__,
+ })
+
+
+ # Now we can go ahead and actually delete the model.
+ return self.hard_delete(commit=commit)
- def delete(self, commit=True):
+ def hard_delete(self, commit):
"""Delete the object and commit the change immediately by default"""
- sess = object_session(self)
+ sess = self._session
assert sess is not None, "Not going to delete detached %r" % self
sess.delete(self)
if commit:
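Taken together, these base-class changes give every model a two-mode delete API, with soft deletion opted into per model. A hedged usage sketch (the model definition here is illustrative, not part of this patch):

    from mediagoblin.db.base import GMGTableBase

    class MediaEntry(Base):  # hypothetical model
        deletion_mode = GMGTableBase.SOFT_DELETE

    entry = MediaEntry.query.first()
    entry.delete()  # soft: GenericModelReferences remapped to a Graveyard tombstone
    # ...or force removal regardless of the model's default:
    # entry.delete(deletion=GMGTableBase.HARD_DELETE)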
diff --git a/mediagoblin/db/migration_tools.py b/mediagoblin/db/migration_tools.py
index e39070c3..f4273fa0 100644
--- a/mediagoblin/db/migration_tools.py
+++ b/mediagoblin/db/migration_tools.py
@@ -14,10 +14,24 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import unicode_literals
+
+import logging
+import os
+import pkg_resources
+
+from alembic import command
+from alembic.config import Config
+from alembic.migration import MigrationContext
+
+from mediagoblin.db.base import Base
from mediagoblin.tools.common import simple_printer
from sqlalchemy import Table
from sqlalchemy.sql import select
+log = logging.getLogger(__name__)
+
+
class TableAlreadyExists(Exception):
pass
@@ -30,7 +44,7 @@ class MigrationManager(object):
to the latest migrations, etc.
"""
- def __init__(self, name, models, foundations, migration_registry, session,
+ def __init__(self, name, models, migration_registry, session,
printer=simple_printer):
"""
Args:
@@ -39,9 +53,8 @@ class MigrationManager(object):
- migration_registry: where we should find all migrations to
run
"""
- self.name = unicode(name)
+ self.name = name
self.models = models
- self.foundations = foundations
self.session = session
self.migration_registry = migration_registry
self._sorted_migrations = None
@@ -112,14 +125,14 @@ class MigrationManager(object):
def migrations_to_run(self):
"""
Get a list of migrations to run still, if any.
-
+
Note that this will fail if there's no migration record for
this class!
"""
assert self.database_current_migration is not None
db_current_migration = self.database_current_migration
-
+
return [
(migration_number, migration_func)
for migration_number, migration_func in self.sorted_migrations
@@ -142,18 +155,6 @@ class MigrationManager(object):
self.session.bind,
tables=[model.__table__ for model in self.models])
- def populate_table_foundations(self):
- """
- Create the table foundations (default rows) as layed out in FOUNDATIONS
- in mediagoblin.db.models
- """
- for Model, rows in self.foundations.items():
- self.printer(u' + Laying foundations for %s table\n' %
- (Model.__name__))
- for parameters in rows:
- new_row = Model(**parameters)
- self.session.add(new_row)
-
def create_new_migration_record(self):
"""
Create a new migration record for this migration set
@@ -184,7 +185,7 @@ class MigrationManager(object):
migration_number, migration_func.func_name))
return u'migrated'
-
+
def name_for_printing(self):
if self.name == u'__main__':
return u"main mediagoblin tables"
@@ -218,7 +219,6 @@ class MigrationManager(object):
# auto-set at latest migration number
self.create_new_migration_record()
self.printer(u"done.\n")
- self.populate_table_foundations()
self.set_current_migration()
return u'inited'
@@ -230,7 +230,7 @@ class MigrationManager(object):
for migration_number, migration_func in migrations_to_run:
self.printer(
u' + Running migration %s, "%s"... ' % (
- migration_number, migration_func.func_name))
+ migration_number, migration_func.__name__))
migration_func(self.session)
self.set_current_migration(migration_number)
self.printer('done.\n')
@@ -263,6 +263,8 @@ class RegisterMigration(object):
assert migration_number > 0, "Migration number must be > 0!"
assert migration_number not in migration_registry, \
"Duplicate migration numbers detected! That's not allowed!"
+ assert migration_number <= 44, ('Alembic should be used for '
+ 'new migrations')
self.migration_number = migration_number
self.migration_registry = migration_registry
@@ -295,7 +297,7 @@ def replace_table_hack(db, old_table, replacement_table):
-tion, for example, dropping a boolean column in sqlite is impossible w/o
this method
- :param old_table A ref to the old table, gotten through
+ :param old_table A ref to the old table, gotten through
inspect_table
:param replacement_table A ref to the new table, gotten through
@@ -319,3 +321,74 @@ def replace_table_hack(db, old_table, replacement_table):
replacement_table.rename(old_table_name)
db.commit()
+
+def model_iteration_hack(db, query):
+ """
+ Return either the query you gave (on PostgreSQL) or, on SQLite, a list
+ holding all of its results. In migrations SQLite can't seem to deal with
+ concurrent queries, so if you iterate over models and commit inside the
+ loop you hit an exception saying the connection behind your iteration
+ query has been closed. Returning a list avoids that.
+
+ NB: This loads all of the query results into memory. There isn't a good
+ way around this; we're assuming sqlite users have small databases.
+ """
+ # If it's SQLite just return all the objects
+ if db.bind.url.drivername == "sqlite":
+ return [obj for obj in db.execute(query)]
+
+ # Postgres return the query as it knows how to deal with it.
+ return db.execute(query)
+
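A sketch of how model_iteration_hack is meant to be used by the data migrations below; committing per row is then safe on SQLite because the rows were materialised up front (the table used here is illustrative):

    metadata = MetaData(bind=db.bind)
    user_table = inspect_table(metadata, "core__users")

    for user in model_iteration_hack(db, user_table.select()):
        db.execute(user_table.update().values(
            updated=user.created
        ).where(user_table.c.id == user.id))
        db.commit()  # would invalidate a live SQLite cursor; the list copy avoids that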
+
+def populate_table_foundations(session, foundations, name,
+ printer=simple_printer):
+ """
+ Create the table foundations (default rows) as laid out in FOUNDATIONS
+ in mediagoblin.db.models
+ """
+ printer(u'Laying foundations for %s:\n' % name)
+ for Model, rows in foundations.items():
+ printer(u' + Laying foundations for %s table\n' %
+ (Model.__name__))
+ for parameters in rows:
+ new_row = Model(**parameters)
+ session.add(new_row)
+
+ session.commit()
+
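For illustration, a foundations mapping as consumed above might look like the following (a sketch only; the real FOUNDATIONS dict lives in mediagoblin.db.models, and the privilege rows mirror PRIVILEGE_FOUNDATIONS elsewhere in this diff):

    from mediagoblin.db.models import Privilege

    FOUNDATIONS = {
        Privilege: [{"privilege_name": u"admin"},
                    {"privilege_name": u"commenter"},
                    {"privilege_name": u"active"}],
    }
    populate_table_foundations(session, FOUNDATIONS, "__main__")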
+
+def build_alembic_config(global_config, cmd_options, session):
+ """
+ Build up a config that the alembic tooling can use based on our
+ configuration. Initialize the database session appropriately
+ as well.
+ """
+ root_dir = os.path.abspath(os.path.dirname(os.path.dirname(
+ os.path.dirname(__file__))))
+ alembic_cfg_path = os.path.join(root_dir, 'alembic.ini')
+ cfg = Config(alembic_cfg_path,
+ cmd_opts=cmd_options)
+ cfg.attributes["session"] = session
+
+ version_locations = [
+ pkg_resources.resource_filename(
+ "mediagoblin.db", os.path.join("migrations", "versions")),
+ ]
+
+ cfg.set_main_option("sqlalchemy.url", str(session.get_bind().url))
+
+ for plugin in global_config.get("plugins", []):
+ plugin_migrations = pkg_resources.resource_filename(
+ plugin, "migrations")
+ is_migrations_dir = (os.path.exists(plugin_migrations) and
+ os.path.isdir(plugin_migrations))
+ if is_migrations_dir:
+ version_locations.append(plugin_migrations)
+
+ cfg.set_main_option(
+ "version_locations",
+ " ".join(version_locations))
+
+ return cfg
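A sketch of feeding the returned config into Alembic's command API (alembic.command.upgrade is a standard Alembic entry point; the global_config, cmd_options and session objects are assumed to come from the running application):

    from alembic import command

    cfg = build_alembic_config(global_config, cmd_options=None, session=session)
    command.upgrade(cfg, "heads")  # run all migrations up to every head revision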
diff --git a/mediagoblin/db/migrations.py b/mediagoblin/db/migrations.py
index 8e0b5096..55d64294 100644
--- a/mediagoblin/db/migrations.py
+++ b/mediagoblin/db/migrations.py
@@ -14,25 +14,43 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import print_function
+
import datetime
import uuid
+import six
+
+try:
+ import migrate
+except ImportError:
+ # Apparently sqlalchemy-migrate is not installed, so we assume
+ # we must not need it
+ # TODO: Better error handling here, or require sqlalchemy-migrate
+ print("sqlalchemy-migrate not found... assuming we don't need it")
+ print("I hope you aren't running the legacy migrations!")
+
+import pytz
+import dateutil.tz
from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
Integer, Unicode, UnicodeText, DateTime,
- ForeignKey, Date)
+ ForeignKey, Date, Index)
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import and_
-from migrate.changeset.constraint import UniqueConstraint
-
+from sqlalchemy.schema import UniqueConstraint
+from mediagoblin import oauth
+from mediagoblin.tools import crypto
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
from mediagoblin.db.migration_tools import (
- RegisterMigration, inspect_table, replace_table_hack)
-from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
- Privilege)
+ RegisterMigration, inspect_table, replace_table_hack, model_iteration_hack)
+from mediagoblin.db.models import (MediaEntry, Collection, Comment, User,
+ Privilege, Generator, LocalUser, Location,
+ Client, RequestToken, AccessToken)
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
+
MIGRATIONS = {}
@@ -249,7 +267,7 @@ def mediaentry_new_slug_era(db):
for row in db.execute(media_table.select()):
# no slug, try setting to an id
if not row.slug:
- append_garbage_till_unique(row, unicode(row.id))
+ append_garbage_till_unique(row, six.text_type(row.id))
# has "=" or ":" in it... we're getting rid of those
elif u"=" in row.slug or u":" in row.slug:
append_garbage_till_unique(
@@ -278,7 +296,7 @@ def unique_collections_slug(db):
existing_slugs[row.creator].append(row.slug)
for row_id in slugs_to_change:
- new_slug = unicode(uuid.uuid4())
+ new_slug = six.text_type(uuid.uuid4())
db.execute(collection_table.update().
where(collection_table.c.id == row_id).
values(slug=new_slug))
@@ -343,7 +361,7 @@ class CommentNotification_v0(Notification_v0):
__tablename__ = 'core__comment_notifications'
id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)
- subject_id = Column(Integer, ForeignKey(MediaComment.id))
+ subject_id = Column(Integer, ForeignKey(Comment.id))
class ProcessingNotification_v0(Notification_v0):
@@ -466,7 +484,6 @@ def create_oauth1_tables(db):
db.commit()
-
@RegisterMigration(15, MIGRATIONS)
def wants_notifications(db):
"""Add a wants_notifications field to User model"""
@@ -533,7 +550,7 @@ class CommentReport_v0(ReportBase_v0):
id = Column('id',Integer, ForeignKey('core__reports.id'),
primary_key=True)
- comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
+ comment_id = Column(Integer, ForeignKey(Comment.id), nullable=True)
class MediaReport_v0(ReportBase_v0):
@@ -579,7 +596,6 @@ PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':u'admin'},
{'privilege_name':u'commenter'},
{'privilege_name':u'active'}]
-
# vR1 stands for "version Rename 1". This only exists because we need
# to deal with dropping some booleans and it's otherwise impossible
# with sqlite.
@@ -660,8 +676,8 @@ def create_moderation_tables(db):
# admin, an active user or an inactive user ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
for admin_user in admin_users_ids:
admin_user_id = admin_user['id']
- for privilege_id in [admin_privilege_id, uploader_privilege_id,
- reporter_privilege_id, commenter_privilege_id,
+ for privilege_id in [admin_privilege_id, uploader_privilege_id,
+ reporter_privilege_id, commenter_privilege_id,
active_privilege_id]:
db.execute(user_privilege_assoc.insert().values(
core__privilege_id=admin_user_id,
@@ -669,7 +685,7 @@ def create_moderation_tables(db):
for active_user in active_users_ids:
active_user_id = active_user['id']
- for privilege_id in [uploader_privilege_id, reporter_privilege_id,
+ for privilege_id in [uploader_privilege_id, reporter_privilege_id,
commenter_privilege_id, active_privilege_id]:
db.execute(user_privilege_assoc.insert().values(
core__privilege_id=active_user_id,
@@ -677,7 +693,7 @@ def create_moderation_tables(db):
for inactive_user in inactive_users_ids:
inactive_user_id = inactive_user['id']
- for privilege_id in [uploader_privilege_id, reporter_privilege_id,
+ for privilege_id in [uploader_privilege_id, reporter_privilege_id,
commenter_privilege_id]:
db.execute(user_privilege_assoc.insert().values(
core__privilege_id=inactive_user_id,
@@ -789,3 +805,1426 @@ def fix_privilege_user_association_table(db):
privilege_user_assoc.c.core__privilege_id.alter(name="user")
db.commit()
+
+
+@RegisterMigration(22, MIGRATIONS)
+def add_index_username_field(db):
+ """
+ This migration has been found to be doing the wrong thing. See
+ the documentation in migration 23 (revert_username_index) below
+ which undoes this for those databases that did run this migration.
+
+ Old description:
+ This indexes the User.username field which is frequently queried
+ for example a user logging in. This solves the issue #894
+ """
+ ## This code is left commented out *on purpose!*
+ ##
+ ## We do not normally allow commented out code like this in
+ ## MediaGoblin but this is a special case: since this migration has
+ ## been nullified but with great work to set things back below,
+ ## this is commented out for historical clarity.
+ #
+ # metadata = MetaData(bind=db.bind)
+ # user_table = inspect_table(metadata, "core__users")
+ #
+ # new_index = Index("ix_core__users_uploader", user_table.c.username)
+ # new_index.create()
+ #
+ # db.commit()
+ pass
+
+
+@RegisterMigration(23, MIGRATIONS)
+def revert_username_index(db):
+ """
+ Revert the stuff we did in migration 22 above.
+
+ There were a couple of problems with what we did:
+ - There was never a need for this migration! The unique
+ constraint had an implicit b-tree index, so it wasn't really
+ needed. (This is my (Chris Webber's) fault for suggesting it
+ needed to happen without knowing what's going on... my bad!)
+ - On top of that, databases created after the models.py was
+ changed weren't the same as those that had been run through
+ migration 22 above.
+
+ As such, we're setting things back to the way they were before,
+ but as it turns out, that's tricky to do!
+ """
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+ indexes = dict(
+ [(index.name, index) for index in user_table.indexes])
+
+ # index from unnecessary migration
+ users_uploader_index = indexes.get(u'ix_core__users_uploader')
+ # index created from models.py after (unique=True, index=True)
+ # was set in models.py
+ users_username_index = indexes.get(u'ix_core__users_username')
+
+ if users_uploader_index is None and users_username_index is None:
+ # We don't need to do anything.
+ # The database isn't in a state where it needs fixing
+ #
+ # (ie, either went through the previous borked migration or
+ # was initialized with a models.py where core__users was both
+ # unique=True and index=True)
+ return
+
+ if db.bind.url.drivername == 'sqlite':
+ # Again, sqlite has problems. So this is tricky.
+
+ # Yes, this is correct to use User_vR1! Nothing has changed
+ # between the *correct* version of this table and migration 18.
+ User_vR1.__table__.create(db.bind)
+ db.commit()
+ new_user_table = inspect_table(metadata, 'rename__users')
+ replace_table_hack(db, user_table, new_user_table)
+
+ else:
+ # If the db is not run using SQLite, we don't need to do crazy
+ # table copying.
+
+ # Remove whichever of the not-used indexes are in place
+ if users_uploader_index is not None:
+ users_uploader_index.drop()
+ if users_username_index is not None:
+ users_username_index.drop()
+
+ # Given we're removing indexes then adding a unique constraint
+ # which *we know might fail*, thus probably rolling back the
+ # session, let's commit here.
+ db.commit()
+
+ try:
+ # Add the unique constraint
+ constraint = UniqueConstraint(
+ 'username', table=user_table)
+ constraint.create()
+ except ProgrammingError:
+ # constraint already exists, no need to add
+ db.rollback()
+
+ db.commit()
+
+class Generator_R0(declarative_base()):
+ __tablename__ = "core__generators"
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode, nullable=False)
+ published = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ object_type = Column(Unicode, nullable=False)
+
+class ActivityIntermediator_R0(declarative_base()):
+ __tablename__ = "core__activity_intermediators"
+ id = Column(Integer, primary_key=True)
+ type = Column(Unicode, nullable=False)
+
+ # These are needed for migration 29
+ TABLENAMES = {
+ "user": "core__users",
+ "media": "core__media_entries",
+ "comment": "core__media_comments",
+ "collection": "core__collections",
+ }
+
+class Activity_R0(declarative_base()):
+ __tablename__ = "core__activities"
+ id = Column(Integer, primary_key=True)
+ actor = Column(Integer, ForeignKey(User.id), nullable=False)
+ published = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ verb = Column(Unicode, nullable=False)
+ content = Column(Unicode, nullable=True)
+ title = Column(Unicode, nullable=True)
+ generator = Column(Integer, ForeignKey(Generator_R0.id), nullable=True)
+ object = Column(Integer,
+ ForeignKey(ActivityIntermediator_R0.id),
+ nullable=False)
+ target = Column(Integer,
+ ForeignKey(ActivityIntermediator_R0.id),
+ nullable=True)
+
+
+@RegisterMigration(24, MIGRATIONS)
+def activity_migration(db):
+ """
+ Creates everything to create activities in GMG
+ - Adds Activity, ActivityIntermediator and Generator table
+ - Creates GMG service generator for activities produced by the server
+ - Adds the activity_as_object and activity_as_target to objects/targets
+ - Retroactively adds activities for what we can accurately work out
+ """
+ # Set constants we'll use later
+ FOREIGN_KEY = "core__activity_intermediators.id"
+ ACTIVITY_COLUMN = "activity"
+
+ # Create the new tables.
+ ActivityIntermediator_R0.__table__.create(db.bind)
+ Generator_R0.__table__.create(db.bind)
+ Activity_R0.__table__.create(db.bind)
+ db.commit()
+
+ # Initiate the tables we want to use later
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+ activity_table = inspect_table(metadata, "core__activities")
+ generator_table = inspect_table(metadata, "core__generators")
+ collection_table = inspect_table(metadata, "core__collections")
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+ media_comments_table = inspect_table(metadata, "core__media_comments")
+ ai_table = inspect_table(metadata, "core__activity_intermediators")
+
+
+ # Create the foundations for Generator
+ db.execute(generator_table.insert().values(
+ name="GNU Mediagoblin",
+ object_type="service",
+ published=datetime.datetime.now(),
+ updated=datetime.datetime.now()
+ ))
+ db.commit()
+
+ # Get the ID of that generator
+ gmg_generator = db.execute(generator_table.select(
+ generator_table.c.name==u"GNU Mediagoblin")).first()
+
+
+ # Now we want to modify the tables which MAY have an activity at some point
+ media_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ media_col.create(media_entry_table)
+
+ user_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ user_col.create(user_table)
+
+ comments_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ comments_col.create(media_comments_table)
+
+ collection_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ collection_col.create(collection_table)
+ db.commit()
+
+
+ # Now we want to retroactively add what activities we can.
+ # First we'll add activities for when people uploaded media; these
+ # can't have content as it's not feasible to get the correct content
+ # strings.
+ for media in db.execute(media_entry_table.select()):
+ # Now we want to create the intermediary
+ db_ai = db.execute(ai_table.insert().values(
+ type="media",
+ ))
+ db_ai = db.execute(ai_table.select(
+ ai_table.c.id==db_ai.inserted_primary_key[0]
+ )).first()
+
+ # Add the activity
+ activity = {
+ "verb": "create",
+ "actor": media.uploader,
+ "published": media.created,
+ "updated": media.created,
+ "generator": gmg_generator.id,
+ "object": db_ai.id
+ }
+ db.execute(activity_table.insert().values(**activity))
+
+ # Add the AI to the media.
+ db.execute(media_entry_table.update().values(
+ activity=db_ai.id
+ ).where(media_entry_table.c.id==media.id))
+
+ # Now we want to add all the comments people made
+ for comment in db.execute(media_comments_table.select()):
+ # Get the MediaEntry for the comment
+ media_entry = db.execute(
+ media_entry_table.select(
+ media_entry_table.c.id==comment.media_entry
+ )).first()
+
+ # Create an AI for target
+ db_ai_media = db.execute(ai_table.select(
+ ai_table.c.id==media_entry.activity
+ )).first().id
+
+ db.execute(
+ media_comments_table.update().values(
+ activity=db_ai_media
+ ).where(media_comments_table.c.id==media_entry.id))
+
+ # Now create the AI for the comment
+ db_ai_comment = db.execute(ai_table.insert().values(
+ type="comment"
+ )).inserted_primary_key[0]
+
+ activity = {
+ "verb": "comment",
+ "actor": comment.author,
+ "published": comment.created,
+ "updated": comment.created,
+ "generator": gmg_generator.id,
+ "object": db_ai_comment,
+ "target": db_ai_media,
+ }
+
+ # Now add the comment object
+ db.execute(activity_table.insert().values(**activity))
+
+ # Now add activity to comment
+ db.execute(media_comments_table.update().values(
+ activity=db_ai_comment
+ ).where(media_comments_table.c.id==comment.id))
+
+ # Create 'create' activities for all collections
+ for collection in db.execute(collection_table.select()):
+ # create AI
+ db_ai = db.execute(ai_table.insert().values(
+ type="collection"
+ ))
+ db_ai = db.execute(ai_table.select(
+ ai_table.c.id==db_ai.inserted_primary_key[0]
+ )).first()
+
+ # Now link the collection to the AI
+ db.execute(collection_table.update().values(
+ activity=db_ai.id
+ ).where(collection_table.c.id==collection.id))
+
+ activity = {
+ "verb": "create",
+ "actor": collection.creator,
+ "published": collection.created,
+ "updated": collection.created,
+ "generator": gmg_generator.id,
+ "object": db_ai.id,
+ }
+
+ db.execute(activity_table.insert().values(**activity))
+
+ # Now add the activity to the collection
+ db.execute(collection_table.update().values(
+ activity=db_ai.id
+ ).where(collection_table.c.id==collection.id))
+
+ db.commit()
+
+class Location_V0(declarative_base()):
+ __tablename__ = "core__locations"
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode)
+ position = Column(MutationDict.as_mutable(JSONEncoded))
+ address = Column(MutationDict.as_mutable(JSONEncoded))
+
+@RegisterMigration(25, MIGRATIONS)
+def add_location_model(db):
+ """ Add location model """
+ metadata = MetaData(bind=db.bind)
+
+ # Create location table
+ Location_V0.__table__.create(db.bind)
+ db.commit()
+
+ # Inspect the tables we need
+ user = inspect_table(metadata, "core__users")
+ collections = inspect_table(metadata, "core__collections")
+ media_entry = inspect_table(metadata, "core__media_entries")
+ media_comments = inspect_table(metadata, "core__media_comments")
+
+ # Now add location support to the various models
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(user)
+
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(collections)
+
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(media_entry)
+
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(media_comments)
+
+ db.commit()
+
+@RegisterMigration(26, MIGRATIONS)
+def datetime_to_utc(db):
+ """ Convert datetime stamps to UTC """
+ # Get the server's timezone, this is what the database has stored
+ server_timezone = dateutil.tz.tzlocal()
+
+ ##
+ # Look up all the timestamps and convert them to UTC
+ ##
+ metadata = MetaData(bind=db.bind)
+
+ def dt_to_utc(dt):
+ # Add the current timezone
+ dt = dt.replace(tzinfo=server_timezone)
+
+ # Convert to UTC
+ return dt.astimezone(pytz.UTC)
+
+ # Convert the User model
+ user_table = inspect_table(metadata, "core__users")
+ for user in db.execute(user_table.select()):
+ db.execute(user_table.update().values(
+ created=dt_to_utc(user.created)
+ ).where(user_table.c.id==user.id))
+
+ # Convert Client
+ client_table = inspect_table(metadata, "core__clients")
+ for client in db.execute(client_table.select()):
+ db.execute(client_table.update().values(
+ created=dt_to_utc(client.created),
+ updated=dt_to_utc(client.updated)
+ ).where(client_table.c.id==client.id))
+
+ # Convert RequestToken
+ rt_table = inspect_table(metadata, "core__request_tokens")
+ for request_token in db.execute(rt_table.select()):
+ db.execute(rt_table.update().values(
+ created=dt_to_utc(request_token.created),
+ updated=dt_to_utc(request_token.updated)
+ ).where(rt_table.c.token==request_token.token))
+
+ # Convert AccessToken
+ at_table = inspect_table(metadata, "core__access_tokens")
+ for access_token in db.execute(at_table.select()):
+ db.execute(at_table.update().values(
+ created=dt_to_utc(access_token.created),
+ updated=dt_to_utc(access_token.updated)
+ ).where(at_table.c.token==access_token.token))
+
+ # Convert MediaEntry
+ media_table = inspect_table(metadata, "core__media_entries")
+ for media in db.execute(media_table.select()):
+ db.execute(media_table.update().values(
+ created=dt_to_utc(media.created)
+ ).where(media_table.c.id==media.id))
+
+ # Convert Media Attachment File
+ media_attachment_table = inspect_table(metadata, "core__attachment_files")
+ for ma in db.execute(media_attachment_table.select()):
+ db.execute(media_attachment_table.update().values(
+ created=dt_to_utc(ma.created)
+ ).where(media_attachment_table.c.id==ma.id))
+
+ # Convert MediaComment
+ comment_table = inspect_table(metadata, "core__media_comments")
+ for comment in db.execute(comment_table.select()):
+ db.execute(comment_table.update().values(
+ created=dt_to_utc(comment.created)
+ ).where(comment_table.c.id==comment.id))
+
+ # Convert Collection
+ collection_table = inspect_table(metadata, "core__collections")
+ for collection in db.execute(collection_table.select()):
+ db.execute(collection_table.update().values(
+ created=dt_to_utc(collection.created)
+ ).where(collection_table.c.id==collection.id))
+
+ # Convert Collection Item
+ collection_item_table = inspect_table(metadata, "core__collection_items")
+ for ci in db.execute(collection_item_table.select()):
+ db.execute(collection_item_table.update().values(
+ added=dt_to_utc(ci.added)
+ ).where(collection_item_table.c.id==ci.id))
+
+ # Convert Comment subscription
+ comment_sub = inspect_table(metadata, "core__comment_subscriptions")
+ for sub in db.execute(comment_sub.select()):
+ db.execute(comment_sub.update().values(
+ created=dt_to_utc(sub.created)
+ ).where(comment_sub.c.id==sub.id))
+
+ # Convert Notification
+ notification_table = inspect_table(metadata, "core__notifications")
+ for notification in db.execute(notification_table.select()):
+ db.execute(notification_table.update().values(
+ created=dt_to_utc(notification.created)
+ ).where(notification_table.c.id==notification.id))
+
+ # Convert ReportBase
+ reportbase_table = inspect_table(metadata, "core__reports")
+ for report in db.execute(reportbase_table.select()):
+ db.execute(reportbase_table.update().values(
+ created=dt_to_utc(report.created)
+ ).where(reportbase_table.c.id==report.id))
+
+ # Convert Generator
+ generator_table = inspect_table(metadata, "core__generators")
+ for generator in db.execute(generator_table.select()):
+ db.execute(generator_table.update().values(
+ published=dt_to_utc(generator.published),
+ updated=dt_to_utc(generator.updated)
+ ).where(generator_table.c.id==generator.id))
+
+ # Convert Activity
+ activity_table = inspect_table(metadata, "core__activities")
+ for activity in db.execute(activity_table.select()):
+ db.execute(activity_table.update().values(
+ published=dt_to_utc(activity.published),
+ updated=dt_to_utc(activity.updated)
+ ).where(activity_table.c.id==activity.id))
+
+ # Commit this to the database
+ db.commit()
+
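A worked example of the dt_to_utc conversion performed above, assuming the server ran at UTC+2 (tzoffset stands in for dateutil.tz.tzlocal() to keep the example reproducible):

    import datetime
    import dateutil.tz
    import pytz

    server_timezone = dateutil.tz.tzoffset("CEST", 2 * 3600)
    naive = datetime.datetime(2016, 5, 1, 12, 0, 0)  # as stored in the database
    aware = naive.replace(tzinfo=server_timezone)
    print(aware.astimezone(pytz.UTC))  # 2016-05-01 10:00:00+00:00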
+##
+# Migrations to handle migrating from activity specific foreign key to the
+# new GenericForeignKey implementations. They have been split up to improve
+# readability and minimise errors
+##
+
+class GenericModelReference_V0(declarative_base()):
+ __tablename__ = "core__generic_model_reference"
+
+ id = Column(Integer, primary_key=True)
+ obj_pk = Column(Integer, nullable=False)
+ model_type = Column(Unicode, nullable=False)
+
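A GenericModelReference row identifies exactly one row in one table via its (model_type, obj_pk) pair. A minimal resolution sketch in the raw-table style these migrations use (the helper name is illustrative):

    def resolve_gmr(db, metadata, gmr_row):
        # model_type holds a __tablename__; obj_pk is the primary key within it.
        table = inspect_table(metadata, gmr_row.model_type)
        return db.execute(table.select(
            table.c.id == gmr_row.obj_pk
        )).first()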
+@RegisterMigration(27, MIGRATIONS)
+def create_generic_model_reference(db):
+ """ Creates the Generic Model Reference table """
+ GenericModelReference_V0.__table__.create(db.bind)
+ db.commit()
+
+@RegisterMigration(28, MIGRATIONS)
+def add_foreign_key_fields(db):
+ """
+ Add the fields for GenericForeignKey to the model under temporary names
+ so that a data migration can occur later. They will then be renamed to
+ the original names.
+ """
+ metadata = MetaData(bind=db.bind)
+ activity_table = inspect_table(metadata, "core__activities")
+
+ # Create column and add to model.
+ object_column = Column("temp_object", Integer, ForeignKey(GenericModelReference_V0.id))
+ object_column.create(activity_table)
+
+ target_column = Column("temp_target", Integer, ForeignKey(GenericModelReference_V0.id))
+ target_column.create(activity_table)
+
+ # Commit this to the database
+ db.commit()
+
+@RegisterMigration(29, MIGRATIONS)
+def migrate_data_foreign_keys(db):
+ """
+ This will migrate the data from the old object and target attributes which
+ use the old ActivityIntermediator to the new temporary fields which use the
+ new GenericForeignKey.
+ """
+
+ metadata = MetaData(bind=db.bind)
+ activity_table = inspect_table(metadata, "core__activities")
+ ai_table = inspect_table(metadata, "core__activity_intermediators")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Iterate through all activities doing the migration per activity.
+ for activity in model_iteration_hack(db, activity_table.select()):
+ # First do the "Activity.object" migration to "Activity.temp_object"
+ # I need to get the object from the Activity, I can't use the old
+ # Activity.get_object as we're in a migration.
+ object_ai = db.execute(ai_table.select(
+ ai_table.c.id==activity.object
+ )).first()
+
+ object_ai_type = ActivityIntermediator_R0.TABLENAMES[object_ai.type]
+ object_ai_table = inspect_table(metadata, object_ai_type)
+
+ activity_object = db.execute(object_ai_table.select(
+ object_ai_table.c.activity==object_ai.id
+ )).first()
+
+ # If the object the activity is referencing doesn't resolve, we should
+ # skip it; it should be deleted when the AI table is deleted.
+ if activity_object is None:
+ continue
+
+ # now we need to create the GenericModelReference
+ object_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=activity_object.id,
+ model_type=object_ai_type
+ ))
+
+ # Now set the ID of the GenericModelReference in the GenericForeignKey
+ db.execute(activity_table.update().values(
+ temp_object=object_gmr.inserted_primary_key[0]
+ ).where(activity_table.c.id==activity.id))
+
+ # Now do same process for "Activity.target" to "Activity.temp_target"
+ # not all Activities have a target so if it doesn't just skip the rest
+ # of this.
+ if activity.target is None:
+ continue
+
+ # Now get the target for the activity.
+ target_ai = db.execute(ai_table.select(
+ ai_table.c.id==activity.target
+ )).first()
+
+ target_ai_type = ActivityIntermediator_R0.TABLENAMES[target_ai.type]
+ target_ai_table = inspect_table(metadata, target_ai_type)
+
+ activity_target = db.execute(target_ai_table.select(
+ target_ai_table.c.activity==target_ai.id
+ )).first()
+
+ # It's quite possible that the target, like the object, could also have
+ # been deleted; if so we should just skip it.
+ if activity_target is None:
+ continue
+
+ # We now want to create the new target GenericModelReference
+ target_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=activity_target.id,
+ model_type=target_ai_type
+ ))
+
+ # Now set the ID of the GenericModelReference in the GenericForeignKey
+ db.execute(activity_table.update().values(
+ temp_target=target_gmr.inserted_primary_key[0]
+ ).where(activity_table.c.id==activity.id))
+
+ # Commit to the database. We're doing it here rather than outside the
+ # loop because if the server has a lot of data this can cause problems
+ db.commit()
+
+@RegisterMigration(30, MIGRATIONS)
+def rename_and_remove_object_and_target(db):
+ """
+ Renames the new Activity.object and Activity.target fields and removes the
+ old ones.
+ """
+ metadata = MetaData(bind=db.bind)
+ activity_table = inspect_table(metadata, "core__activities")
+
+ # Firstly lets remove the old fields.
+ old_object_column = activity_table.columns["object"]
+ old_target_column = activity_table.columns["target"]
+
+ # Drop the tables.
+ old_object_column.drop()
+ old_target_column.drop()
+
+ # Now get the new columns.
+ new_object_column = activity_table.columns["temp_object"]
+ new_target_column = activity_table.columns["temp_target"]
+
+ # rename them to the old names.
+ new_object_column.alter(name="object_id")
+ new_target_column.alter(name="target_id")
+
+ # Commit the changes to the database.
+ db.commit()
+
+@RegisterMigration(31, MIGRATIONS)
+def remove_activityintermediator(db):
+ """
+ This removes the old specific ActivityIntermediator model which has been
+ superseded by the GenericForeignKey field.
+ """
+ metadata = MetaData(bind=db.bind)
+
+ # Remove the columns which reference the AI
+ collection_table = inspect_table(metadata, "core__collections")
+ collection_ai_column = collection_table.columns["activity"]
+ collection_ai_column.drop()
+
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+ media_entry_ai_column = media_entry_table.columns["activity"]
+ media_entry_ai_column.drop()
+
+ comments_table = inspect_table(metadata, "core__media_comments")
+ comments_ai_column = comments_table.columns["activity"]
+ comments_ai_column.drop()
+
+ user_table = inspect_table(metadata, "core__users")
+ user_ai_column = user_table.columns["activity"]
+ user_ai_column.drop()
+
+ # Drop the table
+ ai_table = inspect_table(metadata, "core__activity_intermediators")
+ ai_table.drop()
+
+ # Commit the changes
+ db.commit()
+
+##
+# Migrations for converting the User model into a Local and Remote User
+# setup.
+##
+
+class LocalUser_V0(declarative_base()):
+ __tablename__ = "core__local_users"
+
+ id = Column(Integer, ForeignKey(User.id), primary_key=True)
+ username = Column(Unicode, nullable=False, unique=True)
+ email = Column(Unicode, nullable=False)
+ pw_hash = Column(Unicode)
+
+ wants_comment_notification = Column(Boolean, default=True)
+ wants_notifications = Column(Boolean, default=True)
+ license_preference = Column(Unicode)
+ uploaded = Column(Integer, default=0)
+ upload_limit = Column(Integer)
+
+class RemoteUser_V0(declarative_base()):
+ __tablename__ = "core__remote_users"
+
+ id = Column(Integer, ForeignKey(User.id), primary_key=True)
+ webfinger = Column(Unicode, unique=True)
+
+@RegisterMigration(32, MIGRATIONS)
+def federation_user_create_tables(db):
+ """
+ Create all the tables
+ """
+ # Create tables needed
+ LocalUser_V0.__table__.create(db.bind)
+ RemoteUser_V0.__table__.create(db.bind)
+ db.commit()
+
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+
+ # Create the fields
+ updated_column = Column(
+ "updated",
+ DateTime,
+ default=datetime.datetime.utcnow
+ )
+ updated_column.create(user_table)
+
+ type_column = Column(
+ "type",
+ Unicode
+ )
+ type_column.create(user_table)
+
+ name_column = Column(
+ "name",
+ Unicode
+ )
+ name_column.create(user_table)
+
+ db.commit()
+
+@RegisterMigration(33, MIGRATIONS)
+def federation_user_migrate_data(db):
+ """
+ Migrate the data over to the new user models
+ """
+ metadata = MetaData(bind=db.bind)
+
+ user_table = inspect_table(metadata, "core__users")
+ local_user_table = inspect_table(metadata, "core__local_users")
+
+ for user in model_iteration_hack(db, user_table.select()):
+ db.execute(local_user_table.insert().values(
+ id=user.id,
+ username=user.username,
+ email=user.email,
+ pw_hash=user.pw_hash,
+ wants_comment_notification=user.wants_comment_notification,
+ wants_notifications=user.wants_notifications,
+ license_preference=user.license_preference,
+ uploaded=user.uploaded,
+ upload_limit=user.upload_limit
+ ))
+
+ db.execute(user_table.update().where(user_table.c.id==user.id).values(
+ updated=user.created,
+ type=LocalUser.__mapper_args__["polymorphic_identity"]
+ ))
+
+ db.commit()
+
+class User_vR2(declarative_base()):
+ __tablename__ = "rename__users"
+
+ id = Column(Integer, primary_key=True)
+ url = Column(Unicode)
+ bio = Column(UnicodeText)
+ name = Column(Unicode)
+ type = Column(Unicode)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ location = Column(Integer, ForeignKey(Location.id))
+
+@RegisterMigration(34, MIGRATIONS)
+def federation_remove_fields(db):
+ """
+ This removes the fields from User model which aren't shared
+ """
+ metadata = MetaData(bind=db.bind)
+
+ user_table = inspect_table(metadata, "core__users")
+
+ # Remove the columns moved to LocalUser from User
+ username_column = user_table.columns["username"]
+ username_column.drop()
+
+ email_column = user_table.columns["email"]
+ email_column.drop()
+
+ pw_hash_column = user_table.columns["pw_hash"]
+ pw_hash_column.drop()
+
+ license_preference_column = user_table.columns["license_preference"]
+ license_preference_column.drop()
+
+ uploaded_column = user_table.columns["uploaded"]
+ uploaded_column.drop()
+
+ upload_limit_column = user_table.columns["upload_limit"]
+ upload_limit_column.drop()
+
+ # SQLite can't drop booleans -.-
+ if db.bind.url.drivername == 'sqlite':
+ # Create the new hacky table
+ User_vR2.__table__.create(db.bind)
+ db.commit()
+ new_user_table = inspect_table(metadata, "rename__users")
+ replace_table_hack(db, user_table, new_user_table)
+ else:
+ wcn_column = user_table.columns["wants_comment_notification"]
+ wcn_column.drop()
+
+ wants_notifications_column = user_table.columns["wants_notifications"]
+ wants_notifications_column.drop()
+
+ db.commit()
+
+@RegisterMigration(35, MIGRATIONS)
+def federation_media_entry(db):
+ metadata = MetaData(bind=db.bind)
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+
+ # Add new fields
+ public_id_column = Column(
+ "public_id",
+ Unicode,
+ unique=True,
+ nullable=True
+ )
+ public_id_column.create(
+ media_entry_table,
+ unique_name="media_public_id"
+ )
+
+ remote_column = Column(
+ "remote",
+ Boolean,
+ default=False
+ )
+ remote_column.create(media_entry_table)
+
+ updated_column = Column(
+ "updated",
+ DateTime,
+ default=datetime.datetime.utcnow,
+ )
+ updated_column.create(media_entry_table)
+ db.commit()
+
+ # Data migration
+ for entry in model_iteration_hack(db, media_entry_table.select()):
+ db.execute(media_entry_table.update().values(
+ updated=entry.created,
+ remote=False
+ ).where(media_entry_table.c.id==entry.id))
+
+ db.commit()
+
+@RegisterMigration(36, MIGRATIONS)
+def create_oauth1_dummies(db):
+ """
+ Creates a dummy client, request token and access token.
+
+ These are used when invalid data is submitted in place of real client
+ credentials and access tokens. The use of dummy objects prevents timing
+ attacks.
+ """
+ metadata = MetaData(bind=db.bind)
+ client_table = inspect_table(metadata, "core__clients")
+ request_token_table = inspect_table(metadata, "core__request_tokens")
+ access_token_table = inspect_table(metadata, "core__access_tokens")
+
+ # Whilst we don't rely on the secret key being unique or unknown to
+ # prevent unauthorized clients from authenticating, as an extra layer of
+ # protection we create a cryptographically secure key, individual to each
+ # instance, that should never become known.
+ client_secret = crypto.random_string(50)
+ request_token_secret = crypto.random_string(50)
+ request_token_verifier = crypto.random_string(50)
+ access_token_secret = crypto.random_string(50)
+
+ # Dummy created/updated datetime object
+ epoc_datetime = datetime.datetime.fromtimestamp(0)
+
+ # Create the dummy Client
+ db.execute(client_table.insert().values(
+ id=oauth.DUMMY_CLIENT_ID,
+ secret=client_secret,
+ application_type="dummy",
+ created=epoc_datetime,
+ updated=epoc_datetime
+ ))
+
+ # Create the dummy RequestToken
+ db.execute(request_token_table.insert().values(
+ token=oauth.DUMMY_REQUEST_TOKEN,
+ secret=request_token_secret,
+ client=oauth.DUMMY_CLIENT_ID,
+ verifier=request_token_verifier,
+ created=epoc_datetime,
+ updated=epoc_datetime,
+ callback="oob"
+ ))
+
+ # Create the dummy AccessToken
+ db.execute(access_token_table.insert().values(
+ token=oauth.DUMMY_ACCESS_TOKEN,
+ secret=access_token_secret,
+ request_token=oauth.DUMMY_REQUEST_TOKEN,
+ created=epoc_datetime,
+ updated=epoc_datetime
+ ))
+
+ # Commit the changes
+ db.commit()
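To illustrate the timing-attack defence, a hedged sketch of how the dummy rows are meant to be used at authentication time (the lookup function is an assumption, not MediaGoblin's actual code):

    def get_client(client_id):
        client = Client.query.filter_by(id=client_id).first()
        if client is None:
            # Fall back to the dummy so the secret comparison downstream
            # always runs against a stored secret and takes comparable time.
            client = Client.query.filter_by(id=oauth.DUMMY_CLIENT_ID).first()
        return client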
+
+@RegisterMigration(37, MIGRATIONS)
+def federation_collection_schema(db):
+ """ Converts the Collection and CollectionItem """
+ metadata = MetaData(bind=db.bind)
+ collection_table = inspect_table(metadata, "core__collections")
+ collection_items_table = inspect_table(metadata, "core__collection_items")
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ ##
+ # Collection Table
+ ##
+
+ # Add the fields onto the Collection model. We can't create them as NOT
+ # NULL yet without causing DB integrity errors on existing rows, so the
+ # NOT NULL constraint is added later.
+ public_id_column = Column(
+ "public_id",
+ Unicode,
+ unique=True
+ )
+ public_id_column.create(
+ collection_table,
+ unique_name="collection_public_id")
+
+ updated_column = Column(
+ "updated",
+ DateTime,
+ default=datetime.datetime.utcnow
+ )
+ updated_column.create(collection_table)
+
+ type_column = Column(
+ "type",
+ Unicode,
+ )
+ type_column.create(collection_table)
+
+ db.commit()
+
+ # Iterate over the items and set the updated and type fields
+ for collection in db.execute(collection_table.select()):
+ db.execute(collection_table.update().where(
+ collection_table.c.id==collection.id
+ ).values(
+ updated=collection.created,
+ type="core-user-defined"
+ ))
+
+ db.commit()
+
+ # Add the not null constraint onto the fields
+ updated_column = collection_table.columns["updated"]
+ updated_column.alter(nullable=False)
+
+ type_column = collection_table.columns["type"]
+ type_column.alter(nullable=False)
+
+ db.commit()
+
+ # Rename the "items" to "num_items" as per the TODO
+ num_items_field = collection_table.columns["items"]
+ num_items_field.alter(name="num_items")
+ db.commit()
+
+ ##
+ # CollectionItem
+ ##
+ # Adding the object ID column, this again will have not null added later.
+ object_id = Column(
+ "object_id",
+ Integer,
+ ForeignKey(GenericModelReference_V0.id),
+ )
+ object_id.create(
+ collection_items_table,
+ )
+
+ db.commit()
+
+ # Iterate through and convert the Media reference to object_id
+ for item in db.execute(collection_items_table.select()):
+ # Check if there is a GMR for the MediaEntry
+ object_gmr = db.execute(gmr_table.select(
+ and_(
+ gmr_table.c.obj_pk == item.media_entry,
+ gmr_table.c.model_type == "core__media_entries"
+ )
+ )).first()
+
+ if object_gmr:
+ object_gmr = object_gmr[0]
+ else:
+ # Create a GenericModelReference
+ object_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=item.media_entry,
+ model_type="core__media_entries"
+ )).inserted_primary_key[0]
+
+ # Now set the object_id column to the ID of the GMR
+ db.execute(collection_items_table.update().where(
+ collection_items_table.c.id==item.id
+ ).values(
+ object_id=object_gmr
+ ))
+
+ db.commit()
+
+ # Add not null constraint
+ object_id = collection_items_table.columns["object_id"]
+ object_id.alter(nullable=False)
+
+ db.commit()
+
+ # Now remove the old media_entry column
+ media_entry_column = collection_items_table.columns["media_entry"]
+ media_entry_column.drop()
+
+ db.commit()
+
+@RegisterMigration(38, MIGRATIONS)
+def federation_actor(db):
+ """ Renames refereces to the user to actor """
+ metadata = MetaData(bind=db.bind)
+
+ # RequestToken: user -> actor
+ request_token_table = inspect_table(metadata, "core__request_tokens")
+ rt_user_column = request_token_table.columns["user"]
+ rt_user_column.alter(name="actor")
+
+ # AccessToken: user -> actor
+ access_token_table = inspect_table(metadata, "core__access_tokens")
+ at_user_column = access_token_table.columns["user"]
+ at_user_column.alter(name="actor")
+
+ # MediaEntry: uploader -> actor
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+ me_user_column = media_entry_table.columns["uploader"]
+ me_user_column.alter(name="actor")
+
+ # MediaComment: author -> actor
+ media_comment_table = inspect_table(metadata, "core__media_comments")
+ mc_user_column = media_comment_table.columns["author"]
+ mc_user_column.alter(name="actor")
+
+ # Collection: creator -> actor
+ collection_table = inspect_table(metadata, "core__collections")
+ mc_user_column = collection_table.columns["creator"]
+ mc_user_column.alter(name="actor")
+
+ # commit changes to db.
+ db.commit()
+
+class Graveyard_V0(declarative_base()):
+ """ Where models come to die """
+ __tablename__ = "core__graveyard"
+
+ id = Column(Integer, primary_key=True)
+ public_id = Column(Unicode, nullable=True, unique=True)
+
+ deleted = Column(DateTime, nullable=False)
+ object_type = Column(Unicode, nullable=False)
+
+ actor_id = Column(Integer, ForeignKey(GenericModelReference_V0.id))
+
+@RegisterMigration(39, MIGRATIONS)
+def federation_graveyard(db):
+ """ Introduces soft deletion to models
+
+ This adds a Graveyard model which is used to copy (soft-)deleted models to.
+ """
+ metadata = MetaData(bind=db.bind)
+
+ # Create the graveyard table
+ Graveyard_V0.__table__.create(db.bind)
+
+ # Commit changes to the db
+ db.commit()
+
+@RegisterMigration(40, MIGRATIONS)
+def add_public_id(db):
+ metadata = MetaData(bind=db.bind)
+
+ # Get the table
+ activity_table = inspect_table(metadata, "core__activities")
+ activity_public_id = Column(
+ "public_id",
+ Unicode,
+ unique=True,
+ nullable=True
+ )
+ activity_public_id.create(
+ activity_table,
+ unique_name="activity_public_id"
+ )
+
+ # Commit this.
+ db.commit()
+
+class Comment_V0(declarative_base()):
+ __tablename__ = "core__comment_links"
+
+ id = Column(Integer, primary_key=True)
+ target_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference_V0.id),
+ nullable=False
+ )
+ comment_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference_V0.id),
+ nullable=False
+ )
+ added = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
+
+@RegisterMigration(41, MIGRATIONS)
+def federation_comments(db):
+ """
+ This reworks the MediaComment to be a more generic Comment model.
+ """
+ metadata = MetaData(bind=db.bind)
+ textcomment_table = inspect_table(metadata, "core__media_comments")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # First of all add the public_id field to the TextComment table
+ comment_public_id_column = Column(
+ "public_id",
+ Unicode,
+ unique=True
+ )
+ comment_public_id_column.create(
+ textcomment_table,
+ unique_name="public_id_unique"
+ )
+
+ comment_updated_column = Column(
+ "updated",
+ DateTime,
+ )
+ comment_updated_column.create(textcomment_table)
+
+
+ # First create the Comment link table.
+ Comment_V0.__table__.create(db.bind)
+ db.commit()
+
+ # now look up the comment table
+ comment_table = inspect_table(metadata, "core__comment_links")
+
+ # Iterate over all the comments and add them to the link table.
+ for comment in db.execute(textcomment_table.select()):
+ # Check if there is a GMR to the comment.
+ comment_gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment.id,
+ gmr_table.c.model_type == "core__media_comments"
+ ))).first()
+
+ if comment_gmr:
+ comment_gmr = comment_gmr[0]
+ else:
+ comment_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment.id,
+ model_type="core__media_comments"
+ )).inserted_primary_key[0]
+
+ # Get or create the GMR for the media entry
+ entry_gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment.media_entry,
+ gmr_table.c.model_type == "core__media_entries"
+ ))).first()
+
+ if entry_gmr:
+ entry_gmr = entry_gmr[0]
+ else:
+ entry_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment.media_entry,
+ model_type="core__media_entries"
+ )).inserted_primary_key[0]
+
+ # Add the comment link.
+ db.execute(comment_table.insert().values(
+ target_id=entry_gmr,
+ comment_id=comment_gmr,
+ added=datetime.datetime.utcnow()
+ ))
+
+ # Add the data to the updated field
+ db.execute(textcomment_table.update().where(
+ textcomment_table.c.id == comment.id
+ ).values(
+ updated=comment.created
+ ))
+ db.commit()
+
+ # Add not null constraint
+ textcomment_update_column = textcomment_table.columns["updated"]
+ textcomment_update_column.alter(nullable=False)
+
+ # Remove the unused fields on the TextComment model
+ comment_media_entry_column = textcomment_table.columns["media_entry"]
+ comment_media_entry_column.drop()
+ db.commit()
+
+@RegisterMigration(42, MIGRATIONS)
+def consolidate_reports(db):
+ """ Consolidates the report tables into just one """
+ metadata = MetaData(bind=db.bind)
+
+ report_table = inspect_table(metadata, "core__reports")
+ comment_report_table = inspect_table(metadata, "core__reports_on_comments")
+ media_report_table = inspect_table(metadata, "core__reports_on_media")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Add the GMR object field onto the base report table
+ report_object_id_column = Column(
+ "object_id",
+ Integer,
+ ForeignKey(GenericModelReference_V0.id),
+ nullable=True,
+ )
+ report_object_id_column.create(report_table)
+ db.commit()
+
+ # Iterate through the reports in the comment table and merge them in.
+ for comment_report in db.execute(comment_report_table.select()):
+ # If the comment is None it's been deleted, so we should skip it
+ if comment_report.comment_id is None:
+ continue
+
+ # Find a GMR for this if one exists.
+ crgmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment_report.comment_id,
+ gmr_table.c.model_type == "core__media_comments"
+ ))).first()
+
+ if crgmr:
+ crgmr = crgmr[0]
+ else:
+ crgmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment_report.comment_id,
+ model_type="core__media_comments"
+ )).inserted_primary_key[0]
+
+ # Great now we can save this back onto the (base) report.
+ db.execute(report_table.update().where(
+ report_table.c.id == comment_report.id
+ ).values(
+ object_id=crgmr
+ ))
+
+    # Iterate through the media reports and do the same as above.
+ for media_report in db.execute(media_report_table.select()):
+        # If media_entry_id is None the media has been deleted, so skip it.
+ if media_report.media_entry_id is None:
+ continue
+
+ # Find Mr. GMR :)
+ mrgmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == media_report.media_entry_id,
+ gmr_table.c.model_type == "core__media_entries"
+ ))).first()
+
+ if mrgmr:
+ mrgmr = mrgmr[0]
+ else:
+ mrgmr = db.execute(gmr_table.insert().values(
+ obj_pk=media_report.media_entry_id,
+ model_type="core__media_entries"
+ )).inserted_primary_key[0]
+
+        # Save back onto the base report.
+ db.execute(report_table.update().where(
+ report_table.c.id == media_report.id
+ ).values(
+ object_id=mrgmr
+ ))
+
+ db.commit()
+
+ # Now we can remove the fields we don't need anymore
+ report_type = report_table.columns["type"]
+ report_type.drop()
+
+    # Drop both the comment and media report tables.
+ comment_report_table.drop()
+ media_report_table.drop()
+
+    # Commit; we're done.
+ db.commit()
+
+@RegisterMigration(43, MIGRATIONS)
+def consolidate_notification(db):
+ """ Consolidates the notification models into one """
+ metadata = MetaData(bind=db.bind)
+ notification_table = inspect_table(metadata, "core__notifications")
+ cn_table = inspect_table(metadata, "core__comment_notifications")
+ cp_table = inspect_table(metadata, "core__processing_notifications")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Add fields needed
+ notification_object_id_column = Column(
+ "object_id",
+ Integer,
+ ForeignKey(GenericModelReference_V0.id)
+ )
+ notification_object_id_column.create(notification_table)
+ db.commit()
+
+ # Iterate over comments and move to notification base table.
+ for comment_notification in db.execute(cn_table.select()):
+ # Find the GMR.
+ cngmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment_notification.subject_id,
+ gmr_table.c.model_type == "core__media_comments"
+ ))).first()
+
+ if cngmr:
+ cngmr = cngmr[0]
+ else:
+ cngmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment_notification.subject_id,
+ model_type="core__media_comments"
+ )).inserted_primary_key[0]
+
+        # Save back onto the notification
+ db.execute(notification_table.update().where(
+ notification_table.c.id == comment_notification.id
+ ).values(
+ object_id=cngmr
+ ))
+ db.commit()
+
+ # Do the same for processing notifications
+ for processing_notification in db.execute(cp_table.select()):
+ cpgmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == processing_notification.subject_id,
+ gmr_table.c.model_type == "core__processing_notifications"
+ ))).first()
+
+ if cpgmr:
+ cpgmr = cpgmr[0]
+ else:
+ cpgmr = db.execute(gmr_table.insert().values(
+ obj_pk=processing_notification.subject_id,
+ model_type="core__processing_notifications"
+ )).inserted_primary_key[0]
+
+ db.execute(notification_table.update().where(
+ notification_table.c.id == processing_notification.id
+ ).values(
+ object_id=cpgmr
+ ))
+ db.commit()
+
+ # Add the not null constraint
+ notification_object_id = notification_table.columns["object_id"]
+ notification_object_id.alter(nullable=False)
+
+ # Now drop the fields we don't need
+ notification_type_column = notification_table.columns["type"]
+ notification_type_column.drop()
+
+ # Drop the tables we no longer need
+ cp_table.drop()
+ cn_table.drop()
+
+ db.commit()
+
+@RegisterMigration(44, MIGRATIONS)
+def activity_cleanup(db):
+ """
+    This cleans up activities which are broken and have no graveyard object,
+    as well as removing the NOT NULL constraint on Report.object_id, as that
+    can be null when action has been taken to delete the reported content.
+
+    Some of this was already changed in previous migrations, so we need to
+    check whether there is anything left to do; there might not be. It was
+    fixed as part of the #5369 fix. Some past migrations broke for some
+    people and had to be fixed at the time, while for others they ran fine.
+ """
+ metadata = MetaData(bind=db.bind)
+ report_table = inspect_table(metadata, "core__reports")
+ activity_table = inspect_table(metadata, "core__activities")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Remove not null on Report.object_id
+ object_id_column = report_table.columns["object_id"]
+ if not object_id_column.nullable:
+        object_id_column.alter(nullable=True)
+ db.commit()
+
+    # Go through each activity and verify that the object, and the target
+    # if one is specified, both exist.
+ for activity in db.execute(activity_table.select()):
+ # Get the GMR
+ obj_gmr = db.execute(gmr_table.select().where(
+ gmr_table.c.id == activity.object_id,
+ )).first()
+
+ # Get the object the GMR points to, might be null.
+ obj_table = inspect_table(metadata, obj_gmr.model_type)
+ obj = db.execute(obj_table.select().where(
+ obj_table.c.id == obj_gmr.obj_pk
+ )).first()
+
+ if obj is None:
+ # Okay we need to delete the activity and move to the next
+ db.execute(activity_table.delete().where(
+ activity_table.c.id == activity.id
+ ))
+ continue
+
+        # If there is a target then check that too; if not, that's fine
+        if activity.target_id is None:
+ continue
+
+ # Okay check the target is valid
+ target_gmr = db.execute(gmr_table.select().where(
+ gmr_table.c.id == activity.target_id
+ )).first()
+
+ target_table = inspect_table(metadata, target_gmr.model_type)
+ target = db.execute(target_table.select().where(
+ target_table.c.id == target_gmr.obj_pk
+ )).first()
+
+ # If it doesn't exist, delete the activity.
+ if target is None:
+ db.execute(activity_table.delete().where(
+ activity_table.c.id == activity.id
+ ))
diff --git a/mediagoblin/db/migrations/README b/mediagoblin/db/migrations/README
new file mode 100644
index 00000000..93d85eff
--- /dev/null
+++ b/mediagoblin/db/migrations/README
@@ -0,0 +1,57 @@
+Migration Guide
+---------------
+
+Alembic comes with a CLI called ``alembic``.
+
+Create a Migration
+^^^^^^^^^^^^^^^^^^
+
+Let's create our first migration::
+
+ $ alembic revision -m "add favourite_band field"
+ Generating
+ /your/gmg/path/mediagoblin/db/migrations/versions/1e3793de36a_add_favourite_band_field.py ... done
+
+By default, migration files have two methods: ``upgrade`` and ``downgrade``.
+Alembic will invoke these methods to apply the migrations to your current
+database.
+
+Now, we need to edit our newly created migration file
+``1e3793de36a_add_favourite_band_field.py`` to add a new column ``favourite_band``
+to the ``core__users`` table::
+
+ def upgrade():
+ op.add_column('core__users', sa.Column('favourite_band', sa.Unicode(100)))
+
+
+ def downgrade():
+ op.drop_column('core__users', 'favourite_band')
+
+.. note::
+
+ Alembic can also generate `automatic migrations <http://alembic.readthedocs.org/en/latest/tutorial.html#auto-generating-migrations>`__.
+
+Then we can run ``gmg dbupdate`` to apply the new migration::
+
+ $ gmg dbupdate
+ INFO [alembic.migration] Context impl SQLiteImpl.
+ INFO [alembic.migration] Will assume non-transactional DDL.
+ INFO [alembic.migration] Running upgrade None -> 1e3793de36a, add favourite band field
+
+If you want to revert that migration, simply run::
+
+ $ alembic downgrade -1
+
+.. warning::
+
+ Currently, Alembic cannot do ``DROP COLUMN``, ``ALTER COLUMN`` etc.
+ operations in SQLite. Please see https://bitbucket.org/zzzeek/alembic/issue/21/column-renames-not-supported-on-sqlite
+ for detailed information.
+
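+Recent versions of Alembic do ship a "batch" mode that can emulate these
+operations on SQLite by recreating the table. A rough sketch, reusing the
+``favourite_band`` example from above::
+
+    def downgrade():
+        with op.batch_alter_table('core__users') as batch_op:
+            batch_op.drop_column('favourite_band')
+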
+Glossary
+^^^^^^^^
+
+* ``alembic.ini``: The Alembic configuration file. The ``alembic`` CLI will
+  look for this file every time it is invoked.
+* ``mediagoblin/db/migrations/versions/``: Alembic will add new migration files
+ to this directory.
diff --git a/mediagoblin/db/migrations/env.py b/mediagoblin/db/migrations/env.py
new file mode 100644
index 00000000..43b7b247
--- /dev/null
+++ b/mediagoblin/db/migrations/env.py
@@ -0,0 +1,64 @@
+from __future__ import with_statement
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+from logging.config import fileConfig
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+from mediagoblin.db.models import Base
+target_metadata = Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(url=url, target_metadata=target_metadata)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ connection = config.attributes["session"].get_bind()
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
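+# Note: ``config.attributes`` is a plain dictionary that callers can use to
+# hand objects into this script. The caller (e.g. ``gmg dbupdate``) is
+# expected to stash its SQLAlchemy session there before invoking Alembic
+# programmatically, roughly:
+#
+#     alembic_cfg.attributes["session"] = session
+#     command.upgrade(alembic_cfg, "head")
+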
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
+
diff --git a/mediagoblin/db/migrations/script.py.mako b/mediagoblin/db/migrations/script.py.mako
new file mode 100644
index 00000000..43c09401
--- /dev/null
+++ b/mediagoblin/db/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"}
diff --git a/mediagoblin/db/migrations/versions/.gitkeep b/mediagoblin/db/migrations/versions/.gitkeep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/.gitkeep
diff --git a/mediagoblin/db/migrations/versions/101510e3a713_removes_graveyard_items_from_.py b/mediagoblin/db/migrations/versions/101510e3a713_removes_graveyard_items_from_.py
new file mode 100644
index 00000000..723100c5
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/101510e3a713_removes_graveyard_items_from_.py
@@ -0,0 +1,60 @@
+"""#5382 Removes graveyard items from collections
+
+Revision ID: 101510e3a713
+Revises: 52bf0ccbedc1
+Create Date: 2016-01-12 10:46:26.486610
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '101510e3a713'
+down_revision = '52bf0ccbedc1'
+
+from alembic import op
+from sqlalchemy import MetaData
+from sqlalchemy.sql import and_
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """
+    The problem is that deletions are occurring and, as expected, the
+    GenericModelReference objects are being updated to point to the tombstone
+    object. The issue is that collections now contain deleted items; this
+    causes problems, for example when rendering them.
+
+ This migration is to remove any Graveyard objects (tombstones) from any
+ Collection.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+ collection_items_table = inspect_table(metadata, "core__collection_items")
+ graveyard_table = inspect_table(metadata, "core__graveyard")
+
+ res = list(db.execute(graveyard_table.select()))
+ for tombstone in res:
+ # Get GMR for tombstone
+ gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == tombstone.id,
+ gmr_table.c.model_type == "core__graveyard"
+ ))).first()
+
+        # If there is no GMR we're fine, since a GMR is required for the
+        # object to be in a collection
+ if gmr is None:
+ continue
+
+ # Delete all the CollectionItem objects for this GMR
+ db.execute(collection_items_table.delete().where(
+ collection_items_table.c.object_id == gmr.id
+ ))
+
+
+def downgrade():
+ """
+ Nothing to do here, the migration just deletes objects from collections.
+    There are no schema changes that have occurred. This can be reverted without
+ any problems.
+ """
+ pass
diff --git a/mediagoblin/db/migrations/versions/228916769bd2_ensure_report_object_id_is_nullable.py b/mediagoblin/db/migrations/versions/228916769bd2_ensure_report_object_id_is_nullable.py
new file mode 100644
index 00000000..596b87de
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/228916769bd2_ensure_report_object_id_is_nullable.py
@@ -0,0 +1,33 @@
+"""ensure Report.object_id is nullable
+
+Revision ID: 228916769bd2
+Revises: 3145accb8fe3
+Create Date: 2016-02-29 18:54:37.295185
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '228916769bd2'
+down_revision = '3145accb8fe3'
+
+from alembic import op
+from sqlalchemy import MetaData
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """
+    This ensures that the Report.object_id field is nullable; it seems that
+    for a short period of time it could have been NOT NULL, but this was
+    fixed later.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+ report_table = inspect_table(metadata, "core__reports")
+
+    # Check whether the field is already nullable
+    object_id_field = report_table.columns["object_id"]
+    if not object_id_field.nullable:
+ # We have to alter this.
+ object_id_field.alter(nullable=True)
+
+def downgrade():
+ pass
diff --git a/mediagoblin/db/migrations/versions/3145accb8fe3_remove_tombstone_comment_wrappers.py b/mediagoblin/db/migrations/versions/3145accb8fe3_remove_tombstone_comment_wrappers.py
new file mode 100644
index 00000000..1f336048
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/3145accb8fe3_remove_tombstone_comment_wrappers.py
@@ -0,0 +1,44 @@
+"""remove tombstone comment wrappers
+
+Revision ID: 3145accb8fe3
+Revises: 4066b9f8b84a
+Create Date: 2016-02-29 14:38:12.096859
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3145accb8fe3'
+down_revision = '4066b9f8b84a'
+
+from alembic import op
+from sqlalchemy import MetaData, and_
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """
+    Removes the Comment wrappers of comments which have been deleted and now
+    exist only as tombstones.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+ comment_table = inspect_table(metadata, "core__comment_links")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Get the Comment wrappers
+ comment_wrappers = list(db.execute(comment_table.select()))
+
+ for wrapper in comment_wrappers:
+        # Query for a graveyard GMR for this comment
+ gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.id == wrapper.comment_id,
+ gmr_table.c.model_type == "core__graveyard"
+ ))).first()
+
+ if gmr is not None:
+            # Okay, delete this wrapper as it points to a deleted comment
+ db.execute(comment_table.delete().where(
+ comment_table.c.id == wrapper.id
+ ))
+
+def downgrade():
+ pass
diff --git a/mediagoblin/db/migrations/versions/4066b9f8b84a_use_comment_link_ids_notifications.py b/mediagoblin/db/migrations/versions/4066b9f8b84a_use_comment_link_ids_notifications.py
new file mode 100644
index 00000000..9dfef18d
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/4066b9f8b84a_use_comment_link_ids_notifications.py
@@ -0,0 +1,103 @@
+"""use_comment_link_ids_notifications
+
+Revision ID: 4066b9f8b84a
+Revises: 8429e33fdf7
+Create Date: 2016-02-29 11:46:13.511318
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4066b9f8b84a'
+down_revision = '8429e33fdf7'
+
+from alembic import op
+from sqlalchemy import MetaData
+from sqlalchemy import and_
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """"
+ This replaces the Notification.obj with the ID of the Comment (i.e. comment
+ link) ID instead of the TextComment object.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+ notification_table = inspect_table(metadata, "core__notifications")
+ comment_table = inspect_table(metadata, "core__comment_links")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Get the notifications.
+ notifications = list(db.execute(notification_table.select()))
+
+ # Iterate through all the notifications
+ for notification in notifications:
+ # Lookup the Comment link object from the notification's ID
+ comment_link = db.execute(comment_table.select().where(
+ comment_table.c.comment_id == notification.object_id
+ )).first()
+
+ # Find the GMR for this comment or make one if one doesn't exist.
+ gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment_link.id,
+ gmr_table.c.model_type == "core__comment_links"
+ ))).first()
+
+ # If it doesn't exist we need to create one.
+ if gmr is None:
+ gmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment_link.id,
+ model_type="core__comment_links"
+ )).inserted_primary_key[0]
+ else:
+ gmr = gmr.id
+
+ # Okay now we need to update the notification with the ID of the link
+        # rather than the ID of the TextComment object.
+ db.execute(notification_table.update().values(
+ object_id=gmr
+ ).where(
+ notification_table.c.id == notification.id
+ ))
+
+
+def downgrade():
+ """
+    This puts back the TextComment ID in the Notification.object_id field
+    where we're currently using the Comment object (i.e. the comment link ID).
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+ notification_table = inspect_table(metadata, "core__notifications")
+ comment_table = inspect_table(metadata, "core__comment_links")
+
+    # Get the notifications.
+ notifications = list(db.execute(notification_table.select()))
+
+ # Iterate through all the notifications
+ for notification in notifications:
+ # Lookup the Comment link object from the notification's ID
+ comment_link = db.execute(comment_table.select().where(
+ comment_table.c.id == notification.object_id
+ )).first()
+
+        # The comment link's comment_id column is itself the GMR pointing at
+        # the TextComment, so we can restore it directly.
+        gmr = comment_link.comment_id
+
+ # Update the notification with the TextComment (i.e. the comment object)
+ db.execute(notification_table.update().values(
+ object_id=gmr
+ ).where(
+ notification_table.c.id == notification.id
+ ))
+
diff --git a/mediagoblin/db/migrations/versions/52bf0ccbedc1_initial_revision.py b/mediagoblin/db/migrations/versions/52bf0ccbedc1_initial_revision.py
new file mode 100644
index 00000000..964cf5be
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/52bf0ccbedc1_initial_revision.py
@@ -0,0 +1,422 @@
+"""initial revision
+
+Revision ID: 52bf0ccbedc1
+Revises: None
+Create Date: 2015-11-07 17:00:28.191042
+Description: This is an initial Alembic migration
+"""
+
+# revision identifiers, used by Alembic.
+revision = '52bf0ccbedc1'
+down_revision = None
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+    # Well, we already appear to have some of the core data, presumably
+    # because this database predates our Alembic migrations (it was managed
+    # by sqlalchemy-migrate), so we can bail out early.
+ if op.get_bind().engine.has_table("core__users"):
+ return
+
+ op.create_table(
+ 'core__clients',
+ sa.Column('id', sa.Unicode(), nullable=True),
+ sa.Column('secret', sa.Unicode(), nullable=False),
+ sa.Column('expirey', sa.DateTime(), nullable=True),
+ sa.Column('application_type', sa.Unicode(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('redirect_uri', sa.UnicodeText(),
+ nullable=True),
+ sa.Column('logo_url', sa.Unicode(), nullable=True),
+ sa.Column('application_name', sa.Unicode(), nullable=True),
+ sa.Column('contacts', sa.UnicodeText(),
+ nullable=True),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__file_keynames',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=True),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('name'))
+
+ op.create_table(
+ 'core__generators',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=False),
+ sa.Column('published', sa.DateTime(), nullable=True),
+ sa.Column('updated', sa.DateTime(), nullable=True),
+ sa.Column('object_type', sa.Unicode(), nullable=False),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__generic_model_reference',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('obj_pk', sa.Integer(), nullable=False),
+ sa.Column('model_type', sa.Unicode(), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('model_type', 'obj_pk'))
+
+ op.create_table(
+ 'core__locations',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=True),
+ sa.Column('position', sa.UnicodeText(),
+ nullable=True),
+ sa.Column('address', sa.UnicodeText(),
+ nullable=True),
+ sa.PrimaryKeyConstraint('id'))
+
+ # We should remove this in a future migration, though
+ op.create_table(
+ 'core__migrations',
+ sa.Column('name', sa.Unicode(), nullable=False),
+ sa.Column('version', sa.Integer(), nullable=False),
+ sa.PrimaryKeyConstraint('name'))
+
+ op.create_table(
+ 'core__nonce_timestamps',
+ sa.Column('nonce', sa.Unicode(), nullable=False),
+ sa.Column('timestamp', sa.DateTime(), nullable=False),
+ sa.PrimaryKeyConstraint('nonce', 'timestamp'))
+
+ op.create_table(
+ 'core__privileges',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('privilege_name', sa.Unicode(), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('privilege_name'))
+
+ op.create_table(
+ 'core__tags',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('slug', sa.Unicode(), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('slug'))
+
+ op.create_table(
+ 'core__comment_links',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('target_id', sa.Integer(), nullable=False),
+ sa.Column('comment_id', sa.Integer(), nullable=False),
+ sa.Column('added', sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(['comment_id'],
+ ['core__generic_model_reference.id']),
+ sa.ForeignKeyConstraint(['target_id'],
+ ['core__generic_model_reference.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__graveyard',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('deleted', sa.DateTime(), nullable=False),
+ sa.Column('object_type', sa.Unicode(), nullable=False),
+ sa.Column('actor_id', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['actor_id'],
+ ['core__generic_model_reference.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_table(
+ 'core__users',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('url', sa.Unicode(), nullable=True),
+ sa.Column('bio', sa.UnicodeText(), nullable=True),
+ sa.Column('name', sa.Unicode(), nullable=True),
+ sa.Column('type', sa.Unicode(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('location', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['location'], ['core__locations.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__activities',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=False),
+ sa.Column('published', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('verb', sa.Unicode(), nullable=False),
+ sa.Column('content', sa.Unicode(), nullable=True),
+ sa.Column('title', sa.Unicode(), nullable=True),
+ sa.Column('generator', sa.Integer(), nullable=True),
+ sa.Column('object_id', sa.Integer(), nullable=False),
+ sa.Column('target_id', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['generator'], ['core__generators.id']),
+ sa.ForeignKeyConstraint(['object_id'],
+ ['core__generic_model_reference.id']),
+ sa.ForeignKeyConstraint(['target_id'],
+ ['core__generic_model_reference.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_table(
+ 'core__collections',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('title', sa.Unicode(), nullable=False),
+ sa.Column('slug', sa.Unicode(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('description', sa.UnicodeText(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=False),
+ sa.Column('num_items', sa.Integer(), nullable=True),
+ sa.Column('type', sa.Unicode(), nullable=False),
+ sa.Column('location', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['location'], ['core__locations.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('actor', 'slug'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_index(
+ op.f('ix_core__collections_created'),
+ 'core__collections', ['created'], unique=False)
+
+ op.create_table(
+ 'core__local_users',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('username', sa.Unicode(), nullable=False),
+ sa.Column('email', sa.Unicode(), nullable=False),
+ sa.Column('pw_hash', sa.Unicode(), nullable=True),
+ sa.Column('wants_comment_notification', sa.Boolean(), nullable=True),
+ sa.Column('wants_notifications', sa.Boolean(), nullable=True),
+ sa.Column('license_preference', sa.Unicode(), nullable=True),
+ sa.Column('uploaded', sa.Integer(), nullable=True),
+ sa.Column('upload_limit', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('username'))
+
+ op.create_table(
+ 'core__media_comments',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('content', sa.UnicodeText(), nullable=False),
+ sa.Column('location', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['location'], ['core__locations.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_table(
+ 'core__media_entries',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('remote', sa.Boolean(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=False),
+ sa.Column('title', sa.Unicode(), nullable=False),
+ sa.Column('slug', sa.Unicode(), nullable=True),
+ sa.Column('description', sa.UnicodeText(), nullable=True),
+ sa.Column('media_type', sa.Unicode(), nullable=False),
+ sa.Column('state', sa.Unicode(), nullable=False),
+ sa.Column('license', sa.Unicode(), nullable=True),
+ sa.Column('file_size', sa.Integer(), nullable=True),
+ sa.Column('location', sa.Integer(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('fail_error', sa.Unicode(), nullable=True),
+ sa.Column('fail_metadata', sa.UnicodeText(), nullable=True),
+ sa.Column('transcoding_progress', sa.SmallInteger(), nullable=True),
+ sa.Column('queued_media_file', sa.Unicode(), nullable=True),
+ sa.Column('queued_task_id', sa.Unicode(), nullable=True),
+ sa.Column('media_metadata', sa.UnicodeText(), nullable=True),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['location'], ['core__locations.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('actor', 'slug'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_index(
+ op.f('ix_core__media_entries_actor'),
+ 'core__media_entries', ['actor'], unique=False)
+ op.create_index(
+ op.f('ix_core__media_entries_created'),
+ 'core__media_entries', ['created'], unique=False)
+
+ op.create_table(
+ 'core__notifications',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('object_id', sa.Integer(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('user_id', sa.Integer(), nullable=False),
+ sa.Column('seen', sa.Boolean(), nullable=True),
+ sa.ForeignKeyConstraint(['object_id'],
+ ['core__generic_model_reference.id']),
+ sa.ForeignKeyConstraint(['user_id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_index(
+ op.f('ix_core__notifications_seen'),
+ 'core__notifications', ['seen'], unique=False)
+
+ op.create_index(
+ op.f('ix_core__notifications_user_id'),
+ 'core__notifications', ['user_id'], unique=False)
+
+ op.create_table(
+ 'core__privileges_users',
+ sa.Column('user', sa.Integer(), nullable=False),
+ sa.Column('privilege', sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(['privilege'], ['core__privileges.id']),
+ sa.ForeignKeyConstraint(['user'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('user', 'privilege'))
+
+ op.create_table(
+ 'core__remote_users',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('webfinger', sa.Unicode(), nullable=True),
+ sa.ForeignKeyConstraint(['id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('webfinger'))
+
+ op.create_table(
+ 'core__reports',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('reporter_id', sa.Integer(), nullable=False),
+ sa.Column('report_content', sa.UnicodeText(), nullable=True),
+ sa.Column('reported_user_id', sa.Integer(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('resolver_id', sa.Integer(), nullable=True),
+ sa.Column('resolved', sa.DateTime(), nullable=True),
+ sa.Column('result', sa.UnicodeText(), nullable=True),
+ sa.Column('object_id', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['object_id'],
+ ['core__generic_model_reference.id']),
+ sa.ForeignKeyConstraint(['reported_user_id'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['reporter_id'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['resolver_id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'))
+ op.create_table(
+ 'core__request_tokens',
+ sa.Column('token', sa.Unicode(), nullable=False),
+ sa.Column('secret', sa.Unicode(), nullable=False),
+ sa.Column('client', sa.Unicode(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=True),
+ sa.Column('used', sa.Boolean(), nullable=True),
+ sa.Column('authenticated', sa.Boolean(), nullable=True),
+ sa.Column('verifier', sa.Unicode(), nullable=True),
+ sa.Column('callback', sa.Unicode(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['client'], ['core__clients.id']),
+ sa.PrimaryKeyConstraint('token'))
+
+ op.create_table(
+ 'core__user_bans',
+ sa.Column('user_id', sa.Integer(), nullable=False),
+ sa.Column('expiration_date', sa.Date(), nullable=True),
+ sa.Column('reason', sa.UnicodeText(), nullable=False),
+ sa.ForeignKeyConstraint(['user_id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('user_id'))
+
+ op.create_table(
+ 'core__access_tokens',
+ sa.Column('token', sa.Unicode(), nullable=False),
+ sa.Column('secret', sa.Unicode(), nullable=False),
+ sa.Column('actor', sa.Integer(), nullable=True),
+ sa.Column('request_token', sa.Unicode(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['request_token'],
+ ['core__request_tokens.token']),
+ sa.PrimaryKeyConstraint('token'))
+
+ op.create_table(
+ 'core__attachment_files',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('media_entry', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=False),
+ sa.Column('filepath', sa.Unicode(),
+ nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(['media_entry'], ['core__media_entries.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__collection_items',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('collection', sa.Integer(), nullable=False),
+ sa.Column('note', sa.UnicodeText(), nullable=True),
+ sa.Column('added', sa.DateTime(), nullable=False),
+ sa.Column('position', sa.Integer(), nullable=True),
+ sa.Column('object_id', sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(['collection'], ['core__collections.id']),
+ sa.ForeignKeyConstraint(['object_id'],
+ ['core__generic_model_reference.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('collection', 'object_id'))
+
+ op.create_index(
+ op.f('ix_core__collection_items_object_id'), 'core__collection_items',
+ ['object_id'], unique=False)
+
+ op.create_table(
+ 'core__comment_subscriptions',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('media_entry_id', sa.Integer(), nullable=False),
+ sa.Column('user_id', sa.Integer(), nullable=False),
+ sa.Column('notify', sa.Boolean(), nullable=False),
+ sa.Column('send_email', sa.Boolean(), nullable=False),
+ sa.ForeignKeyConstraint(['media_entry_id'], ['core__media_entries.id']),
+ sa.ForeignKeyConstraint(['user_id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__media_tags',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('media_entry', sa.Integer(), nullable=False),
+ sa.Column('tag', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=True),
+ sa.ForeignKeyConstraint(['media_entry'], ['core__media_entries.id']),
+ sa.ForeignKeyConstraint(['tag'], ['core__tags.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('tag', 'media_entry'))
+
+ op.create_index(
+ op.f('ix_core__media_tags_media_entry'), 'core__media_tags',
+ ['media_entry'], unique=False)
+
+ op.create_index(
+ op.f('ix_core__media_tags_tag'), 'core__media_tags',
+ ['tag'], unique=False)
+
+ op.create_table(
+ 'core__mediafiles',
+ sa.Column('media_entry', sa.Integer(), nullable=False),
+ sa.Column('name_id', sa.SmallInteger(), nullable=False),
+ sa.Column('file_path', sa.Unicode(), nullable=True),
+ sa.Column('file_metadata', sa.UnicodeText(),
+ nullable=True),
+ sa.ForeignKeyConstraint(['media_entry'], ['core__media_entries.id']),
+ sa.ForeignKeyConstraint(['name_id'], ['core__file_keynames.id']),
+ sa.PrimaryKeyConstraint('media_entry', 'name_id'))
+
+ op.create_table(
+ 'core__processing_metadata',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('media_entry_id', sa.Integer(), nullable=False),
+ sa.Column('callback_url', sa.Unicode(), nullable=True),
+ sa.ForeignKeyConstraint(['media_entry_id'], ['core__media_entries.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_index(
+ op.f('ix_core__processing_metadata_media_entry_id'),
+ 'core__processing_metadata', ['media_entry_id'], unique=False)
+
+def downgrade():
+ # Downgrading from a first revision is nonsense.
+ pass
diff --git a/mediagoblin/db/migrations/versions/8429e33fdf7_remove_the_graveyard_objects_from_.py b/mediagoblin/db/migrations/versions/8429e33fdf7_remove_the_graveyard_objects_from_.py
new file mode 100644
index 00000000..978260df
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/8429e33fdf7_remove_the_graveyard_objects_from_.py
@@ -0,0 +1,62 @@
+"""Remove the Graveyard objects from CommentNotification objects
+
+Revision ID: 8429e33fdf7
+Revises: 101510e3a713
+Create Date: 2016-01-19 08:01:21.577274
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '8429e33fdf7'
+down_revision = '101510e3a713'
+
+from alembic import op
+from sqlalchemy import MetaData
+from sqlalchemy.sql import and_
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """
+    This migration is very similar to 101510e3a713. It removes objects which
+    are in the Graveyard from Notification objects. It also iterates through
+    any reports which might have been filed and sets their objects to None.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+ notification_table = inspect_table(metadata, "core__notifications")
+ report_table = inspect_table(metadata, "core__reports")
+ graveyard_table = inspect_table(metadata, "core__graveyard")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+    res = list(db.execute(graveyard_table.select()))
+    for tombstone in res:
+        # Look up the GMR for the tombstone
+ gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == tombstone.id,
+ gmr_table.c.model_type == "core__graveyard"
+ ))).first()
+
+        # If we can't find one we can skip it, as a GMR is required for the
+        # tombstone to be referenced by notification objects
+ if gmr is None:
+ continue
+
+ # Delete all notifications which link to the GMR as that's invalid.
+ db.execute(notification_table.delete().where(
+ notification_table.c.object_id == gmr.id
+ ))
+
+        # Deal with reports. We don't want to delete these: they should
+        # still exist if the reported object was deleted, as that can be
+        # part of the resolution. Just set the object to None.
+ db.execute(report_table.update().where(
+ report_table.c.object_id == gmr.id
+ ).values(object_id=None))
+
+
+def downgrade():
+ """
+    There is nothing to do as this was a data migration; it'll downgrade
+    just fine without any steps. It's not like we can undo the deletions.
+ """
+ pass
diff --git a/mediagoblin/db/mixin.py b/mediagoblin/db/mixin.py
index 048cc07c..e8b121d0 100644
--- a/mediagoblin/db/mixin.py
+++ b/mediagoblin/db/mixin.py
@@ -31,17 +31,96 @@ import uuid
import re
from datetime import datetime
+from pytz import UTC
from werkzeug.utils import cached_property
-from mediagoblin import mg_globals
from mediagoblin.media_types import FileTypeNotSupported
from mediagoblin.tools import common, licenses
from mediagoblin.tools.pluginapi import hook_handle
from mediagoblin.tools.text import cleaned_markdown_conversion
from mediagoblin.tools.url import slugify
+from mediagoblin.tools.translate import pass_to_ugettext as _
+class CommentingMixin(object):
+ """
+    Mixin that gives a model methods to get and add the comments on it
+
+ This assumes the model has a "comments" class which is a ForeignKey to the
+ Collection model. This will hold a Collection of comments which are
+    associated with this model. It also assumes the model has an "actor"
+ ForeignKey which points to the creator/publisher/etc. of the model.
+
+    NB: This is NOT the mixin for the Comment model; this is for
+ other models which support commenting.
+ """
+
+ def get_comment_link(self):
+ # Import here to avoid cyclic imports
+ from mediagoblin.db.models import Comment, GenericModelReference
+
+ gmr = GenericModelReference.query.filter_by(
+ obj_pk=self.id,
+ model_type=self.__tablename__
+ ).first()
+
+ if gmr is None:
+ return None
+
+ link = Comment.query.filter_by(comment_id=gmr.id).first()
+ return link
+
+ def get_reply_to(self):
+ link = self.get_comment_link()
+ if link is None or link.target_id is None:
+ return None
+
+ return link.target()
+
+ def soft_delete(self, *args, **kwargs):
+ link = self.get_comment_link()
+ if link is not None:
+ link.delete()
+ super(CommentingMixin, self).soft_delete(*args, **kwargs)
+
+class GeneratePublicIDMixin(object):
+ """
+    Mixin that ensures that the public_id field is populated.
+
+    The public_id is the ID that is used in the API; this must be globally
+    unique and dereferenceable. This will be the URL for the API view of the
+    object. It's used in several places: not only is it given out via the
+    API, it's also vital information stored on the `Graveyard.public_id`
+    field when a soft deletion occurs. This is needed to follow the spec,
+    which says we have to be able to provide a shell of an object and
+    return a 410 (rather than a 404) when a deleted object is requested.
+
+    This requires the urlgen off the request object (`request.urlgen`) to be
+    provided, as the ID is a URL.
+ """
+
+ def get_public_id(self, urlgen):
+ # Verify that the class this is on actually has a public_id field...
+ if "public_id" not in self.__table__.columns.keys():
+ raise Exception("Model has no public_id field")
+
+        # Great! The model has a public_id; if it's None, let's create one!
+ if self.public_id is None:
+ # We need the internal ID for this so ensure we've been saved.
+ self.save(commit=False)
+
+ # Create the URL
+ self.public_id = urlgen(
+ "mediagoblin.api.object",
+ object_type=self.object_type,
+ id=str(uuid.uuid4()),
+ qualified=True
+ )
+ self.save()
+ return self.public_id
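+
+# A minimal usage sketch for GeneratePublicIDMixin (``entry`` being a saved
+# MediaEntry and ``request`` a hypothetical request object inside a view):
+#
+#     api_id = entry.get_public_id(request.urlgen)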
class UserMixin(object):
+ object_type = "person"
+
@property
def bio_html(self):
return cleaned_markdown_conversion(self.bio)
@@ -49,6 +128,7 @@ class UserMixin(object):
def url_for_self(self, urlgen, **kwargs):
"""Generate a URL for this User's home page."""
return urlgen('mediagoblin.user_pages.user_home',
user=self.username, **kwargs)
@@ -84,51 +164,59 @@ class GenerateSlugMixin(object):
generated bits until it's unique. That'll be a little bit of junk,
but at least it has the basis of a nice slug.
"""
+
#Is already a slug assigned? Check if it is valid
if self.slug:
- self.slug = slugify(self.slug)
+ slug = slugify(self.slug)
# otherwise, try to use the title.
elif self.title:
# assign slug based on title
- self.slug = slugify(self.title)
+ slug = slugify(self.title)
- # We don't want any empty string slugs
- if self.slug == u"":
- self.slug = None
+ else:
+ # We don't have any information to set a slug
+ return
- # Do we have anything at this point?
- # If not, we're not going to get a slug
- # so just return... we're not going to force one.
- if not self.slug:
- return # giving up!
+ # We don't want any empty string slugs
+ if slug == u"":
+ return
# Otherwise, let's see if this is unique.
- if self.check_slug_used(self.slug):
+ if self.check_slug_used(slug):
# It looks like it's being used... lame.
# Can we just append the object's id to the end?
if self.id:
- slug_with_id = u"%s-%s" % (self.slug, self.id)
+ slug_with_id = u"%s-%s" % (slug, self.id)
if not self.check_slug_used(slug_with_id):
self.slug = slug_with_id
return # success!
# okay, still no success;
# let's whack junk on there till it's unique.
- self.slug += '-' + uuid.uuid4().hex[:4]
+ slug += '-' + uuid.uuid4().hex[:4]
# keep going if necessary!
- while self.check_slug_used(self.slug):
- self.slug += uuid.uuid4().hex[:4]
+ while self.check_slug_used(slug):
+ slug += uuid.uuid4().hex[:4]
+
+    # self.check_slug_used(slug) must be False at this point, so we have a
+    # slug that we can use.
+ self.slug = slug
-class MediaEntryMixin(GenerateSlugMixin):
+class MediaEntryMixin(GenerateSlugMixin, GeneratePublicIDMixin):
def check_slug_used(self, slug):
# import this here due to a cyclic import issue
# (db.models -> db.mixin -> db.util -> db.models)
from mediagoblin.db.util import check_media_slug_used
- return check_media_slug_used(self.uploader, slug, self.id)
+ return check_media_slug_used(self.actor, slug, self.id)
+
+ @property
+ def object_type(self):
+ """ Converts media_type to pump-like type - don't use internally """
+ return self.media_type.split(".")[-1]
@property
def description_html(self):
@@ -177,7 +265,7 @@ class MediaEntryMixin(GenerateSlugMixin):
Use a slug if we have one, else use our 'id'.
"""
- uploader = self.get_uploader
+ uploader = self.get_actor
return urlgen(
'mediagoblin.user_pages.media_home',
@@ -192,16 +280,36 @@ class MediaEntryMixin(GenerateSlugMixin):
# TODO: implement generic fallback in case MEDIA_MANAGER does
# not specify one?
if u'thumb' in self.media_files:
- thumb_url = mg_globals.app.public_store.file_url(
+ thumb_url = self._app.public_store.file_url(
self.media_files[u'thumb'])
else:
# No thumbnail in media available. Get the media's
# MEDIA_MANAGER for the fallback icon and return static URL
# Raises FileTypeNotSupported in case no such manager is enabled
manager = self.media_manager
- thumb_url = mg_globals.app.staticdirector(manager[u'default_thumb'])
+ thumb_url = self._app.staticdirector(manager[u'default_thumb'])
return thumb_url
+ @property
+ def original_url(self):
+ """ Returns the URL for the original image
+ will return self.thumb_url if original url doesn't exist"""
+ if u"original" not in self.media_files:
+ return self.thumb_url
+
+ return self._app.public_store.file_url(
+ self.media_files[u"original"]
+ )
+
+ @property
+ def icon_url(self):
+ '''Return the icon URL (for usage in templates) if it exists'''
+ try:
+ return self._app.staticdirector(
+ self.media_manager['type_icon'])
+ except AttributeError:
+ return None
+
@cached_property
def media_manager(self):
"""Returns the MEDIA_MANAGER of the media's media_type
@@ -222,7 +330,17 @@ class MediaEntryMixin(GenerateSlugMixin):
Get the exception that's appropriate for this error
"""
if self.fail_error:
- return common.import_component(self.fail_error)
+ try:
+ return common.import_component(self.fail_error)
+ except ImportError:
+ # TODO(breton): fail_error should give some hint about why it
+ # failed. fail_error is used as a path to import().
+ # Unfortunately, I didn't know about that and put general error
+ # message there. Maybe it's for the best, because for admin,
+ # we could show even some raw python things. Anyway, this
+ # should be properly resolved. Now we are in a freeze, that's
+ # why I simply catch ImportError.
+ return None
def get_license_data(self):
"""Return license dict for requested license"""
@@ -248,7 +366,7 @@ class MediaEntryMixin(GenerateSlugMixin):
if 'Image DateTimeOriginal' in exif_all:
# format date taken
- takendate = datetime.datetime.strptime(
+ takendate = datetime.strptime(
exif_all['Image DateTimeOriginal']['printable'],
'%Y:%m:%d %H:%M:%S').date()
taken = takendate.strftime('%B %d %Y')
@@ -285,7 +403,9 @@ class MediaEntryMixin(GenerateSlugMixin):
return exif_short
-class MediaCommentMixin(object):
+class TextCommentMixin(GeneratePublicIDMixin):
+ object_type = "comment"
+
@property
def content_html(self):
"""
@@ -294,21 +414,29 @@ class MediaCommentMixin(object):
"""
return cleaned_markdown_conversion(self.content)
+ def __unicode__(self):
+ return u'<{klass} #{id} {actor} "{comment}">'.format(
+ klass=self.__class__.__name__,
+ id=self.id,
+ actor=self.get_actor,
+ comment=self.content)
+
def __repr__(self):
- return '<{klass} #{id} {author} "{comment}">'.format(
+ return '<{klass} #{id} {actor} "{comment}">'.format(
klass=self.__class__.__name__,
id=self.id,
- author=self.get_author,
+ actor=self.get_actor,
comment=self.content)
+class CollectionMixin(GenerateSlugMixin, GeneratePublicIDMixin):
+ object_type = "collection"
-class CollectionMixin(GenerateSlugMixin):
def check_slug_used(self, slug):
# import this here due to a cyclic import issue
# (db.models -> db.mixin -> db.util -> db.models)
from mediagoblin.db.util import check_collection_slug_used
- return check_collection_slug_used(self.creator, slug, self.id)
+ return check_collection_slug_used(self.actor, slug, self.id)
@property
def description_html(self):
@@ -328,7 +456,7 @@ class CollectionMixin(GenerateSlugMixin):
Use a slug if we have one, else use our 'id'.
"""
- creator = self.get_creator
+ creator = self.get_actor
return urlgen(
'mediagoblin.user_pages.user_collection',
@@ -336,6 +464,28 @@ class CollectionMixin(GenerateSlugMixin):
collection=self.slug_or_id,
**extra_args)
+ def add_to_collection(self, obj, content=None, commit=True):
+ """ Adds an object to the collection """
+        # Import here to prevent cyclic imports
+ from mediagoblin.db.models import CollectionItem
+
+ # Need the ID of this collection for this so check we've got one.
+ self.save(commit=False)
+
+ # Create the CollectionItem
+ item = CollectionItem()
+ item.collection = self.id
+ item.get_object = obj
+
+ if content is not None:
+ item.note = content
+
+ self.num_items = self.num_items + 1
+
+ # Save both!
+ self.save(commit=commit)
+ item.save(commit=commit)
+ return item
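+
+    # A usage sketch (assuming ``collection`` and ``entry`` are saved model
+    # instances):
+    #
+    #     item = collection.add_to_collection(entry, content=u"my note")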
class CollectionItemMixin(object):
@property
@@ -345,3 +495,147 @@ class CollectionItemMixin(object):
Run through Markdown and the HTML cleaner.
"""
return cleaned_markdown_conversion(self.note)
+
+class ActivityMixin(GeneratePublicIDMixin):
+ object_type = "activity"
+
+ VALID_VERBS = ["add", "author", "create", "delete", "dislike", "favorite",
+ "follow", "like", "post", "share", "unfavorite", "unfollow",
+ "unlike", "unshare", "update", "tag"]
+
+ def get_url(self, request):
+ return request.urlgen(
+ "mediagoblin.user_pages.activity_view",
+ username=self.get_actor.username,
+ id=self.id,
+ qualified=True
+ )
+
+ def generate_content(self):
+ """ Produces a HTML content for object """
+ # some of these have simple and targetted. If self.target it set
+ # it will pick the targetted. If they DON'T have a targetted version
+ # the information in targetted won't be added to the content.
+ verb_to_content = {
+ "add": {
+ "simple" : _("{username} added {object}"),
+ "targetted": _("{username} added {object} to {target}"),
+ },
+ "author": {"simple": _("{username} authored {object}")},
+ "create": {"simple": _("{username} created {object}")},
+ "delete": {"simple": _("{username} deleted {object}")},
+ "dislike": {"simple": _("{username} disliked {object}")},
+ "favorite": {"simple": _("{username} favorited {object}")},
+ "follow": {"simple": _("{username} followed {object}")},
+ "like": {"simple": _("{username} liked {object}")},
+ "post": {
+ "simple": _("{username} posted {object}"),
+ "targetted": _("{username} posted {object} to {target}"),
+ },
+ "share": {"simple": _("{username} shared {object}")},
+ "unfavorite": {"simple": _("{username} unfavorited {object}")},
+ "unfollow": {"simple": _("{username} stopped following {object}")},
+ "unlike": {"simple": _("{username} unliked {object}")},
+ "unshare": {"simple": _("{username} unshared {object}")},
+ "update": {"simple": _("{username} updated {object}")},
+ "tag": {"simple": _("{username} tagged {object}")},
+ }
+
+ object_map = {
+ "image": _("an image"),
+ "comment": _("a comment"),
+ "collection": _("a collection"),
+ "video": _("a video"),
+ "audio": _("audio"),
+ "person": _("a person"),
+ }
+ obj = self.object()
+ target = None if self.target_id is None else self.target()
+ actor = self.get_actor
+ content = verb_to_content.get(self.verb, None)
+
+        if content is None or obj is None:
+ return
+
+ # Decide what to fill the object with
+ if hasattr(obj, "title") and obj.title.strip(" "):
+ object_value = obj.title
+ elif obj.object_type in object_map:
+ object_value = object_map[obj.object_type]
+ else:
+ object_value = _("an object")
+
+ # Do we want to add a target (indirect object) to content?
+ if target is not None and "targetted" in content:
+ if hasattr(target, "title") and target.title.strip(" "):
+ target_value = target.title
+ elif target.object_type in object_map:
+ target_value = object_map[target.object_type]
+ else:
+ target_value = _("an object")
+
+ self.content = content["targetted"].format(
+ username=actor.username,
+ object=object_value,
+ target=target_value
+ )
+ else:
+ self.content = content["simple"].format(
+ username=actor.username,
+ object=object_value
+ )
+
+ return self.content
+
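+    # For example (a sketch): an activity with verb "post" whose object is
+    # an image titled "Sunset" yields content like '<username> posted
+    # Sunset'; with no title it falls back to '<username> posted an image'.
+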
+ def serialize(self, request):
+ href = request.urlgen(
+ "mediagoblin.api.object",
+ object_type=self.object_type,
+ id=self.id,
+ qualified=True
+ )
+ published = UTC.localize(self.published)
+ updated = UTC.localize(self.updated)
+ obj = {
+ "id": href,
+ "actor": self.get_actor.serialize(request),
+ "verb": self.verb,
+ "published": published.isoformat(),
+ "updated": updated.isoformat(),
+ "content": self.content,
+ "url": self.get_url(request),
+ "object": self.object().serialize(request),
+ "objectType": self.object_type,
+ "links": {
+ "self": {
+ "href": href,
+ },
+ },
+ }
+
+ if self.generator:
+ obj["generator"] = self.get_generator.serialize(request)
+
+ if self.title:
+ obj["title"] = self.title
+
+ if self.target_id is not None:
+ obj["target"] = self.target().serialize(request)
+
+ return obj
+
+    def unserialize(self, data):
+ """
+        Takes the given data and sets it on this activity.
+
+        Several pieces of data are not written for security reasons;
+        for example, the author or id of an activity cannot be changed.
+ """
+ if "verb" in data:
+ self.verb = data["verb"]
+
+ if "title" in data:
+ self.title = data["title"]
+
+ if "content" in data:
+ self.content = data["content"]
diff --git a/mediagoblin/db/models.py b/mediagoblin/db/models.py
index e388bd5b..f4644b9f 100644
--- a/mediagoblin/db/models.py
+++ b/mediagoblin/db/models.py
@@ -18,79 +18,266 @@
TODO: indexes on foreignkeys, where useful.
"""
+from __future__ import print_function
+
import logging
import datetime
from sqlalchemy import Column, Integer, Unicode, UnicodeText, DateTime, \
Boolean, ForeignKey, UniqueConstraint, PrimaryKeyConstraint, \
- SmallInteger, Date
-from sqlalchemy.orm import relationship, backref, with_polymorphic
+ SmallInteger, Date, types
+from sqlalchemy.orm import relationship, backref, with_polymorphic, validates, \
+ class_mapper
from sqlalchemy.orm.collections import attribute_mapped_collection
+from sqlalchemy.sql import and_
from sqlalchemy.sql.expression import desc
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.util import memoized_property
from mediagoblin.db.extratypes import (PathTupleWithSlashes, JSONEncoded,
MutationDict)
-from mediagoblin.db.base import Base, DictReadAttrProxy
+from mediagoblin.db.base import Base, DictReadAttrProxy, FakeCursor
from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, \
- MediaCommentMixin, CollectionMixin, CollectionItemMixin
+ CollectionMixin, CollectionItemMixin, ActivityMixin, TextCommentMixin, \
+ CommentingMixin
from mediagoblin.tools.files import delete_media_files
from mediagoblin.tools.common import import_component
+from mediagoblin.tools.routing import extract_url_arguments
-# It's actually kind of annoying how sqlalchemy-migrate does this, if
-# I understand it right, but whatever. Anyway, don't remove this :P
-#
-# We could do migration calls more manually instead of relying on
-# this import-based meddling...
-from migrate import changeset
+import six
+from six.moves.urllib.parse import urljoin
+from pytz import UTC
_log = logging.getLogger(__name__)
+class GenericModelReference(Base):
+ """
+    Represents a relationship to any model that is defined with an integer pk
+ """
+ __tablename__ = "core__generic_model_reference"
+
+ id = Column(Integer, primary_key=True)
+ obj_pk = Column(Integer, nullable=False)
+
+ # This will be the tablename of the model
+ model_type = Column(Unicode, nullable=False)
+
+ # Constrain it so obj_pk and model_type have to be unique
+    # They should be in this order as an index is generated; "model_type"
+    # will be the major sort key as it's put first.
+ __table_args__ = (
+ UniqueConstraint("model_type", "obj_pk"),
+ {})
+
+ def get_object(self):
+ # This can happen if it's yet to be saved
+ if self.model_type is None or self.obj_pk is None:
+ return None
+
+ model = self._get_model_from_type(self.model_type)
+ return model.query.filter_by(id=self.obj_pk).first()
+
+ def set_object(self, obj):
+ model = obj.__class__
+
+        # Check we've been given a model instance
+        if not issubclass(model, Base):
+            raise ValueError("Only models can be set on a GMR")
+
+ # Check that the model has an explicit __tablename__ declaration
+ if getattr(model, "__tablename__", None) is None:
+ raise ValueError("Models must have __tablename__ attribute")
+
+ # Check that it's not a composite primary key
+ primary_keys = [key.name for key in class_mapper(model).primary_key]
+ if len(primary_keys) > 1:
+ raise ValueError("Models can not have composite primary keys")
+
+ # Check that the primary key on the model is an integer field
+ pk_column = getattr(model, primary_keys[0])
+ if not isinstance(pk_column.type, Integer):
+ raise ValueError("Only models with integer pks can be set")
+
+ if getattr(obj, pk_column.key) is None:
+ obj.save(commit=False)
+
+ self.obj_pk = getattr(obj, pk_column.key)
+ self.model_type = obj.__tablename__
+
+ def _get_model_from_type(self, model_type):
+ """ Gets a model from a tablename (model type) """
+ if getattr(type(self), "_TYPE_MAP", None) is None:
+ # We want to build a map of all the models by table name (type) for
+ # easy lookup. It is built on the class (not the instance) so it can
+ # be shared between all instances.
+
+ # The registry is read here, not at import time, to avoid circular imports.
+ registry = dict(Base._decl_class_registry).values()
+ self._TYPE_MAP = dict(
+ ((m.__tablename__, m) for m in registry if hasattr(m, "__tablename__"))
+ )
+ setattr(type(self), "_TYPE_MAP", self._TYPE_MAP)
+
+ return self.__class__._TYPE_MAP[model_type]
+
+ @classmethod
+ def find_for_obj(cls, obj):
+ """ Finds a GMR for an object or returns None """
+ # Is there one for this already?
+ model = type(obj)
+ pk = getattr(obj, "id")
+
+ gmr = cls.query.filter_by(
+ obj_pk=pk,
+ model_type=model.__tablename__
+ )
+
+ return gmr.first()
+
+ @classmethod
+ def find_or_new(cls, obj):
+ """ Finds an existing GMR or creates a new one for the object """
+ gmr = cls.find_for_obj(obj)
+
+ # If there isn't one already create one
+ if gmr is None:
+ gmr = cls(
+ obj_pk=obj.id,
+ model_type=type(obj).__tablename__
+ )
+
+ return gmr
+
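A minimal usage sketch (not part of the patch) of the reference pattern above; `some_media` stands in for any saved model instance with an integer primary key:

    gmr = GenericModelReference.find_or_new(some_media)  # reuse or create the link row
    gmr.save()                                           # persists obj_pk and model_type
    same_obj = gmr.get_object()                          # resolves back via the type map
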
+class Location(Base):
+ """ Represents a physical location """
+ __tablename__ = "core__locations"
+
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode)
+ # GPS coordinates
+ position = Column(MutationDict.as_mutable(JSONEncoded))
+ address = Column(MutationDict.as_mutable(JSONEncoded))
+
+ @classmethod
+ def create(cls, data, obj):
+ location = cls()
+ location.unserialize(data)
+ location.save()
+ obj.location = location.id
+ return location
+
+ def serialize(self, request):
+ location = {"objectType": "place"}
+
+ if self.name is not None:
+ location["displayName"] = self.name
+
+ if self.position:
+ location["position"] = self.position
+
+ if self.address:
+ location["address"] = self.address
+
+ return location
+
+ def unserialize(self, data):
+ if "displayName" in data:
+ self.name = data["displayName"]
+
+ self.position = {}
+ self.address = {}
+
+ # nicer way to do this?
+ if "position" in data:
+ # TODO: deal with an ISO 6709 formatted string as position
+ if "altitude" in data["position"]:
+ self.position["altitude"] = data["position"]["altitude"]
+
+ if "direction" in data["position"]:
+ self.position["direction"] = data["position"]["direction"]
+
+ if "longitude" in data["position"]:
+ self.position["longitude"] = data["position"]["longitude"]
+
+ if "latitude" in data["position"]:
+ self.position["latitude"] = data["position"]["latitude"]
+
+ if "address" in data:
+ if "formatted" in data["address"]:
+ self.address["formatted"] = data["address"]["formatted"]
+
+ if "streetAddress" in data["address"]:
+ self.address["streetAddress"] = data["address"]["streetAddress"]
+
+ if "locality" in data["address"]:
+ self.address["locality"] = data["address"]["locality"]
+
+ if "region" in data["address"]:
+ self.address["region"] = data["address"]["region"]
+
+ if "postalCode" in data["address"]:
+ self.address["postalCode"] = data["addresss"]["postalCode"]
+
+ if "country" in data["address"]:
+ self.address["country"] = data["address"]["country"]
class User(Base, UserMixin):
"""
- TODO: We should consider moving some rarely used fields
- into some sort of "shadow" table.
+ Base user that is common amongst LocalUser and RemoteUser.
+
+ This holds all the fields which are common between both the Local and Remote
+ user models.
+
+ NB: ForeignKeys should reference this User model and NOT the LocalUser or
+ RemoteUser models.
"""
__tablename__ = "core__users"
id = Column(Integer, primary_key=True)
- username = Column(Unicode, nullable=False, unique=True)
- # Note: no db uniqueness constraint on email because it's not
- # reliable (many email systems case insensitive despite against
- # the RFC) and because it would be a mess to implement at this
- # point.
- email = Column(Unicode, nullable=False)
- pw_hash = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
- # Intented to be nullable=False, but migrations would not work for it
- # set to nullable=True implicitly.
- wants_comment_notification = Column(Boolean, default=True)
- wants_notifications = Column(Boolean, default=True)
- license_preference = Column(Unicode)
url = Column(Unicode)
- bio = Column(UnicodeText) # ??
- uploaded = Column(Integer, default=0)
- upload_limit = Column(Integer)
+ bio = Column(UnicodeText)
+ name = Column(Unicode)
- ## TODO
- # plugin data would be in a separate model
+ # This is required for the polymorphic inheritance
+ type = Column(Unicode)
- def __repr__(self):
- return '<{0} #{1} {2} {3} "{4}">'.format(
- self.__class__.__name__,
- self.id,
- 'verified' if self.has_privilege(u'active') else 'non-verified',
- 'admin' if self.has_privilege(u'admin') else 'user',
- self.username)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
+ location = Column(Integer, ForeignKey("core__locations.id"))
+
+ # Lazy getters
+ get_location = relationship("Location", lazy="joined")
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'user',
+ 'polymorphic_on': type,
+ }
+
+ deletion_mode = Base.SOFT_DELETE
+
+ def soft_delete(self, *args, **kwargs):
+ # Find all the Collections and delete those
+ for collection in Collection.query.filter_by(actor=self.id):
+ collection.delete(**kwargs)
+
+ # Find all the comments and delete those too
+ for comment in TextComment.query.filter_by(actor=self.id):
+ comment.delete(**kwargs)
+
+ # Find all the activities and delete those too
+ for activity in Activity.query.filter_by(actor=self.id):
+ activity.delete(**kwargs)
- def delete(self, **kwargs):
+ super(User, self).soft_delete(*args, **kwargs)
+
+
+ def delete(self, *args, **kwargs):
"""Deletes a User and all related entries/comments/files/..."""
# Collections get deleted by relationships.
- media_entries = MediaEntry.query.filter(MediaEntry.uploader == self.id)
+ media_entries = MediaEntry.query.filter(MediaEntry.actor == self.id)
for media in media_entries:
# TODO: Make sure that "MediaEntry.delete()" also deletes
# all related files/Comments
@@ -102,28 +289,30 @@ class User(Base, UserMixin):
clean_orphan_tags(commit=False)
# Delete user, pass through commit=False/True in kwargs
- super(User, self).delete(**kwargs)
- _log.info('Deleted user "{0}" account'.format(self.username))
+ username = self.username
+ super(User, self).delete(*args, **kwargs)
+ _log.info('Deleted user "{0}" account'.format(username))
- def has_privilege(self,*priv_names):
+ def has_privilege(self, privilege, allow_admin=True):
"""
This method checks to make sure a user has all the correct privileges
to access a piece of content.
- :param priv_names A variable number of unicode objects which rep-
- -resent the different privileges which may give
- the user access to this content. If you pass
- multiple arguments, the user will be granted
- access if they have ANY of the privileges
- passed.
+ :param privilege A unicode object naming the privilege
+ which may give the user access to
+ content.
+
+ :param allow_admin If this is set to True and the user is
+ an admin, then this will always return
+ True even if the user hasn't been given
+ the privilege. (defaults to True)
"""
- if len(priv_names) == 1:
- priv = Privilege.query.filter(
- Privilege.privilege_name==priv_names[0]).one()
- return (priv in self.all_privileges)
- elif len(priv_names) > 1:
- return self.has_privilege(priv_names[0]) or \
- self.has_privilege(*priv_names[1:])
+ priv = Privilege.query.filter_by(privilege_name=privilege).one()
+ if priv in self.all_privileges:
+ return True
+ elif allow_admin and self.has_privilege(u'admin', allow_admin=False):
+ return True
+
return False
def is_banned(self):
@@ -135,6 +324,125 @@ class User(Base, UserMixin):
"""
return UserBan.query.get(self.id) is not None
+ def serialize(self, request):
+ published = UTC.localize(self.created)
+ updated = UTC.localize(self.updated)
+ user = {
+ "published": published.isoformat(),
+ "updated": updated.isoformat(),
+ "objectType": self.object_type,
+ "pump_io": {
+ "shared": False,
+ "followed": False,
+ },
+ }
+
+ if self.bio:
+ user.update({"summary": self.bio})
+ if self.url:
+ user.update({"url": self.url})
+ if self.location:
+ user.update({"location": self.get_location.serialize(request)})
+
+ return user
+
+ def unserialize(self, data):
+ if "summary" in data:
+ self.bio = data["summary"]
+
+ if "location" in data:
+ Location.create(data["location"], self)
+
+class LocalUser(User):
+ """ This represents a user registered on this instance """
+ __tablename__ = "core__local_users"
+
+ id = Column(Integer, ForeignKey("core__users.id"), primary_key=True)
+ username = Column(Unicode, nullable=False, unique=True)
+ # Note: no db uniqueness constraint on email because it's not
+ # reliable (many email systems case insensitive despite against
+ # the RFC) and because it would be a mess to implement at this
+ # point.
+ email = Column(Unicode, nullable=False)
+ pw_hash = Column(Unicode)
+
+ # Intended to be nullable=False, but migrations would not work for
+ # it, so it is left nullable=True implicitly.
+ wants_comment_notification = Column(Boolean, default=True)
+ wants_notifications = Column(Boolean, default=True)
+ license_preference = Column(Unicode)
+ uploaded = Column(Integer, default=0)
+ upload_limit = Column(Integer)
+
+ __mapper_args__ = {
+ "polymorphic_identity": "user_local",
+ }
+
+ ## TODO
+ # plugin data would be in a separate model
+
+ def __repr__(self):
+ return '<{0} #{1} {2} {3} "{4}">'.format(
+ self.__class__.__name__,
+ self.id,
+ 'verified' if self.has_privilege(u'active') else 'non-verified',
+ 'admin' if self.has_privilege(u'admin') else 'user',
+ self.username)
+
+ def get_public_id(self, host):
+ return "acct:{0}@{1}".format(self.username, host)
+
+ def serialize(self, request):
+ user = {
+ "id": self.get_public_id(request.host),
+ "preferredUsername": self.username,
+ "displayName": self.get_public_id(request.host).split(":", 1)[1],
+ "links": {
+ "self": {
+ "href": request.urlgen(
+ "mediagoblin.api.user.profile",
+ username=self.username,
+ qualified=True
+ ),
+ },
+ "activity-inbox": {
+ "href": request.urlgen(
+ "mediagoblin.api.inbox",
+ username=self.username,
+ qualified=True
+ )
+ },
+ "activity-outbox": {
+ "href": request.urlgen(
+ "mediagoblin.api.feed",
+ username=self.username,
+ qualified=True
+ )
+ },
+ },
+ }
+
+ user.update(super(LocalUser, self).serialize(request))
+ return user
+
+class RemoteUser(User):
+ """ User that is on another (remote) instance """
+ __tablename__ = "core__remote_users"
+
+ id = Column(Integer, ForeignKey("core__users.id"), primary_key=True)
+ webfinger = Column(Unicode, unique=True)
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'user_remote'
+ }
+
+ def __repr__(self):
+ return "<{0} #{1} {2}>".format(
+ self.__class__.__name__,
+ self.id,
+ self.webfinger
+ )
+
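A sketch of how the joined-table inheritance above is expected to behave; the username is hypothetical:

    local = LocalUser.query.filter_by(username=u"alice").first()  # joins core__users
    everyone = User.query.all()  # yields LocalUser and RemoteUser instances,
                                 # discriminated by the "type" column
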
class Client(Base):
"""
@@ -146,8 +454,8 @@ class Client(Base):
secret = Column(Unicode, nullable=False)
expirey = Column(DateTime, nullable=True)
application_type = Column(Unicode, nullable=False)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
- updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
# optional stuff
redirect_uri = Column(JSONEncoded, nullable=True)
@@ -170,13 +478,15 @@ class RequestToken(Base):
token = Column(Unicode, primary_key=True)
secret = Column(Unicode, nullable=False)
client = Column(Unicode, ForeignKey(Client.id))
- user = Column(Integer, ForeignKey(User.id), nullable=True)
+ actor = Column(Integer, ForeignKey(User.id), nullable=True)
used = Column(Boolean, default=False)
authenticated = Column(Boolean, default=False)
verifier = Column(Unicode, nullable=True)
callback = Column(Unicode, nullable=False, default=u"oob")
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
- updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
+ get_client = relationship(Client)
class AccessToken(Base):
"""
@@ -186,10 +496,12 @@ class AccessToken(Base):
token = Column(Unicode, nullable=False, primary_key=True)
secret = Column(Unicode, nullable=False)
- user = Column(Integer, ForeignKey(User.id))
+ actor = Column(Integer, ForeignKey(User.id))
request_token = Column(Unicode, ForeignKey(RequestToken.token))
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
- updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
+ get_requesttoken = relationship(RequestToken)
class NonceTimestamp(Base):
@@ -201,25 +513,31 @@ class NonceTimestamp(Base):
nonce = Column(Unicode, nullable=False, primary_key=True)
timestamp = Column(DateTime, nullable=False, primary_key=True)
-
-class MediaEntry(Base, MediaEntryMixin):
+class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
"""
TODO: Consider fetching the media_files using join
"""
__tablename__ = "core__media_entries"
id = Column(Integer, primary_key=True)
- uploader = Column(Integer, ForeignKey(User.id), nullable=False, index=True)
+ public_id = Column(Unicode, unique=True, nullable=True)
+ remote = Column(Boolean, default=False)
+
+ actor = Column(Integer, ForeignKey(User.id), nullable=False, index=True)
title = Column(Unicode, nullable=False)
slug = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now,
- index=True)
description = Column(UnicodeText) # ??
media_type = Column(Unicode, nullable=False)
state = Column(Unicode, default=u'unprocessed', nullable=False)
# or use sqlalchemy.types.Enum?
license = Column(Unicode)
file_size = Column(Integer, default=0)
+ location = Column(Integer, ForeignKey("core__locations.id"))
+ get_location = relationship("Location", lazy="joined")
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow,
+ index=True)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
fail_error = Column(Unicode)
fail_metadata = Column(JSONEncoded)
@@ -231,10 +549,12 @@ class MediaEntry(Base, MediaEntryMixin):
queued_task_id = Column(Unicode)
__table_args__ = (
- UniqueConstraint('uploader', 'slug'),
+ UniqueConstraint('actor', 'slug'),
{})
- get_uploader = relationship(User)
+ deletion_mode = Base.SOFT_DELETE
+
+ get_actor = relationship(User)
media_files_helper = relationship("MediaFile",
collection_class=attribute_mapped_collection("name"),
@@ -260,26 +580,51 @@ class MediaEntry(Base, MediaEntryMixin):
creator=lambda v: MediaTag(name=v["name"], slug=v["slug"])
)
- collections_helper = relationship("CollectionItem",
- cascade="all, delete-orphan"
- )
- collections = association_proxy("collections_helper", "in_collection")
media_metadata = Column(MutationDict.as_mutable(JSONEncoded),
default=MutationDict())
## TODO
# fail_error
+ @property
+ def get_uploader(self):
+ # for compatibility
+ return self.get_actor
+
+ @property
+ def uploader(self):
+ # for compatibility
+ return self.actor
+
+ @property
+ def collections(self):
+ """ Get any collections that this MediaEntry is in """
+ return list(Collection.query.join(Collection.collection_items).join(
+ CollectionItem.object_helper
+ ).filter(
+ and_(
+ GenericModelReference.model_type == self.__tablename__,
+ GenericModelReference.obj_pk == self.id
+ )
+ ))
+
def get_comments(self, ascending=False):
- order_col = MediaComment.created
- if not ascending:
- order_col = desc(order_col)
- return self.all_comments.order_by(order_col)
+ query = Comment.query.join(Comment.target_helper).filter(and_(
+ GenericModelReference.obj_pk == self.id,
+ GenericModelReference.model_type == self.__tablename__
+ ))
+
+ if ascending:
+ query = query.order_by(Comment.added.asc())
+ else:
+ query = query.order_by(Comment.added.desc())
+
+ return query
def url_to_prev(self, urlgen):
"""get the next 'newer' entry by this user"""
media = MediaEntry.query.filter(
- (MediaEntry.uploader == self.uploader)
+ (MediaEntry.actor == self.actor)
& (MediaEntry.state == u'processed')
& (MediaEntry.id > self.id)).order_by(MediaEntry.id).first()
@@ -289,7 +634,7 @@ class MediaEntry(Base, MediaEntryMixin):
def url_to_next(self, urlgen):
"""get the next 'older' entry by this user"""
media = MediaEntry.query.filter(
- (MediaEntry.uploader == self.uploader)
+ (MediaEntry.actor == self.actor)
& (MediaEntry.state == u'processed')
& (MediaEntry.id < self.id)).order_by(desc(MediaEntry.id)).first()
@@ -302,7 +647,7 @@ class MediaEntry(Base, MediaEntryMixin):
return the value of the key.
"""
media_file = MediaFile.query.filter_by(media_entry=self.id,
- name=unicode(file_key)).first()
+ name=six.text_type(file_key)).first()
if media_file:
if metadata_key:
@@ -315,11 +660,11 @@ class MediaEntry(Base, MediaEntryMixin):
Update the file_metadata of a MediaFile.
"""
media_file = MediaFile.query.filter_by(media_entry=self.id,
- name=unicode(file_key)).first()
+ name=six.text_type(file_key)).first()
file_metadata = media_file.file_metadata or {}
- for key, value in kwargs.iteritems():
+ for key, value in six.iteritems(kwargs):
file_metadata[key] = value
media_file.file_metadata = file_metadata
@@ -344,7 +689,7 @@ class MediaEntry(Base, MediaEntryMixin):
media_data.get_media_entry = self
else:
# Update old media data
- for field, value in kwargs.iteritems():
+ for field, value in six.iteritems(kwargs):
setattr(media_data, field, value)
@memoized_property
@@ -352,13 +697,24 @@ class MediaEntry(Base, MediaEntryMixin):
return import_component(self.media_type + '.models:BACKREF_NAME')
def __repr__(self):
- safe_title = self.title.encode('ascii', 'replace')
+ if six.PY2:
+ # obj.__repr__() should return a str on Python 2
+ safe_title = self.title.encode('utf-8', 'replace')
+ else:
+ safe_title = self.title
return '<{classname} {id}: {title}>'.format(
classname=self.__class__.__name__,
id=self.id,
title=safe_title)
+ def soft_delete(self, *args, **kwargs):
+ # Find all of the media comments for this and delete them
+ for comment in self.get_comments():
+ comment.delete(*args, **kwargs)
+
+ super(MediaEntry, self).soft_delete(*args, **kwargs)
+
def delete(self, del_orphan_tags=True, **kwargs):
"""Delete MediaEntry and all related files/attachments/comments
@@ -373,10 +729,10 @@ class MediaEntry(Base, MediaEntryMixin):
# Delete all related files/attachments
try:
delete_media_files(self)
- except OSError, error:
+ except OSError as error:
# Returns list of files we failed to delete
_log.error('No such files from the user "{1}" to delete: '
- '{0}'.format(str(error), self.get_uploader))
+ '{0}'.format(str(error), self.get_actor))
_log.info('Deleted Media entry id "{0}"'.format(self.id))
# Related MediaTag's are automatically cleaned, but we might
# want to clean out unused Tag's too.
@@ -388,6 +744,96 @@ class MediaEntry(Base, MediaEntryMixin):
# pass through commit=False/True in kwargs
super(MediaEntry, self).delete(**kwargs)
+ def serialize(self, request, show_comments=True):
+ """ Unserialize MediaEntry to object """
+ author = self.get_actor
+ published = UTC.localize(self.created)
+ updated = UTC.localize(self.updated)
+ public_id = self.get_public_id(request.urlgen)
+ context = {
+ "id": public_id,
+ "author": author.serialize(request),
+ "objectType": self.object_type,
+ "url": self.url_for_self(request.urlgen, qualified=True),
+ "image": {
+ "url": urljoin(request.host_url, self.thumb_url),
+ },
+ "fullImage":{
+ "url": urljoin(request.host_url, self.original_url),
+ },
+ "published": published.isoformat(),
+ "updated": updated.isoformat(),
+ "pump_io": {
+ "shared": False,
+ },
+ "links": {
+ "self": {
+ "href": public_id,
+ },
+
+ }
+ }
+
+ if self.title:
+ context["displayName"] = self.title
+
+ if self.description:
+ context["content"] = self.description
+
+ if self.license:
+ context["license"] = self.license
+
+ if self.location:
+ context["location"] = self.get_location.serialize(request)
+
+ if show_comments:
+ comments = [
+ l.comment().serialize(request) for l in self.get_comments()]
+ total = len(comments)
+ context["replies"] = {
+ "totalItems": total,
+ "items": comments,
+ "url": request.urlgen(
+ "mediagoblin.api.object.comments",
+ object_type=self.object_type,
+ id=self.id,
+ qualified=True
+ ),
+ }
+
+ # Add image height and width if possible. We didn't use to store this
+ # data and we're not able (and maybe not willing) to re-process all
+ # images so it's possible this might not exist.
+ if self.get_file_metadata("thumb", "height"):
+ height = self.get_file_metadata("thumb", "height")
+ context["image"]["height"] = height
+ if self.get_file_metadata("thumb", "width"):
+ width = self.get_file_metadata("thumb", "width")
+ context["image"]["width"] = width
+ if self.get_file_metadata("original", "height"):
+ height = self.get_file_metadata("original", "height")
+ context["fullImage"]["height"] = height
+ if self.get_file_metadata("original", "height"):
+ width = self.get_file_metadata("original", "width")
+ context["fullImage"]["width"] = width
+
+ return context
+
+ def unserialize(self, data):
+ """ Takes API objects and unserializes on existing MediaEntry """
+ if "displayName" in data:
+ self.title = data["displayName"]
+
+ if "content" in data:
+ self.description = data["content"]
+
+ if "license" in data:
+ self.license = data["license"]
+
+ if "location" in data:
+ Location.create(data["location"], self)
+
+ return True
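A sketch of the serialize/unserialize pairing above; `entry` and `request` are assumed to be a processed MediaEntry and a MediaGoblin request object:

    as_json = entry.serialize(request, show_comments=False)
    # as_json carries "author", "image"/"fullImage" URLs, "published", etc.
    entry.unserialize({"displayName": u"New title"})
    entry.save()
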
class FileKeynames(Base):
"""
@@ -445,7 +891,7 @@ class MediaAttachmentFile(Base):
nullable=False)
name = Column(Unicode, nullable=False)
filepath = Column(PathTupleWithSlashes)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
@property
def dict_view(self):
@@ -479,7 +925,7 @@ class MediaTag(Base):
nullable=False, index=True)
tag = Column(Integer, ForeignKey(Tag.id), nullable=False, index=True)
name = Column(Unicode)
- # created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ # created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
__table_args__ = (
UniqueConstraint('tag', 'media_entry'),
@@ -502,65 +948,218 @@ class MediaTag(Base):
"""A dict like view on this object"""
return DictReadAttrProxy(self)
+class Comment(Base):
+ """
+ Link table between a response and another object that can have replies.
-class MediaComment(Base, MediaCommentMixin):
+ This acts as a link table between an object and the comments on it. It's
+ done like this so that you can look up all the comments without knowing
+ which objects carry comments beforehand. Any object can be a comment
+ and more or less any object can accept comments too.
+
+ Important: This is NOT the old MediaComment table.
+ """
+ __tablename__ = "core__comment_links"
+
+ id = Column(Integer, primary_key=True)
+
+ # The GMR to the object the comment is on.
+ target_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference.id),
+ nullable=False
+ )
+ target_helper = relationship(
+ GenericModelReference,
+ foreign_keys=[target_id]
+ )
+ target = association_proxy("target_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+
+ # The comment object
+ comment_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference.id),
+ nullable=False
+ )
+ comment_helper = relationship(
+ GenericModelReference,
+ foreign_keys=[comment_id]
+ )
+ comment = association_proxy("comment_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+
+ # When it was added
+ added = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
+ @property
+ def get_author(self):
+ # for compatibility
+ return self.comment().get_actor # noqa
+
+ def __getattr__(self, attr):
+ if attr.startswith('_'):
+ # if attr starts with '_', then it's probably some internal
+ # sqlalchemy variable. Since __getattr__ is called when
+ # non-existing attributes are being accessed, we should not try to
+ # fetch it from self.comment()
+ raise AttributeError
+ try:
+ _log.debug('Old attr is being accessed: {0}'.format(attr))
+ return getattr(self.comment(), attr) # noqa
+ except Exception as e:
+ _log.error(e)
+ raise
+
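A sketch of wiring a reply to its target through this link table, mirroring what TextComment.unserialize below does; `user` and `media_entry` are hypothetical saved rows:

    reply = TextComment(actor=user.id, content=u"Nice shot!")
    reply.save(commit=False)     # the GMR needs a primary key to point at
    link = Comment()
    link.target = media_entry    # association proxy -> GenericModelReference
    link.comment = reply
    link.save()
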
+class TextComment(Base, TextCommentMixin, CommentingMixin):
+ """
+ A basic text comment, usually a short amount of text and nothing else.
+ """
+ # This tablename is a legacy from when comments were just on MediaEntry objects.
__tablename__ = "core__media_comments"
id = Column(Integer, primary_key=True)
- media_entry = Column(
- Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
- author = Column(Integer, ForeignKey(User.id), nullable=False)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ public_id = Column(Unicode, unique=True)
+ actor = Column(Integer, ForeignKey(User.id), nullable=False)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
content = Column(UnicodeText, nullable=False)
+ location = Column(Integer, ForeignKey("core__locations.id"))
+ get_location = relationship("Location", lazy="joined")
# Cascade: Comments are owned by their creator. So do the full thing.
# lazy=dynamic: People might post a *lot* of comments,
# so make the "posted_comments" a query-like thing.
- get_author = relationship(User,
+ get_actor = relationship(User,
backref=backref("posted_comments",
lazy="dynamic",
cascade="all, delete-orphan"))
- get_entry = relationship(MediaEntry,
- backref=backref("comments",
- lazy="dynamic",
- cascade="all, delete-orphan"))
-
- # Cascade: Comments are somewhat owned by their MediaEntry.
- # So do the full thing.
- # lazy=dynamic: MediaEntries might have many comments,
- # so make the "all_comments" a query-like thing.
- get_media_entry = relationship(MediaEntry,
- backref=backref("all_comments",
- lazy="dynamic",
- cascade="all, delete-orphan"))
-
-
-class Collection(Base, CollectionMixin):
- """An 'album' or 'set' of media by a user.
+ deletion_mode = Base.SOFT_DELETE
+
+ def serialize(self, request):
+ """ Unserialize to python dictionary for API """
+ target = self.get_reply_to()
+ # If this is target just.. give them nothing?
+ if target is None:
+ target = {}
+ else:
+ target = target.serialize(request, show_comments=False)
+
+
+ author = self.get_actor
+ published = UTC.localize(self.created)
+ updated = UTC.localize(self.updated)
+ context = {
+ "id": self.get_public_id(request.urlgen),
+ "objectType": self.object_type,
+ "content": self.content,
+ "inReplyTo": target,
+ "author": author.serialize(request),
+ "published": published.isoformat(),
+ "updated": published.isoformat(),
+ }
+
+ if self.location:
+ context["location"] = self.get_location.seralize(request)
+
+ return context
+
+ def unserialize(self, data, request):
+ """ Takes API objects and unserializes on existing comment """
+ if "content" in data:
+ self.content = data["content"]
+
+ if "location" in data:
+ Location.create(data["location"], self)
+
+
+ # Handle changing the reply ID
+ if "inReplyTo" in data:
+ # Validate that the ID is correct
+ try:
+ id = extract_url_arguments(
+ url=data["inReplyTo"]["id"],
+ urlmap=request.app.url_map
+ )["id"]
+ except ValueError:
+ return False
+
+ public_id = request.urlgen(
+ "mediagoblin.api.object",
+ id=id,
+ object_type=data["inReplyTo"]["objectType"],
+ qualified=True
+ )
+
+ media = MediaEntry.query.filter_by(public_id=public_id).first()
+ if media is None:
+ return False
+
+ # We need an ID for this model.
+ self.save(commit=False)
+
+ # Create the link
+ link = Comment()
+ link.target = media
+ link.comment = self
+ link.save()
+
+ return True
+
+class Collection(Base, CollectionMixin, CommentingMixin):
+ """A representation of a collection of objects.
+
+ This holds a group/collection of objects that could be a user-defined album
+ or their inbox, outbox, followers, etc. These are always ordered and accessible
+ via the API and web.
+
+ The collection has a number of types which determine what kind of collection
+ it is; for example, the user's inbox will have `Collection.INBOX_TYPE` stored
+ on the `Collection.type` field. It's important to set the correct type.
On deletion, contained CollectionItems get automatically reaped via
SQL cascade"""
__tablename__ = "core__collections"
id = Column(Integer, primary_key=True)
+ public_id = Column(Unicode, unique=True)
title = Column(Unicode, nullable=False)
slug = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now,
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow,
index=True)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
description = Column(UnicodeText)
- creator = Column(Integer, ForeignKey(User.id), nullable=False)
- # TODO: No of items in Collection. Badly named, can we migrate to num_items?
- items = Column(Integer, default=0)
+ actor = Column(Integer, ForeignKey(User.id), nullable=False)
+ num_items = Column(Integer, default=0)
+
+ # There are lots of different special types of collections in the pump.io
+ # API, for example: followers, following, inbox, outbox, etc. See the type
+ # constants below the fields on this model.
+ type = Column(Unicode, nullable=False)
+
+ # Location
+ location = Column(Integer, ForeignKey("core__locations.id"))
+ get_location = relationship("Location", lazy="joined")
# Cascade: Collections are owned by their creator. So do the full thing.
- get_creator = relationship(User,
+ get_actor = relationship(User,
backref=backref("collections",
cascade="all, delete-orphan"))
-
__table_args__ = (
- UniqueConstraint('creator', 'slug'),
+ UniqueConstraint("actor", "slug"),
{})
+ deletion_mode = Base.SOFT_DELETE
+
+ # These are the types. It's strongly suggested that if new ones are invented
+ # they are prefixed to ensure they're unique from other types. Any types used
+ # in the main mediagoblin should be prefixed "core-".
+ INBOX_TYPE = "core-inbox"
+ OUTBOX_TYPE = "core-outbox"
+ FOLLOWER_TYPE = "core-followers"
+ FOLLOWING_TYPE = "core-following"
+ COMMENT_TYPE = "core-comments"
+ USER_DEFINED_TYPE = "core-user-defined"
+
def get_collection_items(self, ascending=False):
#TODO, is this still needed with self.collection_items being available?
order_col = CollectionItem.position
@@ -569,28 +1168,58 @@ class Collection(Base, CollectionMixin):
return CollectionItem.query.filter_by(
collection=self.id).order_by(order_col)
+ def __repr__(self):
+ if six.PY2:
+ # obj.__repr__() should return a str on Python 2
+ safe_title = self.title.encode('utf-8', 'replace')
+ else:
+ safe_title = self.title
+ return '<{classname} #{id}: {title} by {actor}>'.format(
+ id=self.id,
+ classname=self.__class__.__name__,
+ actor=self.actor,
+ title=safe_title)
+
+ def serialize(self, request):
+ # Get all serialized output in a list
+ items = [i.serialize(request) for i in self.get_collection_items()]
+ return {
+ "totalItems": self.num_items,
+ "url": self.url_for_self(request.urlgen, qualified=True),
+ "items": items,
+ }
+
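A sketch of creating a user-defined album with the type constants above; `user` is hypothetical:

    album = Collection(
        title=u"Holiday photos",
        actor=user.id,
        type=Collection.USER_DEFINED_TYPE,  # always set an explicit type
    )
    album.save()
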
class CollectionItem(Base, CollectionItemMixin):
__tablename__ = "core__collection_items"
id = Column(Integer, primary_key=True)
- media_entry = Column(
- Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
+
collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
note = Column(UnicodeText, nullable=True)
- added = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ added = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
position = Column(Integer)
-
# Cascade: CollectionItems are owned by their Collection. So do the full thing.
in_collection = relationship(Collection,
backref=backref(
"collection_items",
cascade="all, delete-orphan"))
- get_media_entry = relationship(MediaEntry)
+ # Link to the object (could be anything).
+ object_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference.id),
+ nullable=False,
+ index=True
+ )
+ object_helper = relationship(
+ GenericModelReference,
+ foreign_keys=[object_id]
+ )
+ get_object = association_proxy(
+ "object_helper",
+ "get_object",
+ creator=GenericModelReference.find_or_new
+ )
__table_args__ = (
- UniqueConstraint('collection', 'media_entry'),
+ UniqueConstraint('collection', 'object_id'),
{})
@property
@@ -598,6 +1227,17 @@ class CollectionItem(Base, CollectionItemMixin):
"""A dict like view on this object"""
return DictReadAttrProxy(self)
+ def __repr__(self):
+ return '<{classname} #{id}: Object {obj} in {collection}>'.format(
+ id=self.id,
+ classname=self.__class__.__name__,
+ collection=self.collection,
+ obj=self.get_object()
+ )
+
+ def serialize(self, request):
+ return self.get_object().serialize(request)
+
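A sketch of filing an arbitrary object into a collection through the generic reference; `album` and `media_entry` are hypothetical:

    item = CollectionItem(collection=album.id)
    item.get_object = media_entry   # proxied through GenericModelReference
    item.save()
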
class ProcessingMetaData(Base):
__tablename__ = 'core__processing_metadata'
@@ -620,7 +1260,7 @@ class CommentSubscription(Base):
__tablename__ = 'core__comment_subscriptions'
id = Column(Integer, primary_key=True)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)
media_entry = relationship(MediaEntry,
@@ -649,10 +1289,13 @@ class CommentSubscription(Base):
class Notification(Base):
__tablename__ = 'core__notifications'
id = Column(Integer, primary_key=True)
- type = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ object_id = Column(Integer, ForeignKey(GenericModelReference.id))
+ object_helper = relationship(GenericModelReference)
+ obj = association_proxy("object_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
user_id = Column(Integer, ForeignKey('core__users.id'), nullable=False,
index=True)
seen = Column(Boolean, default=lambda: False, index=True)
@@ -660,11 +1303,6 @@ class Notification(Base):
User,
backref=backref('notifications', cascade='all, delete-orphan'))
- __mapper_args__ = {
- 'polymorphic_identity': 'notification',
- 'polymorphic_on': type
- }
-
def __repr__(self):
return '<{klass} #{id}: {user}: {subject} ({seen})>'.format(
id=self.id,
@@ -673,43 +1311,17 @@ class Notification(Base):
subject=getattr(self, 'subject', None),
seen='unseen' if not self.seen else 'seen')
+ def __unicode__(self):
+ return u'<{klass} #{id}: {user}: {subject} ({seen})>'.format(
+ id=self.id,
+ klass=self.__class__.__name__,
+ user=self.user,
+ subject=getattr(self, 'subject', None),
+ seen='unseen' if not self.seen else 'seen')
-class CommentNotification(Notification):
- __tablename__ = 'core__comment_notifications'
- id = Column(Integer, ForeignKey(Notification.id), primary_key=True)
-
- subject_id = Column(Integer, ForeignKey(MediaComment.id))
- subject = relationship(
- MediaComment,
- backref=backref('comment_notifications', cascade='all, delete-orphan'))
-
- __mapper_args__ = {
- 'polymorphic_identity': 'comment_notification'
- }
-
-
-class ProcessingNotification(Notification):
- __tablename__ = 'core__processing_notifications'
-
- id = Column(Integer, ForeignKey(Notification.id), primary_key=True)
-
- subject_id = Column(Integer, ForeignKey(MediaEntry.id))
- subject = relationship(
- MediaEntry,
- backref=backref('processing_notifications',
- cascade='all, delete-orphan'))
-
- __mapper_args__ = {
- 'polymorphic_identity': 'processing_notification'
- }
-
-with_polymorphic(
- Notification,
- [ProcessingNotification, CommentNotification])
-
-class ReportBase(Base):
+class Report(Base):
"""
- This is the basic report object which the other reports are based off of.
+ Represents a report that someone might file against Media, Comments, etc.
:keyword reporter_id Holds the id of the user who created
the report, as an Integer column.
@@ -722,8 +1334,6 @@ class ReportBase(Base):
an Integer column.
:keyword created Holds a datetime column of when the re-
-port was filed.
- :keyword discriminator This column distinguishes between the
- different types of reports.
:keyword resolver_id Holds the id of the moderator/admin who
resolved the report.
:keyword resolved Holds the DateTime object which descri-
@@ -732,8 +1342,11 @@ class ReportBase(Base):
resolver's reasons for resolving
the report this way. Some of this
is auto-generated
+ :keyword object_id Holds the ID of the GenericModelReference
+ which points to the reported object.
"""
__tablename__ = 'core__reports'
+
id = Column(Integer, primary_key=True)
reporter_id = Column(Integer, ForeignKey(User.id), nullable=False)
reporter = relationship(
@@ -741,7 +1354,7 @@ class ReportBase(Base):
backref=backref("reports_filed_by",
lazy="dynamic",
cascade="all, delete-orphan"),
- primaryjoin="User.id==ReportBase.reporter_id")
+ primaryjoin="User.id==Report.reporter_id")
report_content = Column(UnicodeText)
reported_user_id = Column(Integer, ForeignKey(User.id), nullable=False)
reported_user = relationship(
@@ -749,70 +1362,42 @@ class ReportBase(Base):
backref=backref("reports_filed_on",
lazy="dynamic",
cascade="all, delete-orphan"),
- primaryjoin="User.id==ReportBase.reported_user_id")
- created = Column(DateTime, nullable=False, default=datetime.datetime.now())
- discriminator = Column('type', Unicode(50))
+ primaryjoin="User.id==Report.reported_user_id")
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
resolver_id = Column(Integer, ForeignKey(User.id))
resolver = relationship(
User,
backref=backref("reports_resolved_by",
lazy="dynamic",
cascade="all, delete-orphan"),
- primaryjoin="User.id==ReportBase.resolver_id")
+ primaryjoin="User.id==Report.resolver_id")
resolved = Column(DateTime)
result = Column(UnicodeText)
- __mapper_args__ = {'polymorphic_on': discriminator}
- def is_comment_report(self):
- return self.discriminator=='comment_report'
-
- def is_media_entry_report(self):
- return self.discriminator=='media_report'
+ object_id = Column(Integer, ForeignKey(GenericModelReference.id), nullable=True)
+ object_helper = relationship(GenericModelReference)
+ obj = association_proxy("object_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
def is_archived_report(self):
return self.resolved is not None
+ def is_comment_report(self):
+ if self.object_id is None:
+ return False
+ return isinstance(self.obj(), TextComment)
+
+ def is_media_entry_report(self):
+ if self.object_id is None:
+ return False
+ return isinstance(self.obj(), MediaEntry)
+
def archive(self,resolver_id, resolved, result):
self.resolver_id = resolver_id
self.resolved = resolved
self.result = result
-
-class CommentReport(ReportBase):
- """
- Reports that have been filed on comments.
- :keyword comment_id Holds the integer value of the reported
- comment's ID
- """
- __tablename__ = 'core__reports_on_comments'
- __mapper_args__ = {'polymorphic_identity': 'comment_report'}
-
- id = Column('id',Integer, ForeignKey('core__reports.id'),
- primary_key=True)
- comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
- comment = relationship(
- MediaComment, backref=backref("reports_filed_on",
- lazy="dynamic"))
-
-
-class MediaReport(ReportBase):
- """
- Reports that have been filed on media entries
- :keyword media_entry_id Holds the integer value of the reported
- media entry's ID
- """
- __tablename__ = 'core__reports_on_media'
- __mapper_args__ = {'polymorphic_identity': 'media_report'}
-
- id = Column('id',Integer, ForeignKey('core__reports.id'),
- primary_key=True)
- media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=True)
- media_entry = relationship(
- MediaEntry,
- backref=backref("reports_filed_on",
- lazy="dynamic"))
-
class UserBan(Base):
"""
Holds the information on a specific user's ban-state. As long as one of
@@ -888,13 +1473,146 @@ class PrivilegeUserAssociation(Base):
ForeignKey(Privilege.id),
primary_key=True)
+class Generator(Base):
+ """ Information about what created an activity """
+ __tablename__ = "core__generators"
+
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode, nullable=False)
+ published = Column(DateTime, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, default=datetime.datetime.utcnow)
+ object_type = Column(Unicode, nullable=False)
+
+ deletion_mode = Base.SOFT_DELETE
+
+ def __repr__(self):
+ return "<{klass} {name}>".format(
+ klass=self.__class__.__name__,
+ name=self.name
+ )
+
+ def serialize(self, request):
+ href = request.urlgen(
+ "mediagoblin.api.object",
+ object_type=self.object_type,
+ id=self.id,
+ qualified=True
+ )
+ published = UTC.localize(self.published)
+ updated = UTC.localize(self.updated)
+ return {
+ "id": href,
+ "displayName": self.name,
+ "published": published.isoformat(),
+ "updated": updated.isoformat(),
+ "objectType": self.object_type,
+ }
+
+ def unserialize(self, data):
+ if "displayName" in data:
+ self.name = data["displayName"]
+
+class Activity(Base, ActivityMixin):
+ """
+ This holds all the metadata about an activity such as uploading an image,
+ posting a comment, etc.
+ """
+ __tablename__ = "core__activities"
+
+ id = Column(Integer, primary_key=True)
+ public_id = Column(Unicode, unique=True)
+ actor = Column(Integer,
+ ForeignKey("core__users.id"),
+ nullable=False)
+ published = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
+ verb = Column(Unicode, nullable=False)
+ content = Column(Unicode, nullable=True)
+ title = Column(Unicode, nullable=True)
+ generator = Column(Integer,
+ ForeignKey("core__generators.id"),
+ nullable=True)
+
+ # Create the generic foreign keys for the object
+ object_id = Column(Integer, ForeignKey(GenericModelReference.id), nullable=False)
+ object_helper = relationship(GenericModelReference, foreign_keys=[object_id])
+ object = association_proxy("object_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+
+ # Create the generic foreign Key for the target
+ target_id = Column(Integer, ForeignKey(GenericModelReference.id), nullable=True)
+ target_helper = relationship(GenericModelReference, foreign_keys=[target_id])
+ target = association_proxy("target_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+
+ get_actor = relationship(User,
+ backref=backref("activities",
+ cascade="all, delete-orphan"))
+ get_generator = relationship(Generator)
+
+ deletion_mode = Base.SOFT_DELETE
+
+ def __repr__(self):
+ if self.content is None:
+ return "<{klass} verb:{verb}>".format(
+ klass=self.__class__.__name__,
+ verb=self.verb
+ )
+ else:
+ return "<{klass} {content}>".format(
+ klass=self.__class__.__name__,
+ content=self.content
+ )
+
+ def save(self, set_updated=True, *args, **kwargs):
+ if set_updated:
+ self.updated = datetime.datetime.utcnow()
+ super(Activity, self).save(*args, **kwargs)
+
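A sketch of recording an activity with the generic object/target references above; names are illustrative:

    activity = Activity(verb=u"post", actor=user.id)
    activity.object = media_entry   # generic reference to the posted object
    activity.save()                 # save() also bumps "updated" by default
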
+class Graveyard(Base):
+ """ Where models come to die """
+ __tablename__ = "core__graveyard"
+
+ id = Column(Integer, primary_key=True)
+ public_id = Column(Unicode, nullable=True, unique=True)
+
+ deleted = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ object_type = Column(Unicode, nullable=False)
+
+ # This could either be a deleted actor or a real actor; it must be
+ # nullable as we shouldn't have it set for a deleted actor.
+ actor_id = Column(Integer, ForeignKey(GenericModelReference.id))
+ actor_helper = relationship(GenericModelReference)
+ actor = association_proxy("actor_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+
+ def __repr__(self):
+ return "<{klass} deleted {obj_type}>".format(
+ klass=type(self).__name__,
+ obj_type=self.object_type
+ )
+
+ def serialize(self, request):
+ deleted = UTC.localize(self.deleted).isoformat()
+ context = {
+ "id": self.public_id,
+ "objectType": self.object_type,
+ "published": deleted,
+ "updated": deleted,
+ "deleted": deleted,
+ }
+
+ if self.actor_id is not None:
+ context["actor"] = self.actor().serialize(request)
+
+ return context
MODELS = [
- User, MediaEntry, Tag, MediaTag, MediaComment, Collection, CollectionItem,
- MediaFile, FileKeynames, MediaAttachmentFile, ProcessingMetaData,
- Notification, CommentNotification, ProcessingNotification, Client,
- CommentSubscription, ReportBase, CommentReport, MediaReport, UserBan,
- Privilege, PrivilegeUserAssociation,
- RequestToken, AccessToken, NonceTimestamp]
+ LocalUser, RemoteUser, User, MediaEntry, Tag, MediaTag, Comment, TextComment,
+ Collection, CollectionItem, MediaFile, FileKeynames, MediaAttachmentFile,
+ ProcessingMetaData, Notification, Client, CommentSubscription, Report,
+ UserBan, Privilege, PrivilegeUserAssociation, RequestToken, AccessToken,
+ NonceTimestamp, Activity, Generator, Location, GenericModelReference, Graveyard]
"""
Foundations are the default rows that are created immediately after the tables
@@ -945,7 +1663,7 @@ def show_table_init(engine_uri):
if __name__ == '__main__':
from sys import argv
- print repr(argv)
+ print(repr(argv))
if len(argv) == 2:
uri = argv[1]
else:
diff --git a/mediagoblin/db/open.py b/mediagoblin/db/open.py
index 4ff0945f..8f81c8d9 100644
--- a/mediagoblin/db/open.py
+++ b/mediagoblin/db/open.py
@@ -15,38 +15,117 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from sqlalchemy import create_engine, event
+from contextlib import contextmanager
import logging
-from mediagoblin.db.base import Base, Session
+import six
+from sqlalchemy import create_engine, event
+
from mediagoblin import mg_globals
+from mediagoblin.db.base import Base
_log = logging.getLogger(__name__)
+from mediagoblin.tools.transition import DISABLE_GLOBALS
+
+def set_models_as_attributes(obj):
+ """
+ Set all models as attributes on this object, for convenience
+
+ TODO: This should eventually be deprecated.
+ """
+ for k, v in six.iteritems(Base._decl_class_registry):
+ setattr(obj, k, v)
+
+
+if not DISABLE_GLOBALS:
+ from mediagoblin.db.base import Session
+
+ class DatabaseMaster(object):
+ def __init__(self, engine):
+ self.engine = engine
+
+ set_models_as_attributes(self)
+
+ def commit(self):
+ Session.commit()
-class DatabaseMaster(object):
- def __init__(self, engine):
- self.engine = engine
+ def save(self, obj):
+ Session.add(obj)
+ Session.flush()
- for k, v in Base._decl_class_registry.iteritems():
- setattr(self, k, v)
+ def check_session_clean(self):
+ for dummy in Session():
+ _log.warn("STRANGE: There are elements in the sql session. "
+ "Please report this and help us track this down.")
+ break
- def commit(self):
- Session.commit()
+ def reset_after_request(self):
+ Session.rollback()
+ Session.remove()
- def save(self, obj):
- Session.add(obj)
- Session.flush()
+ @property
+ def query(self):
+ return Session.query
- def check_session_clean(self):
- for dummy in Session():
- _log.warn("STRANGE: There are elements in the sql session. "
- "Please report this and help us track this down.")
- break
+else:
+ from sqlalchemy.orm import sessionmaker
+
+ class DatabaseManager(object):
+ """
+ Manage database connections.
+
+ The main method here is session_scope which can be used with a
+ "with" statement to get a session that is properly torn down
+ by the end of execution.
+ """
+ def __init__(self, engine):
+ self.engine = engine
+ self.Session = sessionmaker(bind=engine)
+ set_models_as_attributes(self)
+
+ @contextmanager
+ def session_scope(self):
+ """
+ This is a context manager, use like::
+
+ with dbmanager.session_scope() as request.db:
+ some_view(request)
+ """
+ session = self.Session()
+
+ #####################################
+ # Functions to emulate DatabaseMaster
+ #####################################
+ def save(obj):
+ session.add(obj)
+ session.flush()
+
+ def check_session_clean():
+ # Is this implemented right?
+ for dummy in session:
+ _log.warn("STRANGE: There are elements in the sql session. "
+ "Please report this and help us track this down.")
+ break
+
+ def reset_after_request():
+ session.rollback()
+ # a plain Session has no remove(); close() is the nearest equivalent
+ session.close()
+
+ # now attach
+ session.save = save
+ session.check_session_clean = check_session_clean
+ session.reset_after_request = reset_after_request
+
+ set_models_as_attributes(session)
+ #####################################
+
+ try:
+ yield session
+ finally:
+ session.rollback()
+ session.close()
- def reset_after_request(self):
- Session.rollback()
- Session.remove()
def load_models(app_config):
@@ -75,9 +154,14 @@ def _sqlite_disable_fk_pragma_on_connect(dbapi_con, con_record):
dbapi_con.execute('pragma foreign_keys=off')
-def setup_connection_and_db_from_config(app_config, migrations=False):
+def setup_connection_and_db_from_config(app_config, migrations=False, app=None):
engine = create_engine(app_config['sql_engine'])
+ # @@: Maybe make a weak-ref so an engine can get garbage
+ # collected? Not that we expect to make a lot of MediaGoblinApp
+ # instances in a single process...
+ engine.app = app
+
# Enable foreign key checking for sqlite
if app_config['sql_engine'].startswith('sqlite://'):
if migrations:
@@ -88,9 +172,13 @@ def setup_connection_and_db_from_config(app_config, migrations=False):
# logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
- Session.configure(bind=engine)
+ if DISABLE_GLOBALS:
+ return DatabaseManager(engine)
+
+ else:
+ Session.configure(bind=engine)
- return DatabaseMaster(engine)
+ return DatabaseMaster(engine)
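A sketch of the two interfaces a caller can get back, depending on DISABLE_GLOBALS; `obj` is hypothetical:

    db = setup_connection_and_db_from_config(app_config, app=app)
    if DISABLE_GLOBALS:
        with db.session_scope() as session:  # DatabaseManager path
            session.save(obj)
    else:
        db.save(obj)                         # DatabaseMaster, global Session
        db.commit()
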
def check_db_migrations_current(db):
diff --git a/mediagoblin/db/util.py b/mediagoblin/db/util.py
index aba9c59c..57e6b942 100644
--- a/mediagoblin/db/util.py
+++ b/mediagoblin/db/util.py
@@ -17,10 +17,14 @@
import sys
from mediagoblin import mg_globals as mgg
-from mediagoblin.db.base import Session
from mediagoblin.db.models import MediaEntry, Tag, MediaTag, Collection
from mediagoblin.gmg_commands.dbupdate import gather_database_data
+from mediagoblin.tools.transition import DISABLE_GLOBALS
+
+if not DISABLE_GLOBALS:
+ from mediagoblin.db.base import Session
+
##########################
# Random utility functions
##########################
@@ -33,7 +37,7 @@ def atomic_update(table, query_dict, update_values):
def check_media_slug_used(uploader_id, slug, ignore_m_id):
- query = MediaEntry.query.filter_by(uploader=uploader_id, slug=slug)
+ query = MediaEntry.query.filter_by(actor=uploader_id, slug=slug)
if ignore_m_id is not None:
query = query.filter(MediaEntry.id != ignore_m_id)
does_exist = query.first() is not None
@@ -63,7 +67,7 @@ def clean_orphan_tags(commit=True):
def check_collection_slug_used(creator_id, slug, ignore_c_id):
- filt = (Collection.creator == creator_id) \
+ filt = (Collection.actor == creator_id) \
& (Collection.slug == slug)
if ignore_c_id is not None:
filt = filt & (Collection.id != ignore_c_id)
@@ -76,11 +80,16 @@ def check_db_up_to_date():
dbdatas = gather_database_data(mgg.global_config.get('plugins', {}).keys())
for dbdata in dbdatas:
- migration_manager = dbdata.make_migration_manager(Session())
- if migration_manager.database_current_migration is None or \
- migration_manager.migrations_to_run():
- sys.exit("Your database is not up to date. Please run "
- "'gmg dbupdate' before starting MediaGoblin.")
+ session = Session()
+ try:
+ migration_manager = dbdata.make_migration_manager(session)
+ if migration_manager.database_current_migration is None or \
+ migration_manager.migrations_to_run():
+ sys.exit("Your database is not up to date. Please run "
+ "'gmg dbupdate' before starting MediaGoblin.")
+ finally:
+ Session.rollback()
+ Session.remove()
if __name__ == '__main__':