Diffstat (limited to 'mediagoblin/db')
-rw-r--r--  mediagoblin/db/__init__.py   |   1
-rw-r--r--  mediagoblin/db/base.py       |  84
-rw-r--r--  mediagoblin/db/migrations.py | 899
-rw-r--r--  mediagoblin/db/mixin.py      | 138
-rw-r--r--  mediagoblin/db/models.py     | 876
-rw-r--r--  mediagoblin/db/util.py       |   4
6 files changed, 1618 insertions(+), 384 deletions(-)
diff --git a/mediagoblin/db/__init__.py b/mediagoblin/db/__init__.py
index 719b56e7..621845ba 100644
--- a/mediagoblin/db/__init__.py
+++ b/mediagoblin/db/__init__.py
@@ -13,4 +13,3 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
diff --git a/mediagoblin/db/base.py b/mediagoblin/db/base.py
index 6acb0b79..11afbcec 100644
--- a/mediagoblin/db/base.py
+++ b/mediagoblin/db/base.py
@@ -13,7 +13,8 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
+import six
+import copy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import inspect
@@ -24,8 +25,38 @@ if not DISABLE_GLOBALS:
from sqlalchemy.orm import scoped_session, sessionmaker
Session = scoped_session(sessionmaker())
+class FakeCursor(object):
+
+ def __init__(self, cursor, mapper, filter=None):
+ self.cursor = cursor
+ self.mapper = mapper
+ self.filter = filter
+
+ def count(self):
+ return self.cursor.count()
+
+ def __copy__(self):
+ # Implement the copy protocol so that copy.copy() returns a new
+ # FakeCursor wrapping a copy of the underlying cursor.
+ return FakeCursor(copy.copy(self.cursor), self.mapper, self.filter)
+
+ def __iter__(self):
+ return six.moves.filter(self.filter, six.moves.map(self.mapper, self.cursor))
+
+ def __getitem__(self, key):
+ return self.mapper(self.cursor[key])
+
+ def slice(self, *args, **kwargs):
+ r = self.cursor.slice(*args, **kwargs)
+ return list(six.moves.filter(self.filter, six.moves.map(self.mapper, r)))
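
FakeCursor wraps a SQLAlchemy query so that every row is passed through a mapper (and an optional filter) on access; later in this commit, MediaEntry.get_comments uses it to turn Comment link rows into the comment objects they point at. A minimal sketch of the idea, where `comment_link_query` is a hypothetical query of Comment links:

    # Sketch: map each Comment link row to the object it points at.
    cursor = FakeCursor(comment_link_query, lambda link: link.comment())

    total = cursor.count()   # count() is delegated to the wrapped query
    first = cursor[0]        # indexed access is mapped
    for comment in cursor:   # iteration is mapped and filtered
        print(comment)
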
class GMGTableBase(object):
+ # Deletion types
+ HARD_DELETE = "hard-deletion"
+ SOFT_DELETE = "soft-deletion"
+
+ deletion_mode = HARD_DELETE
+
@property
def _session(self):
return inspect(self).session
@@ -55,7 +86,56 @@ class GMGTableBase(object):
else:
sess.flush()
- def delete(self, commit=True):
+ def delete(self, commit=True, deletion=None):
+ """ Delete the object either using soft or hard deletion """
+ # Get the setting in the model args if none has been specified.
+ if deletion is None:
+ deletion = self.deletion_mode
+
+ # Hand off to the correct deletion function.
+ if deletion == self.HARD_DELETE:
+ return self.hard_delete(commit=commit)
+ elif deletion == self.SOFT_DELETE:
+ return self.soft_delete(commit=commit)
+ else:
+ raise ValueError(
+ "Invalid deletion mode {mode!r}".format(
+ mode=deletion
+ )
+ )
+
+ def soft_delete(self, commit):
+ # Create the graveyard version of this model
+ # Importing this here due to cyclic imports
+ from mediagoblin.db.models import User, Graveyard, GenericModelReference
+ tombstone = Graveyard()
+ if getattr(self, "public_id", None) is not None:
+ tombstone.public_id = self.public_id
+
+ # This is a special case: we don't want to save an actor if the
+ # object being soft deleted is a User model, as that would create
+ # circular ForeignKeys.
+ if not isinstance(self, User):
+ tombstone.actor = User.query.filter_by(
+ id=self.actor
+ ).first()
+ tombstone.object_type = self.object_type
+ tombstone.save(commit=False)
+
+ # There will be a lot of places where a GenericForeignKey points
+ # to this model; remap those references to our tombstone.
+ gmrs = GenericModelReference.query.filter_by(
+ obj_pk=self.id,
+ model_type=self.__tablename__
+ ).update({
+ "obj_pk": tombstone.id,
+ "model_type": tombstone.__tablename__,
+ })
+
+ # Now we can go ahead and actually delete the model.
+ return self.hard_delete(commit=commit)
+
+ def hard_delete(self, commit):
"""Delete the object and commit the change immediately by default"""
sess = self._session
assert sess is not None, "Not going to delete detached %r" % self
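
With this dispatch in place, callers keep using delete(); the class-level deletion_mode decides what happens, and a caller can override it per call. A hedged sketch of the resulting call patterns (the `entry` variable is illustrative):

    # Models opt in to soft deletion by setting deletion_mode, as the
    # reworked User and MediaEntry models do later in this commit.
    entry.delete()                                   # uses entry.deletion_mode
    entry.delete(deletion=GMGTableBase.HARD_DELETE)  # force a hard delete

    # A soft delete leaves a Graveyard tombstone behind and remaps any
    # GenericModelReference rows to it before the row itself is removed.
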
diff --git a/mediagoblin/db/migrations.py b/mediagoblin/db/migrations.py
index 74c1194f..461b9c0a 100644
--- a/mediagoblin/db/migrations.py
+++ b/mediagoblin/db/migrations.py
@@ -32,11 +32,14 @@ from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import and_
from sqlalchemy.schema import UniqueConstraint
+from mediagoblin import oauth
+from mediagoblin.tools import crypto
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
from mediagoblin.db.migration_tools import (
RegisterMigration, inspect_table, replace_table_hack)
-from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
- Privilege, Generator)
+from mediagoblin.db.models import (MediaEntry, Collection, Comment, User,
+ Privilege, Generator, LocalUser, Location,
+ Client, RequestToken, AccessToken)
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
@@ -350,7 +353,7 @@ class CommentNotification_v0(Notification_v0):
__tablename__ = 'core__comment_notifications'
id = Column(Integer, ForeignKey(Notification_v0.id), primary_key=True)
- subject_id = Column(Integer, ForeignKey(MediaComment.id))
+ subject_id = Column(Integer, ForeignKey(Comment.id))
class ProcessingNotification_v0(Notification_v0):
@@ -539,7 +542,7 @@ class CommentReport_v0(ReportBase_v0):
id = Column('id',Integer, ForeignKey('core__reports.id'),
primary_key=True)
- comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
+ comment_id = Column(Integer, ForeignKey(Comment.id), nullable=True)
class MediaReport_v0(ReportBase_v0):
@@ -910,6 +913,14 @@ class ActivityIntermediator_R0(declarative_base()):
id = Column(Integer, primary_key=True)
type = Column(Unicode, nullable=False)
+ # These are needed for migration 29
+ TYPES = {
+ "user": User,
+ "media": MediaEntry,
+ "comment": Comment,
+ "collection": Collection,
+ }
+
class Activity_R0(declarative_base()):
__tablename__ = "core__activities"
id = Column(Integer, primary_key=True)
@@ -927,6 +938,7 @@ class Activity_R0(declarative_base()):
ForeignKey(ActivityIntermediator_R0.id),
nullable=True)
+
@RegisterMigration(24, MIGRATIONS)
def activity_migration(db):
"""
@@ -1249,3 +1261,882 @@ def datetime_to_utc(db):
# Commit this to the database
db.commit()
+
+##
+# Migrations to handle moving from the activity-specific foreign keys to
+# the new GenericForeignKey implementation. They have been split up to
+# improve readability and minimise errors.
+##
+
+class GenericModelReference_V0(declarative_base()):
+ __tablename__ = "core__generic_model_reference"
+
+ id = Column(Integer, primary_key=True)
+ obj_pk = Column(Integer, nullable=False)
+ model_type = Column(Unicode, nullable=False)
+
+@RegisterMigration(27, MIGRATIONS)
+def create_generic_model_reference(db):
+ """ Creates the Generic Model Reference table """
+ GenericModelReference_V0.__table__.create(db.bind)
+ db.commit()
+
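
A GenericModelReference row is nothing more than an (obj_pk, model_type) pair in which model_type is a table name, so dereferencing one is a model lookup by table name followed by a primary-key query. A standalone sketch, assuming a `type_map` built from the declarative registry (as GenericModelReference._get_model_from_type does later in models.py):

    def dereference(session, gmr, type_map):
        # type_map: {__tablename__: model class}; hypothetical helper.
        model = type_map[gmr.model_type]
        return session.query(model).filter_by(id=gmr.obj_pk).first()
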
+@RegisterMigration(28, MIGRATIONS)
+def add_foreign_key_fields(db):
+ """
+ Add the fields for the GenericForeignKey to the model under temporary
+ names so that a data migration can occur later. They will then be
+ renamed to the original names.
+ """
+ metadata = MetaData(bind=db.bind)
+ activity_table = inspect_table(metadata, "core__activities")
+
+ # Create column and add to model.
+ object_column = Column("temp_object", Integer, ForeignKey(GenericModelReference_V0.id))
+ object_column.create(activity_table)
+
+ target_column = Column("temp_target", Integer, ForeignKey(GenericModelReference_V0.id))
+ target_column.create(activity_table)
+
+ # Commit this to the database
+ db.commit()
+
+@RegisterMigration(29, MIGRATIONS)
+def migrate_data_foreign_keys(db):
+ """
+ This will migrate the data from the old object and target attributes,
+ which use the old ActivityIntermediator, to the new temporary fields
+ which use the new GenericForeignKey.
+ """
+
+ metadata = MetaData(bind=db.bind)
+ activity_table = inspect_table(metadata, "core__activities")
+ ai_table = inspect_table(metadata, "core__activity_intermediators")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+
+ # Iterate through all activities doing the migration per activity.
+ for activity in db.execute(activity_table.select()):
+ # First do the "Activity.object" migration to "Activity.temp_object"
+ # I need to get the object from the Activity, I can't use the old
+ # Activity.get_object as we're in a migration.
+ object_ai = db.execute(ai_table.select(
+ ai_table.c.id==activity.object
+ )).first()
+
+ object_ai_type = ActivityIntermediator_R0.TYPES[object_ai.type]
+ object_ai_table = inspect_table(metadata, object_ai_type.__tablename__)
+
+ activity_object = db.execute(object_ai_table.select(
+ object_ai_table.c.activity==object_ai.id
+ )).first()
+
+ # now we need to create the GenericModelReference
+ object_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=activity_object.id,
+ model_type=object_ai_type.__tablename__
+ ))
+
+ # Now set the ID of the GenericModelReference in the GenericForeignKey
+ db.execute(activity_table.update().where(
+ activity_table.c.id==activity.id
+ ).values(
+ temp_object=object_gmr.inserted_primary_key[0]
+ ))
+
+ # Now do the same process for "Activity.target" to "Activity.temp_target";
+ # not all Activities have a target, so if this one doesn't, just skip
+ # the rest of this iteration.
+ if activity.target is None:
+ continue
+
+ # Now get the target for the activity.
+ target_ai = db.execute(ai_table.select(
+ ai_table.c.id==activity.target
+ )).first()
+
+ target_ai_type = ActivityIntermediator_R0.TYPES[target_ai.type]
+ target_ai_table = inspect_table(metadata, target_ai_type.__tablename__)
+
+ activity_target = db.execute(target_ai_table.select(
+ target_ai_table.c.activity==target_ai.id
+ )).first()
+
+ # We now want to create the new target GenericModelReference
+ target_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=activity_target.id,
+ model_type=target_ai_type.__tablename__
+ ))
+
+ # Now set the ID of the GenericModelReference in the GenericForeignKey
+ db.execute(activity_table.update().where(
+ activity_table.c.id==activity.id
+ ).values(
+ temp_target=target_gmr.inserted_primary_key[0]
+ ))
+
+ # Commit to the database.
+ db.commit()
+
+@RegisterMigration(30, MIGRATIONS)
+def rename_and_remove_object_and_target(db):
+ """
+ Renames the new Activity.object and Activity.target fields and removes the
+ old ones.
+ """
+ metadata = MetaData(bind=db.bind)
+ activity_table = inspect_table(metadata, "core__activities")
+
+ # First, let's remove the old fields.
+ old_object_column = activity_table.columns["object"]
+ old_target_column = activity_table.columns["target"]
+
+ # Drop the columns.
+ old_object_column.drop()
+ old_target_column.drop()
+
+ # Now get the new columns.
+ new_object_column = activity_table.columns["temp_object"]
+ new_target_column = activity_table.columns["temp_target"]
+
+ # Rename them to their permanent names.
+ new_object_column.alter(name="object_id")
+ new_target_column.alter(name="target_id")
+
+ # Commit the changes to the database.
+ db.commit()
+
+@RegisterMigration(31, MIGRATIONS)
+def remove_activityintermediator(db):
+ """
+ This removes the old ActivityIntermediator model, which has been
+ superseded by the GenericForeignKey field.
+ """
+ metadata = MetaData(bind=db.bind)
+
+ # Remove the columns which reference the AI
+ collection_table = inspect_table(metadata, "core__collections")
+ collection_ai_column = collection_table.columns["activity"]
+ collection_ai_column.drop()
+
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+ media_entry_ai_column = media_entry_table.columns["activity"]
+ media_entry_ai_column.drop()
+
+ comments_table = inspect_table(metadata, "core__media_comments")
+ comments_ai_column = comments_table.columns["activity"]
+ comments_ai_column.drop()
+
+ user_table = inspect_table(metadata, "core__users")
+ user_ai_column = user_table.columns["activity"]
+ user_ai_column.drop()
+
+ # Drop the table
+ ai_table = inspect_table(metadata, "core__activity_intermediators")
+ ai_table.drop()
+
+ # Commit the changes
+ db.commit()
+
+##
+# Migrations for converting the User model into a Local and Remote User
+# setup.
+##
+
+class LocalUser_V0(declarative_base()):
+ __tablename__ = "core__local_users"
+
+ id = Column(Integer, ForeignKey(User.id), primary_key=True)
+ username = Column(Unicode, nullable=False, unique=True)
+ email = Column(Unicode, nullable=False)
+ pw_hash = Column(Unicode)
+
+ wants_comment_notification = Column(Boolean, default=True)
+ wants_notifications = Column(Boolean, default=True)
+ license_preference = Column(Unicode)
+ uploaded = Column(Integer, default=0)
+ upload_limit = Column(Integer)
+
+class RemoteUser_V0(declarative_base()):
+ __tablename__ = "core__remote_users"
+
+ id = Column(Integer, ForeignKey(User.id), primary_key=True)
+ webfinger = Column(Unicode, unique=True)
+
+@RegisterMigration(32, MIGRATIONS)
+def federation_user_create_tables(db):
+ """
+ Create the LocalUser and RemoteUser tables and add the new shared
+ fields to the User table
+ """
+ # Create tables needed
+ LocalUser_V0.__table__.create(db.bind)
+ RemoteUser_V0.__table__.create(db.bind)
+ db.commit()
+
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+
+ # Create the fields
+ updated_column = Column(
+ "updated",
+ DateTime,
+ default=datetime.datetime.utcnow
+ )
+ updated_column.create(user_table)
+
+ type_column = Column(
+ "type",
+ Unicode
+ )
+ type_column.create(user_table)
+
+ name_column = Column(
+ "name",
+ Unicode
+ )
+ name_column.create(user_table)
+
+ db.commit()
+
+@RegisterMigration(33, MIGRATIONS)
+def federation_user_migrate_data(db):
+ """
+ Migrate the data over to the new user models
+ """
+ metadata = MetaData(bind=db.bind)
+
+ user_table = inspect_table(metadata, "core__users")
+ local_user_table = inspect_table(metadata, "core__local_users")
+
+ for user in db.execute(user_table.select()):
+ db.execute(local_user_table.insert().values(
+ id=user.id,
+ username=user.username,
+ email=user.email,
+ pw_hash=user.pw_hash,
+ wants_comment_notification=user.wants_comment_notification,
+ wants_notifications=user.wants_notifications,
+ license_preference=user.license_preference,
+ uploaded=user.uploaded,
+ upload_limit=user.upload_limit
+ ))
+
+ db.execute(user_table.update().where(user_table.c.id==user.id).values(
+ updated=user.created,
+ type=LocalUser.__mapper_args__["polymorphic_identity"]
+ ))
+
+ db.commit()
+
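
Stamping each row with LocalUser's polymorphic identity matters because the models added later in this commit use SQLAlchemy joined-table inheritance, with User.type as the discriminator. A sketch of what that buys, assuming those models (`some_id` is illustrative):

    # Querying the base class yields LocalUser/RemoteUser instances,
    # discriminated by the User.type column.
    user = User.query.filter_by(id=some_id).first()
    if isinstance(user, LocalUser):
        print(user.username)    # stored on core__local_users
    elif isinstance(user, RemoteUser):
        print(user.webfinger)   # stored on core__remote_users
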
+class User_vR2(declarative_base()):
+ __tablename__ = "rename__users"
+
+ id = Column(Integer, primary_key=True)
+ url = Column(Unicode)
+ bio = Column(UnicodeText)
+ name = Column(Unicode)
+ type = Column(Unicode)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ location = Column(Integer, ForeignKey(Location.id))
+
+@RegisterMigration(34, MIGRATIONS)
+def federation_remove_fields(db):
+ """
+ This removes the fields from the User model which aren't shared between
+ user types
+ """
+ metadata = MetaData(bind=db.bind)
+
+ user_table = inspect_table(metadata, "core__users")
+
+ # Remove the columns moved to LocalUser from User
+ username_column = user_table.columns["username"]
+ username_column.drop()
+
+ email_column = user_table.columns["email"]
+ email_column.drop()
+
+ pw_hash_column = user_table.columns["pw_hash"]
+ pw_hash_column.drop()
+
+ license_preference_column = user_table.columns["license_preference"]
+ license_preference_column.drop()
+
+ uploaded_column = user_table.columns["uploaded"]
+ uploaded_column.drop()
+
+ upload_limit_column = user_table.columns["upload_limit"]
+ upload_limit_column.drop()
+
+ # SQLite can't drop boolean columns.
+ if db.bind.url.drivername == 'sqlite':
+ # Create the new hacky table
+ User_vR2.__table__.create(db.bind)
+ db.commit()
+ new_user_table = inspect_table(metadata, "rename__users")
+ replace_table_hack(db, user_table, new_user_table)
+ else:
+ wcn_column = user_table.columns["wants_comment_notification"]
+ wcn_column.drop()
+
+ wants_notifications_column = user_table.columns["wants_notifications"]
+ wants_notifications_column.drop()
+
+ db.commit()
+
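
Because SQLite cannot drop these columns in place, the migration swaps in the User_vR2 replacement table via replace_table_hack. The sketch below shows the usual shape of that workaround; it is an assumption about what the helper does, not its actual implementation:

    def replace_table_sketch(db, old_table, new_table):
        # Copy the surviving columns across, drop the old table, then
        # rename the replacement into place (sqlalchemy-migrate adds
        # Table.rename). Assumed behaviour of replace_table_hack.
        shared = [c.name for c in new_table.columns]
        for row in db.execute(old_table.select()):
            db.execute(new_table.insert().values(
                **{name: row[name] for name in shared}))
        old_table.drop()
        new_table.rename(old_table.name)
        db.commit()
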
+@RegisterMigration(35, MIGRATIONS)
+def federation_media_entry(db):
+ metadata = MetaData(bind=db.bind)
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+
+ # Add new fields
+ public_id_column = Column(
+ "public_id",
+ Unicode,
+ unique=True,
+ nullable=True
+ )
+ public_id_column.create(
+ media_entry_table,
+ unique_name="media_public_id"
+ )
+
+ remote_column = Column(
+ "remote",
+ Boolean,
+ default=False
+ )
+ remote_column.create(media_entry_table)
+
+ updated_column = Column(
+ "updated",
+ DateTime,
+ default=datetime.datetime.utcnow,
+ )
+ updated_column.create(media_entry_table)
+
+ # Data migration
+ for entry in db.execute(media_entry_table.select()):
+ db.execute(media_entry_table.update().values(
+ updated=entry.created,
+ remote=False
+ ))
+
+ db.commit()
+
+@RegisterMigration(36, MIGRATIONS)
+def create_oauth1_dummies(db):
+ """
+ Creates a dummy client, request and access tokens.
+
+ This is used when invalid data is submitted but real clients and
+ access tokens. The use of dummy objects prevents timing attacks.
+ """
+ metadata = MetaData(bind=db.bind)
+ client_table = inspect_table(metadata, "core__clients")
+ request_token_table = inspect_table(metadata, "core__request_tokens")
+ access_token_table = inspect_table(metadata, "core__access_tokens")
+
+ # While we don't rely on the secret key being unique or unknown to
+ # prevent unauthorized clients from authenticating, as an extra layer
+ # of protection we create a cryptographically secure key, individual
+ # to each instance, which should never become known.
+ client_secret = crypto.random_string(50)
+ request_token_secret = crypto.random_string(50)
+ request_token_verifier = crypto.random_string(50)
+ access_token_secret = crypto.random_string(50)
+
+ # Dummy created/updated datetime object
+ epoch_datetime = datetime.datetime.fromtimestamp(0)
+
+ # Create the dummy Client
+ db.execute(client_table.insert().values(
+ id=oauth.DUMMY_CLIENT_ID,
+ secret=client_secret,
+ application_type="dummy",
+ created=epoch_datetime,
+ updated=epoch_datetime
+ ))
+
+ # Create the dummy RequestToken
+ db.execute(request_token_table.insert().values(
+ token=oauth.DUMMY_REQUEST_TOKEN,
+ secret=request_token_secret,
+ client=oauth.DUMMY_CLIENT_ID,
+ verifier=request_token_verifier,
+ created=epoch_datetime,
+ updated=epoch_datetime,
+ callback="oob"
+ ))
+
+ # Create the dummy AccessToken
+ db.execute(access_token_table.insert().values(
+ token=oauth.DUMMY_ACCESS_TOKEN,
+ secret=access_token_secret,
+ request_token=oauth.DUMMY_REQUEST_TOKEN,
+ created=epoch_datetime,
+ updated=epoch_datetime
+ ))
+
+ # Commit the changes
+ db.commit()
+
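
The dummies give failed lookups the same code path, and roughly the same cost, as successful ones, so response timing doesn't reveal whether a client or token exists. A sketch of how such dummies are typically consumed; this lookup function is illustrative, not this codebase's API:

    def lookup_client(client_id):
        # Illustrative only: fall back to the dummy client on a miss so
        # the same verification work happens either way, and the request
        # still fails at the final validity check.
        client = Client.query.filter_by(id=client_id).first()
        if client is None:
            client = Client.query.filter_by(id=oauth.DUMMY_CLIENT_ID).first()
        return client
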
+@RegisterMigration(37, MIGRATIONS)
+def federation_collection_schema(db):
+ """ Converts the Collection and CollectionItem """
+ metadata = MetaData(bind=db.bind)
+ collection_table = inspect_table(metadata, "core__collections")
+ collection_items_table = inspect_table(metadata, "core__collection_items")
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ ##
+ # Collection Table
+ ##
+
+ # Add the fields onto the Collection model. These initially need to
+ # allow NULL, to avoid DB integrity errors; the not null constraint
+ # is added later, once the data migration has run.
+ public_id_column = Column(
+ "public_id",
+ Unicode,
+ unique=True
+ )
+ public_id_column.create(
+ collection_table,
+ unique_name="collection_public_id")
+
+ updated_column = Column(
+ "updated",
+ DateTime,
+ default=datetime.datetime.utcnow
+ )
+ updated_column.create(collection_table)
+
+ type_column = Column(
+ "type",
+ Unicode,
+ )
+ type_column.create(collection_table)
+
+ db.commit()
+
+ # Iterate over the collections and set the updated and type fields
+ for collection in db.execute(collection_table.select()):
+ db.execute(collection_table.update().where(
+ collection_table.c.id==collection.id
+ ).values(
+ updated=collection.created,
+ type="core-user-defined"
+ ))
+
+ db.commit()
+
+ # Add the not null constraint onto the fields
+ updated_column = collection_table.columns["updated"]
+ updated_column.alter(nullable=False)
+
+ type_column = collection_table.columns["type"]
+ type_column.alter(nullable=False)
+
+ db.commit()
+
+ # Rename the "items" to "num_items" as per the TODO
+ num_items_field = collection_table.columns["items"]
+ num_items_field.alter(name="num_items")
+ db.commit()
+
+ ##
+ # CollectionItem
+ ##
+ # Add the object ID column; this again will have the not null
+ # constraint added later.
+ object_id = Column(
+ "object_id",
+ Integer,
+ ForeignKey(GenericModelReference_V0.id),
+ )
+ object_id.create(
+ collection_items_table,
+ )
+
+ db.commit()
+
+ # Iterate through and convert the Media reference to object_id
+ for item in db.execute(collection_items_table.select()):
+ # Check if there is a GMR for the MediaEntry
+ object_gmr = db.execute(gmr_table.select(
+ and_(
+ gmr_table.c.obj_pk == item.media_entry,
+ gmr_table.c.model_type == "core__media_entries"
+ )
+ )).first()
+
+ if object_gmr:
+ object_gmr = object_gmr[0]
+ else:
+ # Create a GenericModelReference
+ object_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=item.media_entry,
+ model_type="core__media_entries"
+ )).inserted_primary_key[0]
+
+ # Now set the object_id column to the ID of the GMR
+ db.execute(collection_items_table.update().where(
+ collection_items_table.c.id==item.id
+ ).values(
+ object_id=object_gmr
+ ))
+
+ db.commit()
+
+ # Add not null constraint
+ object_id = collection_items_table.columns["object_id"]
+ object_id.alter(nullable=False)
+
+ db.commit()
+
+ # Now remove the old media_entry column
+ media_entry_column = collection_items_table.columns["media_entry"]
+ media_entry_column.drop()
+
+ db.commit()
+
+@RegisterMigration(38, MIGRATIONS)
+def federation_actor(db):
+ """ Renames refereces to the user to actor """
+ metadata = MetaData(bind=db.bind)
+
+ # RequestToken: user -> actor
+ request_token_table = inspect_table(metadata, "core__request_tokens")
+ rt_user_column = request_token_table.columns["user"]
+ rt_user_column.alter(name="actor")
+
+ # AccessToken: user -> actor
+ access_token_table = inspect_table(metadata, "core__access_tokens")
+ at_user_column = access_token_table.columns["user"]
+ at_user_column.alter(name="actor")
+
+ # MediaEntry: uploader -> actor
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+ me_user_column = media_entry_table.columns["uploader"]
+ me_user_column.alter(name="actor")
+
+ # MediaComment: author -> actor
+ media_comment_table = inspect_table(metadata, "core__media_comments")
+ mc_user_column = media_comment_table.columns["author"]
+ mc_user_column.alter(name="actor")
+
+ # Collection: creator -> actor
+ collection_table = inspect_table(metadata, "core__collections")
+ collection_user_column = collection_table.columns["creator"]
+ collection_user_column.alter(name="actor")
+
+ # commit changes to db.
+ db.commit()
+
+class Graveyard_V0(declarative_base()):
+ """ Where models come to die """
+ __tablename__ = "core__graveyard"
+
+ id = Column(Integer, primary_key=True)
+ public_id = Column(Unicode, nullable=True, unique=True)
+
+ deleted = Column(DateTime, nullable=False)
+ object_type = Column(Unicode, nullable=False)
+
+ actor_id = Column(Integer, ForeignKey(GenericModelReference_V0.id))
+
+@RegisterMigration(39, MIGRATIONS)
+def federation_graveyard(db):
+ """ Introduces soft deletion to models
+
+ This adds a Graveyard model which (soft-)deleted models are copied to.
+ """
+ metadata = MetaData(bind=db.bind)
+
+ # Create the graveyard table
+ Graveyard_V0.__table__.create(db.bind)
+
+ # Commit changes to the db
+ db.commit()
+
+@RegisterMigration(40, MIGRATIONS)
+def add_public_id(db):
+ metadata = MetaData(bind=db.bind)
+
+ # Get the table
+ activity_table = inspect_table(metadata, "core__activities")
+ activity_public_id = Column(
+ "public_id",
+ Unicode,
+ unique=True,
+ nullable=True
+ )
+ activity_public_id.create(
+ activity_table,
+ unique_name="activity_public_id"
+ )
+
+ # Commit this.
+ db.commit()
+
+class Comment_V0(declarative_base()):
+ __tablename__ = "core__comment_links"
+
+ id = Column(Integer, primary_key=True)
+ target_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference_V0.id),
+ nullable=False
+ )
+ comment_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference_V0.id),
+ nullable=False
+ )
+ added = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
+
+@RegisterMigration(41, MIGRATIONS)
+def federation_comments(db):
+ """
+ This reworks the MediaComment to be a more generic Comment model.
+ """
+ metadata = MetaData(bind=db.bind)
+ textcomment_table = inspect_table(metadata, "core__media_comments")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # First of all add the public_id field to the TextComment table
+ comment_public_id_column = Column(
+ "public_id",
+ Unicode,
+ unique=True
+ )
+ comment_public_id_column.create(
+ textcomment_table,
+ unique_name="public_id_unique"
+ )
+
+ comment_updated_column = Column(
+ "updated",
+ DateTime,
+ )
+ comment_updated_column.create(textcomment_table)
+
+
+ # Now create the Comment link table.
+ Comment_V0.__table__.create(db.bind)
+ db.commit()
+
+ # now look up the comment table
+ comment_table = inspect_table(metadata, "core__comment_links")
+
+ # Iterate over all the comments and add them to the link table.
+ for comment in db.execute(textcomment_table.select()):
+ # Check if there is a GMR to the comment.
+ comment_gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment.id,
+ gmr_table.c.model_type == "core__media_comments"
+ ))).first()
+
+ if comment_gmr:
+ comment_gmr = comment_gmr[0]
+ else:
+ comment_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment.id,
+ model_type="core__media_comments"
+ )).inserted_primary_key[0]
+
+ # Get or create the GMR for the media entry
+ entry_gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment.media_entry,
+ gmr_table.c.model_type == "core__media_entries"
+ ))).first()
+
+ if entry_gmr:
+ entry_gmr = entry_gmr[0]
+ else:
+ entry_gmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment.media_entry,
+ model_type="core__media_entries"
+ )).inserted_primary_key[0]
+
+ # Add the comment link.
+ db.execute(comment_table.insert().values(
+ target_id=entry_gmr,
+ comment_id=comment_gmr,
+ added=datetime.datetime.utcnow()
+ ))
+
+ # Add the data to the updated field
+ db.execute(textcomment_table.update().where(
+ textcomment_table.c.id == comment.id
+ ).values(
+ updated=comment.created
+ ))
+ db.commit()
+
+ # Add not null constraint
+ textcomment_update_column = textcomment_table.columns["updated"]
+ textcomment_update_column.alter(nullable=False)
+
+ # Remove the unused fields on the TextComment model
+ comment_media_entry_column = textcomment_table.columns["media_entry"]
+ comment_media_entry_column.drop()
+ db.commit()
+
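
Migrations 37 and 41-43 all repeat the same get-or-create dance for GenericModelReference rows. The repeated pattern could be captured in a small helper like this hypothetical one (not part of the commit):

    def find_or_create_gmr(db, gmr_table, obj_pk, model_type):
        # Return the id of an existing GMR row for (obj_pk, model_type),
        # inserting a new row if none exists yet.
        existing = db.execute(gmr_table.select().where(and_(
            gmr_table.c.obj_pk == obj_pk,
            gmr_table.c.model_type == model_type
        ))).first()
        if existing:
            return existing[0]
        return db.execute(gmr_table.insert().values(
            obj_pk=obj_pk,
            model_type=model_type
        )).inserted_primary_key[0]
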
+@RegisterMigration(42, MIGRATIONS)
+def consolidate_reports(db):
+ """ Consolidates the report tables into just one """
+ metadata = MetaData(bind=db.bind)
+
+ report_table = inspect_table(metadata, "core__reports")
+ comment_report_table = inspect_table(metadata, "core__reports_on_comments")
+ media_report_table = inspect_table(metadata, "core__reports_on_media")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Add the GMR object field onto the base report table
+ report_object_id_column = Column(
+ "object_id",
+ Integer,
+ ForeignKey(GenericModelReference_V0.id),
+ )
+ report_object_id_column.create(report_table)
+ db.commit()
+
+ # Iterate through the reports in the comment table and merge them in.
+ for comment_report in db.execute(comment_report_table.select()):
+ # Find a GMR for this if one exists.
+ crgmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment_report.comment_id,
+ gmr_table.c.model_type == "core__media_comments"
+ ))).first()
+
+ if crgmr:
+ crgmr = crgmr[0]
+ else:
+ crgmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment_report.comment_id,
+ model_type="core__media_comments"
+ )).inserted_primary_key[0]
+
+ # Great now we can save this back onto the (base) report.
+ db.execute(report_table.update().where(
+ report_table.c.id == comment_report.id
+ ).values(
+ object_id=crgmr
+ ))
+
+ # Iterate through the media reports and do the same as above.
+ for media_report in db.execute(media_report_table.select()):
+ # Find Mr. GMR :)
+ mrgmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == media_report.media_entry_id,
+ gmr_table.c.model_type == "core__media_entries"
+ ))).first()
+
+ if mrgmr:
+ mrgmr = mrgmr[0]
+ else:
+ mrgmr = db.execute(gmr_table.insert().values(
+ obj_pk=media_report.media_entry_id,
+ model_type="core__media_entries"
+ )).inserted_primary_key[0]
+
+ # Save back on to the base.
+ db.execute(report_table.update().where(
+ report_table.c.id == media_report.id
+ ).values(
+ object_id=mrgmr
+ ))
+
+ db.commit()
+
+ # Add the not null constraint
+ report_object_id = report_table.columns["object_id"]
+ report_object_id.alter(nullable=False)
+
+ # Now we can remove the fields we don't need anymore
+ report_type = report_table.columns["type"]
+ report_type.drop()
+
+ # Drop both MediaReports and CommentTable.
+ comment_report_table.drop()
+ media_report_table.drop()
+
+ # Commit, we're done.
+ db.commit()
+
+@RegisterMigration(43, MIGRATIONS)
+def consolidate_notification(db):
+ """ Consolidates the notification models into one """
+ metadata = MetaData(bind=db.bind)
+ notification_table = inspect_table(metadata, "core__notifications")
+ cn_table = inspect_table(metadata, "core__comment_notifications")
+ cp_table = inspect_table(metadata, "core__processing_notifications")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Add fields needed
+ notification_object_id_column = Column(
+ "object_id",
+ Integer,
+ ForeignKey(GenericModelReference_V0.id)
+ )
+ notification_object_id_column.create(notification_table)
+ db.commit()
+
+ # Iterate over comments and move to notification base table.
+ for comment_notification in db.execute(cn_table.select()):
+ # Find the GMR.
+ cngmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment_notification.subject_id,
+ gmr_table.c.model_type == "core__media_comments"
+ ))).first()
+
+ if cngmr:
+ cngmr = cngmr[0]
+ else:
+ cngmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment_notification.subject_id,
+ model_type="core__media_comments"
+ )).inserted_primary_key[0]
+
+ # Save back on notification
+ db.execute(notification_table.update().where(
+ notification_table.c.id == comment_notification.id
+ ).values(
+ object_id=cngmr
+ ))
+ db.commit()
+
+ # Do the same for processing notifications
+ for processing_notification in db.execute(cp_table.select()):
+ cpgmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == processing_notification.subject_id,
+ gmr_table.c.model_type == "core__processing_notifications"
+ ))).first()
+
+ if cpgmr:
+ cpgmr = cpgmr[0]
+ else:
+ cpgmr = db.execute(gmr_table.insert().values(
+ obj_pk=processing_notification.subject_id,
+ model_type="core__processing_notifications"
+ )).inserted_primary_key[0]
+
+ db.execute(notification_table.update().where(
+ notification_table.c.id == processing_notification.id
+ ).values(
+ object_id=cpgmr
+ ))
+ db.commit()
+
+ # Add the not null constraint
+ notification_object_id = notification_table.columns["object_id"]
+ notification_object_id.alter(nullable=False)
+
+ # Now drop the fields we don't need
+ notification_type_column = notification_table.columns["type"]
+ notification_type_column.drop()
+
+ # Drop the tables we no longer need
+ cp_table.drop()
+ cn_table.drop()
+
+ db.commit()
diff --git a/mediagoblin/db/mixin.py b/mediagoblin/db/mixin.py
index 4602c709..ecd04874 100644
--- a/mediagoblin/db/mixin.py
+++ b/mediagoblin/db/mixin.py
@@ -41,6 +41,82 @@ from mediagoblin.tools.text import cleaned_markdown_conversion
from mediagoblin.tools.url import slugify
from mediagoblin.tools.translate import pass_to_ugettext as _
+class CommentingMixin(object):
+ """
+ Mixin that gives classes methods to get and add the comments on/to them
+
+ This assumes the model has a "comments" field which is a ForeignKey to
+ the Collection model. This holds a Collection of comments which are
+ associated with this model. It also assumes the model has an "actor"
+ ForeignKey which points to the creator/publisher/etc. of the model.
+
+ NB: This is NOT the mixin for the Comment Model, this is for
+ other models which support commenting.
+ """
+
+ def get_comment_link(self):
+ # Import here to avoid cyclic imports
+ from mediagoblin.db.models import Comment, GenericModelReference
+
+ gmr = GenericModelReference.query.filter_by(
+ obj_pk=self.id,
+ model_type=self.__tablename__
+ ).first()
+
+ if gmr is None:
+ return None
+
+ link = Comment.query.filter_by(comment_id=gmr.id).first()
+ return link
+
+ def get_reply_to(self):
+ link = self.get_comment_link()
+ if link is None or link.target_id is None:
+ return None
+
+ return link.target()
+
+ def soft_delete(self, *args, **kwargs):
+ link = self.get_comment_link()
+ if link is not None:
+ link.delete()
+ super(CommentingMixin, self).soft_delete(*args, **kwargs)
+
+class GeneratePublicIDMixin(object):
+ """
+ Mixin that ensures the public_id field is populated.
+
+ The public_id is the ID used in the API; it must be globally unique and
+ dereferenceable, and it will be the URL for the API view of the object.
+ It's used in several places: not only is it given out via the API, it's
+ also vital information stored on the `Graveyard.public_id` field when a
+ soft deletion occurs. That is needed to follow the spec, which says we
+ have to be able to provide a shell of an object and return a 410
+ (rather than a 404) when a deleted object is requested.
+
+ This requires the urlgen from the request object (`request.urlgen`) to
+ be provided, as the ID is a URL.
+ """
+
+ def get_public_id(self, urlgen):
+ # Verify that the class this is on actually has a public_id field...
+ if "public_id" not in self.__table__.columns.keys():
+ raise Exception("Model has no public_id field")
+
+ # Great! The model has a public_id; if it's None, let's create one.
+ if self.public_id is None:
+ # We need the internal ID for this so ensure we've been saved.
+ self.save(commit=False)
+
+ # Create the URL
+ self.public_id = urlgen(
+ "mediagoblin.api.object",
+ object_type=self.object_type,
+ id=str(uuid.uuid4()),
+ qualified=True
+ )
+ self.save()
+ return self.public_id
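
In use, a view calls this with the request's urlgen: the first call mints a globally unique URL (an api.object route with a fresh UUID) and stores it; later calls return the stored value. A short illustrative sketch:

    # Illustrative view-side usage:
    public_id = entry.get_public_id(request.urlgen)
    # The first call produces something like
    #   https://<host>/api/image/<uuid4>/
    # and saves it on the model; subsequent calls return the same URL.
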
class UserMixin(object):
object_type = "person"
@@ -52,6 +128,7 @@ class UserMixin(object):
def url_for_self(self, urlgen, **kwargs):
"""Generate a URL for this User's home page."""
return urlgen('mediagoblin.user_pages.user_home',
+
user=self.username, **kwargs)
@@ -128,13 +205,13 @@ class GenerateSlugMixin(object):
self.slug = slug
-class MediaEntryMixin(GenerateSlugMixin):
+class MediaEntryMixin(GenerateSlugMixin, GeneratePublicIDMixin):
def check_slug_used(self, slug):
# import this here due to a cyclic import issue
# (db.models -> db.mixin -> db.util -> db.models)
from mediagoblin.db.util import check_media_slug_used
- return check_media_slug_used(self.uploader, slug, self.id)
+ return check_media_slug_used(self.actor, slug, self.id)
@property
def object_type(self):
@@ -188,7 +265,7 @@ class MediaEntryMixin(GenerateSlugMixin):
Use a slug if we have one, else use our 'id'.
"""
- uploader = self.get_uploader
+ uploader = self.get_actor
return urlgen(
'mediagoblin.user_pages.media_home',
@@ -307,7 +384,7 @@ class MediaEntryMixin(GenerateSlugMixin):
return exif_short
-class MediaCommentMixin(object):
+class TextCommentMixin(GeneratePublicIDMixin):
object_type = "comment"
@property
@@ -319,21 +396,20 @@ class MediaCommentMixin(object):
return cleaned_markdown_conversion(self.content)
def __unicode__(self):
- return u'<{klass} #{id} {author} "{comment}">'.format(
+ return u'<{klass} #{id} {actor} "{comment}">'.format(
klass=self.__class__.__name__,
id=self.id,
- author=self.get_author,
+ actor=self.get_actor,
comment=self.content)
def __repr__(self):
- return '<{klass} #{id} {author} "{comment}">'.format(
+ return '<{klass} #{id} {actor} "{comment}">'.format(
klass=self.__class__.__name__,
id=self.id,
- author=self.get_author,
+ actor=self.get_actor,
comment=self.content)
-
-class CollectionMixin(GenerateSlugMixin):
+class CollectionMixin(GenerateSlugMixin, GeneratePublicIDMixin):
object_type = "collection"
def check_slug_used(self, slug):
@@ -341,7 +417,7 @@ class CollectionMixin(GenerateSlugMixin):
# (db.models -> db.mixin -> db.util -> db.models)
from mediagoblin.db.util import check_collection_slug_used
- return check_collection_slug_used(self.creator, slug, self.id)
+ return check_collection_slug_used(self.actor, slug, self.id)
@property
def description_html(self):
@@ -361,7 +437,7 @@ class CollectionMixin(GenerateSlugMixin):
Use a slug if we have one, else use our 'id'.
"""
- creator = self.get_creator
+ creator = self.get_actor
return urlgen(
'mediagoblin.user_pages.user_collection',
@@ -369,6 +445,28 @@ class CollectionMixin(GenerateSlugMixin):
collection=self.slug_or_id,
**extra_args)
+ def add_to_collection(self, obj, content=None, commit=True):
+ """ Adds an object to the collection """
+ # It's here to prevent cyclic imports
+ from mediagoblin.db.models import CollectionItem
+
+ # Need the ID of this collection for this so check we've got one.
+ self.save(commit=False)
+
+ # Create the CollectionItem
+ item = CollectionItem()
+ item.collection = self.id
+ item.get_object = obj
+
+ if content is not None:
+ item.note = content
+
+ self.num_items = self.num_items + 1
+
+ # Save both!
+ self.save(commit=commit)
+ item.save(commit=commit)
+ return item
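
Since CollectionItem now references its object through a GenericModelReference, anything with an integer primary key can be collected, not just MediaEntry. A minimal usage sketch (the variables are illustrative):

    # Illustrative: add a media entry and a comment to one collection.
    collection.add_to_collection(media_entry, content=u"A note about it")
    collection.add_to_collection(text_comment)
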
class CollectionItemMixin(object):
@property
@@ -379,7 +477,7 @@ class CollectionItemMixin(object):
"""
return cleaned_markdown_conversion(self.note)
-class ActivityMixin(object):
+class ActivityMixin(GeneratePublicIDMixin):
object_type = "activity"
VALID_VERBS = ["add", "author", "create", "delete", "dislike", "favorite",
@@ -432,13 +530,12 @@ class ActivityMixin(object):
"audio": _("audio"),
"person": _("a person"),
}
-
- obj = self.get_object
- target = self.get_target
+ obj = self.object()
+ target = None if self.target_id is None else self.target()
actor = self.get_actor
content = verb_to_content.get(self.verb, None)
if content is None or obj is None:
return
# Decide what to fill the object with
@@ -488,7 +585,7 @@ class ActivityMixin(object):
"updated": updated.isoformat(),
"content": self.content,
"url": self.get_url(request),
- "object": self.get_object.serialize(request),
+ "object": self.object().serialize(request),
"objectType": self.object_type,
"links": {
"self": {
@@ -503,9 +600,8 @@ class ActivityMixin(object):
if self.title:
obj["title"] = self.title
- target = self.get_target
- if target is not None:
- obj["target"] = target.serialize(request)
+ if self.target_id is not None:
+ obj["target"] = self.target().serialize(request)
return obj
diff --git a/mediagoblin/db/models.py b/mediagoblin/db/models.py
index e8fb17a7..77f8a1b8 100644
--- a/mediagoblin/db/models.py
+++ b/mediagoblin/db/models.py
@@ -25,28 +25,129 @@ import datetime
from sqlalchemy import Column, Integer, Unicode, UnicodeText, DateTime, \
Boolean, ForeignKey, UniqueConstraint, PrimaryKeyConstraint, \
- SmallInteger, Date
-from sqlalchemy.orm import relationship, backref, with_polymorphic, validates
+ SmallInteger, Date, types
+from sqlalchemy.orm import relationship, backref, with_polymorphic, validates, \
+ class_mapper
from sqlalchemy.orm.collections import attribute_mapped_collection
+from sqlalchemy.sql import and_
from sqlalchemy.sql.expression import desc
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.util import memoized_property
from mediagoblin.db.extratypes import (PathTupleWithSlashes, JSONEncoded,
MutationDict)
-from mediagoblin.db.base import Base, DictReadAttrProxy
+from mediagoblin.db.base import Base, DictReadAttrProxy, FakeCursor
from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, \
- MediaCommentMixin, CollectionMixin, CollectionItemMixin, \
- ActivityMixin
+ CollectionMixin, CollectionItemMixin, ActivityMixin, TextCommentMixin, \
+ CommentingMixin
from mediagoblin.tools.files import delete_media_files
from mediagoblin.tools.common import import_component
from mediagoblin.tools.routing import extract_url_arguments
import six
+from six.moves.urllib.parse import urljoin
from pytz import UTC
_log = logging.getLogger(__name__)
+class GenericModelReference(Base):
+ """
+ Represents a relationship to any model that is defined with an integer pk
+ """
+ __tablename__ = "core__generic_model_reference"
+
+ id = Column(Integer, primary_key=True)
+ obj_pk = Column(Integer, nullable=False)
+
+ # This will be the tablename of the model
+ model_type = Column(Unicode, nullable=False)
+
+ # Constrain it so the (model_type, obj_pk) pair has to be unique.
+ # They should be in this order: as the index is generated, "model_type"
+ # will be the major sort key since it's put first.
+ __table_args__ = (
+ UniqueConstraint("model_type", "obj_pk"),
+ {})
+
+ def get_object(self):
+ # This can happen if it's yet to be saved
+ if self.model_type is None or self.obj_pk is None:
+ return None
+
+ model = self._get_model_from_type(self.model_type)
+ return model.query.filter_by(id=self.obj_pk).first()
+
+ def set_object(self, obj):
+ model = obj.__class__
+
+ # Check we've been given an object
+ if not issubclass(model, Base):
+ raise ValueError("Only models can be set using the GMR")
+
+ # Check that the model has an explicit __tablename__ declaration
+ if getattr(model, "__tablename__", None) is None:
+ raise ValueError("Models must have __tablename__ attribute")
+
+ # Check that it's not a composite primary key
+ primary_keys = [key.name for key in class_mapper(model).primary_key]
+ if len(primary_keys) > 1:
+ raise ValueError("Models can not have composite primary keys")
+
+ # Check that the field on the model is an integer field
+ pk_column = getattr(model, primary_keys[0])
+ if not isinstance(pk_column.type, Integer):
+ raise ValueError("Only models with integer pks can be set")
+
+ if getattr(obj, pk_column.key) is None:
+ obj.save(commit=False)
+
+ self.obj_pk = getattr(obj, pk_column.key)
+ self.model_type = obj.__tablename__
+
+ def _get_model_from_type(self, model_type):
+ """ Gets a model from a tablename (model type) """
+ if getattr(type(self), "_TYPE_MAP", None) is None:
+ # We want to build, on the class (not the instance), a map of all
+ # the models keyed by table name (type) for easy lookup. This is
+ # done on the class so it can be shared between all instances.
+ registry = dict(Base._decl_class_registry).values()
+ self._TYPE_MAP = dict(
+ ((m.__tablename__, m) for m in registry if hasattr(m, "__tablename__"))
+ )
+ setattr(type(self), "_TYPE_MAP", self._TYPE_MAP)
+
+ return self.__class__._TYPE_MAP[model_type]
+
+ @classmethod
+ def find_for_obj(cls, obj):
+ """ Finds a GMR for an object or returns None """
+ # Is there one for this already.
+ model = type(obj)
+ pk = getattr(obj, "id")
+
+ gmr = cls.query.filter_by(
+ obj_pk=pk,
+ model_type=model.__tablename__
+ )
+
+ return gmr.first()
+
+ @classmethod
+ def find_or_new(cls, obj):
+ """ Finds an existing GMR or creates a new one for the object """
+ gmr = cls.find_for_obj(obj)
+
+ # If there isn't one already create one
+ if gmr is None:
+ gmr = cls(
+ obj_pk=obj.id,
+ model_type=type(obj).__tablename__
+ )
+
+ return gmr
+
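
set_object runs the checks above and then stores the (obj_pk, model_type) pair; together with find_or_new it is what the association proxies on Comment and CollectionItem use under the hood. A brief sketch with an illustrative media_entry:

    gmr = GenericModelReference()
    gmr.set_object(media_entry)  # saves the entry first if it has no pk yet
    assert gmr.model_type == media_entry.__tablename__
    assert gmr.obj_pk == media_entry.id
    obj = gmr.get_object()       # dereference via the tablename -> model map
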
class Location(Base):
""" Represents a physical location """
__tablename__ = "core__locations"
@@ -123,50 +224,60 @@ class Location(Base):
class User(Base, UserMixin):
"""
- TODO: We should consider moving some rarely used fields
- into some sort of "shadow" table.
+ Base user that is common amongst LocalUser and RemoteUser.
+
+ This holds all the fields which are common between both the Local and Remote
+ user models.
+
+ NB: ForeignKeys should reference this User model and NOT the LocalUser or
+ RemoteUser models.
"""
__tablename__ = "core__users"
id = Column(Integer, primary_key=True)
- username = Column(Unicode, nullable=False, unique=True)
- # Note: no db uniqueness constraint on email because it's not
- # reliable (many email systems case insensitive despite against
- # the RFC) and because it would be a mess to implement at this
- # point.
- email = Column(Unicode, nullable=False)
- pw_hash = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
- # Intented to be nullable=False, but migrations would not work for it
- # set to nullable=True implicitly.
- wants_comment_notification = Column(Boolean, default=True)
- wants_notifications = Column(Boolean, default=True)
- license_preference = Column(Unicode)
url = Column(Unicode)
- bio = Column(UnicodeText) # ??
- uploaded = Column(Integer, default=0)
- upload_limit = Column(Integer)
+ bio = Column(UnicodeText)
+ name = Column(Unicode)
+
+ # This is required for the polymorphic inheritance
+ type = Column(Unicode)
+
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
location = Column(Integer, ForeignKey("core__locations.id"))
+
+ # Lazy getters
get_location = relationship("Location", lazy="joined")
- activity = Column(Integer, ForeignKey("core__activity_intermediators.id"))
+ __mapper_args__ = {
+ 'polymorphic_identity': 'user',
+ 'polymorphic_on': type,
+ }
- ## TODO
- # plugin data would be in a separate model
+ deletion_mode = Base.SOFT_DELETE
- def __repr__(self):
- return '<{0} #{1} {2} {3} "{4}">'.format(
- self.__class__.__name__,
- self.id,
- 'verified' if self.has_privilege(u'active') else 'non-verified',
- 'admin' if self.has_privilege(u'admin') else 'user',
- self.username)
+ def soft_delete(self, *args, **kwargs):
+ # Find all the Collections and delete those
+ for collection in Collection.query.filter_by(actor=self.id):
+ collection.delete(**kwargs)
+
+ # Find all the comments and delete those too
+ for comment in TextComment.query.filter_by(actor=self.id):
+ comment.delete(**kwargs)
+
+ # Find all the activities and delete those too
+ for activity in Activity.query.filter_by(actor=self.id):
+ activity.delete(**kwargs)
+
+ super(User, self).soft_delete(*args, **kwargs)
- def delete(self, **kwargs):
+
+ def delete(self, *args, **kwargs):
"""Deletes a User and all related entries/comments/files/..."""
# Collections get deleted by relationships.
- media_entries = MediaEntry.query.filter(MediaEntry.uploader == self.id)
+ media_entries = MediaEntry.query.filter(MediaEntry.actor == self.id)
for media in media_entries:
# TODO: Make sure that "MediaEntry.delete()" also deletes
# all related files/Comments
@@ -178,8 +289,9 @@ class User(Base, UserMixin):
clean_orphan_tags(commit=False)
# Delete user, pass through commit=False/True in kwargs
- super(User, self).delete(**kwargs)
- _log.info('Deleted user "{0}" account'.format(self.username))
+ username = self.username
+ super(User, self).delete(*args, **kwargs)
+ _log.info('Deleted user "{0}" account'.format(username))
def has_privilege(self, privilege, allow_admin=True):
"""
@@ -212,19 +324,79 @@ class User(Base, UserMixin):
"""
return UserBan.query.get(self.id) is not None
-
def serialize(self, request):
published = UTC.localize(self.created)
+ updated = UTC.localize(self.updated)
user = {
- "id": "acct:{0}@{1}".format(self.username, request.host),
"published": published.isoformat(),
- "preferredUsername": self.username,
- "displayName": "{0}@{1}".format(self.username, request.host),
+ "updated": updated.isoformat(),
"objectType": self.object_type,
"pump_io": {
"shared": False,
"followed": False,
},
+ }
+
+ if self.bio:
+ user.update({"summary": self.bio})
+ if self.url:
+ user.update({"url": self.url})
+ if self.location:
+ user.update({"location": self.get_location.serialize(request)})
+
+ return user
+
+ def unserialize(self, data):
+ if "summary" in data:
+ self.bio = data["summary"]
+
+ if "location" in data:
+ Location.create(data, self)
+
+class LocalUser(User):
+ """ This represents a user registered on this instance """
+ __tablename__ = "core__local_users"
+
+ id = Column(Integer, ForeignKey("core__users.id"), primary_key=True)
+ username = Column(Unicode, nullable=False, unique=True)
+ # Note: no db uniqueness constraint on email because it's not
+ # reliable (many email systems case insensitive despite against
+ # the RFC) and because it would be a mess to implement at this
+ # point.
+ email = Column(Unicode, nullable=False)
+ pw_hash = Column(Unicode)
+
+ # Intended to be nullable=False, but migrations would not work for
+ # it, so it is set to nullable=True implicitly.
+ wants_comment_notification = Column(Boolean, default=True)
+ wants_notifications = Column(Boolean, default=True)
+ license_preference = Column(Unicode)
+ uploaded = Column(Integer, default=0)
+ upload_limit = Column(Integer)
+
+ __mapper_args__ = {
+ "polymorphic_identity": "user_local",
+ }
+
+ ## TODO
+ # plugin data would be in a separate model
+
+ def __repr__(self):
+ return '<{0} #{1} {2} {3} "{4}">'.format(
+ self.__class__.__name__,
+ self.id,
+ 'verified' if self.has_privilege(u'active') else 'non-verified',
+ 'admin' if self.has_privilege(u'admin') else 'user',
+ self.username)
+
+ def get_public_id(self, host):
+ return "acct:{0}@{1}".format(self.username, host)
+
+ def serialize(self, request):
+ user = {
+ "id": self.get_public_id(request.host),
+ "preferredUsername": self.username,
+ "displayName": self.get_public_id(request.host).split(":", 1)[1],
"links": {
"self": {
"href": request.urlgen(
@@ -250,21 +422,27 @@ class User(Base, UserMixin):
},
}
- if self.bio:
- user.update({"summary": self.bio})
- if self.url:
- user.update({"url": self.url})
- if self.location:
- user.update({"location": self.get_location.serialize(request)})
-
+ user.update(super(LocalUser, self).serialize(request))
return user
- def unserialize(self, data):
- if "summary" in data:
- self.bio = data["summary"]
+class RemoteUser(User):
+ """ User that is on another (remote) instance """
+ __tablename__ = "core__remote_users"
+
+ id = Column(Integer, ForeignKey("core__users.id"), primary_key=True)
+ webfinger = Column(Unicode, unique=True)
+
+ __mapper_args__ = {
+ 'polymorphic_identity': 'user_remote'
+ }
+
+ def __repr__(self):
+ return "<{0} #{1} {2}>".format(
+ self.__class__.__name__,
+ self.id,
+ self.webfinger
+ )
- if "location" in data:
- Location.create(data, self)
class Client(Base):
"""
@@ -300,7 +478,7 @@ class RequestToken(Base):
token = Column(Unicode, primary_key=True)
secret = Column(Unicode, nullable=False)
client = Column(Unicode, ForeignKey(Client.id))
- user = Column(Integer, ForeignKey(User.id), nullable=True)
+ actor = Column(Integer, ForeignKey(User.id), nullable=True)
used = Column(Boolean, default=False)
authenticated = Column(Boolean, default=False)
verifier = Column(Unicode, nullable=True)
@@ -318,7 +496,7 @@ class AccessToken(Base):
token = Column(Unicode, nullable=False, primary_key=True)
secret = Column(Unicode, nullable=False)
- user = Column(Integer, ForeignKey(User.id))
+ actor = Column(Integer, ForeignKey(User.id))
request_token = Column(Unicode, ForeignKey(RequestToken.token))
created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
@@ -335,18 +513,19 @@ class NonceTimestamp(Base):
nonce = Column(Unicode, nullable=False, primary_key=True)
timestamp = Column(DateTime, nullable=False, primary_key=True)
-class MediaEntry(Base, MediaEntryMixin):
+class MediaEntry(Base, MediaEntryMixin, CommentingMixin):
"""
TODO: Consider fetching the media_files using join
"""
__tablename__ = "core__media_entries"
id = Column(Integer, primary_key=True)
- uploader = Column(Integer, ForeignKey(User.id), nullable=False, index=True)
+ public_id = Column(Unicode, unique=True, nullable=True)
+ remote = Column(Boolean, default=False)
+
+ actor = Column(Integer, ForeignKey(User.id), nullable=False, index=True)
title = Column(Unicode, nullable=False)
slug = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow,
- index=True)
description = Column(UnicodeText) # ??
media_type = Column(Unicode, nullable=False)
state = Column(Unicode, default=u'unprocessed', nullable=False)
@@ -356,6 +535,10 @@ class MediaEntry(Base, MediaEntryMixin):
location = Column(Integer, ForeignKey("core__locations.id"))
get_location = relationship("Location", lazy="joined")
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow,
+ index=True)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
fail_error = Column(Unicode)
fail_metadata = Column(JSONEncoded)
@@ -366,10 +549,12 @@ class MediaEntry(Base, MediaEntryMixin):
queued_task_id = Column(Unicode)
__table_args__ = (
- UniqueConstraint('uploader', 'slug'),
+ UniqueConstraint('actor', 'slug'),
{})
- get_uploader = relationship(User)
+ deletion_mode = Base.SOFT_DELETE
+
+ get_actor = relationship(User)
media_files_helper = relationship("MediaFile",
collection_class=attribute_mapped_collection("name"),
@@ -395,28 +580,41 @@ class MediaEntry(Base, MediaEntryMixin):
creator=lambda v: MediaTag(name=v["name"], slug=v["slug"])
)
- collections_helper = relationship("CollectionItem",
- cascade="all, delete-orphan"
- )
- collections = association_proxy("collections_helper", "in_collection")
media_metadata = Column(MutationDict.as_mutable(JSONEncoded),
default=MutationDict())
- activity = Column(Integer, ForeignKey("core__activity_intermediators.id"))
-
## TODO
# fail_error
+ @property
+ def collections(self):
+ """ Get any collections that this MediaEntry is in """
+ return list(Collection.query.join(Collection.collection_items).join(
+ CollectionItem.object_helper
+ ).filter(
+ and_(
+ GenericModelReference.model_type == self.__tablename__,
+ GenericModelReference.obj_pk == self.id
+ )
+ ))
+
def get_comments(self, ascending=False):
- order_col = MediaComment.created
- if not ascending:
- order_col = desc(order_col)
- return self.all_comments.order_by(order_col)
+ query = Comment.query.join(Comment.target_helper).filter(and_(
+ GenericModelReference.obj_pk == self.id,
+ GenericModelReference.model_type == self.__tablename__
+ ))
+ if ascending:
+ query = query.order_by(Comment.added.asc())
+ else:
+ query = query.order_by(Comment.added.desc())
+
+ return FakeCursor(query, lambda c: c.comment())
+
def url_to_prev(self, urlgen):
"""get the next 'newer' entry by this user"""
media = MediaEntry.query.filter(
- (MediaEntry.uploader == self.uploader)
+ (MediaEntry.actor == self.actor)
& (MediaEntry.state == u'processed')
& (MediaEntry.id > self.id)).order_by(MediaEntry.id).first()
@@ -426,7 +624,7 @@ class MediaEntry(Base, MediaEntryMixin):
def url_to_next(self, urlgen):
"""get the next 'older' entry by this user"""
media = MediaEntry.query.filter(
- (MediaEntry.uploader == self.uploader)
+ (MediaEntry.actor == self.actor)
& (MediaEntry.state == u'processed')
& (MediaEntry.id < self.id)).order_by(desc(MediaEntry.id)).first()
@@ -500,6 +698,13 @@ class MediaEntry(Base, MediaEntryMixin):
id=self.id,
title=safe_title)
+ def soft_delete(self, *args, **kwargs):
+ # Find all of the media comments for this and delete them
+ for comment in self.get_comments():
+ comment.delete(*args, **kwargs)
+
+ super(MediaEntry, self).soft_delete(*args, **kwargs)
+
def delete(self, del_orphan_tags=True, **kwargs):
"""Delete MediaEntry and all related files/attachments/comments
@@ -517,7 +722,7 @@ class MediaEntry(Base, MediaEntryMixin):
except OSError as error:
# Returns list of files we failed to delete
_log.error('No such files from the user "{1}" to delete: '
- '{0}'.format(str(error), self.get_uploader))
+ '{0}'.format(str(error), self.get_actor))
_log.info('Deleted Media entry id "{0}"'.format(self.id))
# Related MediaTag's are automatically cleaned, but we might
# want to clean out unused Tag's too.
@@ -531,25 +736,20 @@ class MediaEntry(Base, MediaEntryMixin):
def serialize(self, request, show_comments=True):
""" Unserialize MediaEntry to object """
- href = request.urlgen(
- "mediagoblin.api.object",
- object_type=self.object_type,
- id=self.id,
- qualified=True
- )
- author = self.get_uploader
+ author = self.get_actor
published = UTC.localize(self.created)
- updated = UTC.localize(self.created)
+ updated = UTC.localize(self.updated)
+ public_id = self.get_public_id(request.urlgen)
context = {
- "id": href,
+ "id": public_id,
"author": author.serialize(request),
"objectType": self.object_type,
"url": self.url_for_self(request.urlgen, qualified=True),
"image": {
- "url": request.host_url + self.thumb_url[1:],
+ "url": urljoin(request.host_url, self.thumb_url),
},
"fullImage":{
- "url": request.host_url + self.original_url[1:],
+ "url": urljoin(request.host_url, self.original_url),
},
"published": published.isoformat(),
"updated": updated.isoformat(),
@@ -558,7 +758,7 @@ class MediaEntry(Base, MediaEntryMixin):
},
"links": {
"self": {
- "href": href,
+ "href": public_id,
},
}
@@ -621,7 +821,7 @@ class MediaEntry(Base, MediaEntryMixin):
self.license = data["license"]
if "location" in data:
- Licence.create(data["location"], self)
+ License.create(data["location"], self)
return True
@@ -738,15 +938,63 @@ class MediaTag(Base):
"""A dict like view on this object"""
return DictReadAttrProxy(self)
+class Comment(Base):
+ """
+ Link table between a response and another object that can have replies.
+
+    This acts as a link table between an object and the comments on it; it's
+    done like this so that you can look up all the comments without knowing
+    beforehand which comments are on an object. Any object can be a comment,
+    and more or less any object can accept comments too.
+
+ Important: This is NOT the old MediaComment table.
+ """
+ __tablename__ = "core__comment_links"
+
+ id = Column(Integer, primary_key=True)
+
+ # The GMR to the object the comment is on.
+ target_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference.id),
+ nullable=False
+ )
+ target_helper = relationship(
+ GenericModelReference,
+ foreign_keys=[target_id]
+ )
+ target = association_proxy("target_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+
+ # The comment object
+ comment_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference.id),
+ nullable=False
+ )
+ comment_helper = relationship(
+ GenericModelReference,
+ foreign_keys=[comment_id]
+ )
+ comment = association_proxy("comment_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+
+ # When it was added
+ added = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
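A minimal sketch of wiring a reply up through this link table, mirroring what TextComment.unserialize does further down (hypothetical `user` and `media`, both assumed saved):

    reply = TextComment()
    reply.actor = user.id
    reply.content = u"Nice shot!"
    reply.save(commit=False)    # the link row needs the comment's id

    link = Comment()
    link.target = media         # proxy creates the GenericModelReference
    link.comment = reply
    link.save()
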
-class MediaComment(Base, MediaCommentMixin):
+class TextComment(Base, TextCommentMixin, CommentingMixin):
+ """
+    A basic text comment; this is usually a short amount of text and nothing else.
+    """
+    # This is a legacy from when comments were just on MediaEntry objects.
__tablename__ = "core__media_comments"
id = Column(Integer, primary_key=True)
- media_entry = Column(
- Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
- author = Column(Integer, ForeignKey(User.id), nullable=False)
+ public_id = Column(Unicode, unique=True)
+ actor = Column(Integer, ForeignKey(User.id), nullable=False)
created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
content = Column(UnicodeText, nullable=False)
location = Column(Integer, ForeignKey("core__locations.id"))
get_location = relationship("Location", lazy="joined")
@@ -754,43 +1002,29 @@ class MediaComment(Base, MediaCommentMixin):
# Cascade: Comments are owned by their creator. So do the full thing.
# lazy=dynamic: People might post a *lot* of comments,
# so make the "posted_comments" a query-like thing.
- get_author = relationship(User,
+ get_actor = relationship(User,
backref=backref("posted_comments",
lazy="dynamic",
cascade="all, delete-orphan"))
- get_entry = relationship(MediaEntry,
- backref=backref("comments",
- lazy="dynamic",
- cascade="all, delete-orphan"))
-
- # Cascade: Comments are somewhat owned by their MediaEntry.
- # So do the full thing.
- # lazy=dynamic: MediaEntries might have many comments,
- # so make the "all_comments" a query-like thing.
- get_media_entry = relationship(MediaEntry,
- backref=backref("all_comments",
- lazy="dynamic",
- cascade="all, delete-orphan"))
-
-
- activity = Column(Integer, ForeignKey("core__activity_intermediators.id"))
+ deletion_mode = Base.SOFT_DELETE
def serialize(self, request):
""" Unserialize to python dictionary for API """
- href = request.urlgen(
- "mediagoblin.api.object",
- object_type=self.object_type,
- id=self.id,
- qualified=True
- )
- media = MediaEntry.query.filter_by(id=self.media_entry).first()
- author = self.get_author
+ target = self.get_reply_to()
+        # If the target has been deleted, just give them nothing.
+ if target is None:
+ target = {}
+ else:
+ target = target.serialize(request, show_comments=False)
+
+
+ author = self.get_actor
published = UTC.localize(self.created)
context = {
- "id": href,
+ "id": self.get_public_id(request.urlgen),
"objectType": self.object_type,
"content": self.content,
- "inReplyTo": media.serialize(request, show_comments=False),
+ "inReplyTo": target,
"author": author.serialize(request),
"published": published.isoformat(),
"updated": published.isoformat(),
@@ -803,64 +1037,101 @@ class MediaComment(Base, MediaCommentMixin):
def unserialize(self, data, request):
""" Takes API objects and unserializes on existing comment """
+ if "content" in data:
+ self.content = data["content"]
+
+ if "location" in data:
+ Location.create(data["location"], self)
+
+
# Handle changing the reply ID
if "inReplyTo" in data:
# Validate that the ID is correct
try:
- media_id = int(extract_url_arguments(
+ id = extract_url_arguments(
url=data["inReplyTo"]["id"],
urlmap=request.app.url_map
- )["id"])
+ )["id"]
except ValueError:
             return False
- media = MediaEntry.query.filter_by(id=media_id).first()
+ public_id = request.urlgen(
+ "mediagoblin.api.object",
+ id=id,
+ object_type=data["inReplyTo"]["objectType"],
+ qualified=True
+ )
+
+ media = MediaEntry.query.filter_by(public_id=public_id).first()
if media is None:
return False
- self.media_entry = media.id
-
- if "content" in data:
- self.content = data["content"]
-
- if "location" in data:
- Location.create(data["location"], self)
+ # We need an ID for this model.
+ self.save(commit=False)
+ # Create the link
+ link = Comment()
+ link.target = media
+ link.comment = self
+ link.save()
+
return True
+class Collection(Base, CollectionMixin, CommentingMixin):
+ """A representation of a collection of objects.
+    This holds a group/collection of objects that could be a user-defined album
+    or their inbox, outbox, followers, etc. These are always ordered and accessible
+    via the API and the web.
-class Collection(Base, CollectionMixin):
- """An 'album' or 'set' of media by a user.
+ The collection has a number of types which determine what kind of collection
+    it is; for example, a user's inbox will be of `Collection.INBOX_TYPE`, which
+    will be stored on the `Collection.type` field. It's important to set the correct type.
On deletion, contained CollectionItems get automatically reaped via
SQL cascade"""
__tablename__ = "core__collections"
id = Column(Integer, primary_key=True)
+ public_id = Column(Unicode, unique=True)
title = Column(Unicode, nullable=False)
slug = Column(Unicode)
created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow,
index=True)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
description = Column(UnicodeText)
- creator = Column(Integer, ForeignKey(User.id), nullable=False)
+ actor = Column(Integer, ForeignKey(User.id), nullable=False)
+ num_items = Column(Integer, default=0)
+
+ # There are lots of different special types of collections in the pump.io API
+ # for example: followers, following, inbox, outbox, etc. See type constants
+ # below the fields on this model.
+ type = Column(Unicode, nullable=False)
+
+ # Location
location = Column(Integer, ForeignKey("core__locations.id"))
get_location = relationship("Location", lazy="joined")
- # TODO: No of items in Collection. Badly named, can we migrate to num_items?
- items = Column(Integer, default=0)
-
# Cascade: Collections are owned by their creator. So do the full thing.
- get_creator = relationship(User,
+ get_actor = relationship(User,
backref=backref("collections",
cascade="all, delete-orphan"))
-
- activity = Column(Integer, ForeignKey("core__activity_intermediators.id"))
-
__table_args__ = (
- UniqueConstraint('creator', 'slug'),
+ UniqueConstraint("actor", "slug"),
{})
+ deletion_mode = Base.SOFT_DELETE
+
+    # These are the types. It's strongly suggested that if new ones are invented
+    # they are prefixed to ensure they're unique from other types. Any types used
+    # in the main mediagoblin should be prefixed with "core-".
+ INBOX_TYPE = "core-inbox"
+ OUTBOX_TYPE = "core-outbox"
+ FOLLOWER_TYPE = "core-followers"
+ FOLLOWING_TYPE = "core-following"
+ COMMENT_TYPE = "core-comments"
+ USER_DEFINED_TYPE = "core-user-defined"
+
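For example, creating an ordinary user-defined album might look like this (hypothetical `user`; the namespacing convention for new types is the one described in the comment above):

    album = Collection()
    album.title = u"Holiday photos"
    album.actor = user.id
    album.type = Collection.USER_DEFINED_TYPE
    album.save()

    # A plugin inventing its own type should namespace it:
    MYPLUGIN_FAVOURITES_TYPE = "myplugin-favourites"
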
def get_collection_items(self, ascending=False):
#TODO, is this still needed with self.collection_items being available?
order_col = CollectionItem.position
@@ -871,20 +1142,17 @@ class Collection(Base, CollectionMixin):
def __repr__(self):
safe_title = self.title.encode('ascii', 'replace')
- return '<{classname} #{id}: {title} by {creator}>'.format(
+ return '<{classname} #{id}: {title} by {actor}>'.format(
id=self.id,
classname=self.__class__.__name__,
- creator=self.creator,
+ actor=self.actor,
title=safe_title)
def serialize(self, request):
# Get all serialized output in a list
- items = []
- for item in self.get_collection_items():
- items.append(item.serialize(request))
-
+ items = [i.serialize(request) for i in self.get_collection_items()]
return {
- "totalItems": self.items,
+ "totalItems": self.num_items,
"url": self.url_for_self(request.urlgen, qualified=True),
"items": items,
}
@@ -894,23 +1162,36 @@ class CollectionItem(Base, CollectionItemMixin):
__tablename__ = "core__collection_items"
id = Column(Integer, primary_key=True)
- media_entry = Column(
- Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
+
collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
note = Column(UnicodeText, nullable=True)
added = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
position = Column(Integer)
-
# Cascade: CollectionItems are owned by their Collection. So do the full thing.
in_collection = relationship(Collection,
backref=backref(
"collection_items",
cascade="all, delete-orphan"))
- get_media_entry = relationship(MediaEntry)
+    # Link to the object (could be anything).
+ object_id = Column(
+ Integer,
+ ForeignKey(GenericModelReference.id),
+ nullable=False,
+ index=True
+ )
+ object_helper = relationship(
+ GenericModelReference,
+ foreign_keys=[object_id]
+ )
+ get_object = association_proxy(
+ "object_helper",
+ "get_object",
+ creator=GenericModelReference.find_or_new
+ )
__table_args__ = (
- UniqueConstraint('collection', 'media_entry'),
+ UniqueConstraint('collection', 'object_id'),
{})
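Since the item now points at a GenericModelReference rather than a MediaEntry column, any GMR-capable model can be collected. A sketch (hypothetical `album` and `media`):

    item = CollectionItem()
    item.collection = album.id
    item.get_object = media      # proxy creates the GenericModelReference
    item.position = 1
    item.save()

    item.get_object()            # dereferences back to `media`
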
@property
@@ -919,14 +1200,15 @@ class CollectionItem(Base, CollectionItemMixin):
return DictReadAttrProxy(self)
def __repr__(self):
- return '<{classname} #{id}: Entry {entry} in {collection}>'.format(
+ return '<{classname} #{id}: Object {obj} in {collection}>'.format(
id=self.id,
classname=self.__class__.__name__,
collection=self.collection,
- entry=self.media_entry)
+ obj=self.get_object()
+ )
def serialize(self, request):
- return self.get_media_entry.serialize(request)
+ return self.get_object().serialize(request)
class ProcessingMetaData(Base):
@@ -979,21 +1261,19 @@ class CommentSubscription(Base):
class Notification(Base):
__tablename__ = 'core__notifications'
id = Column(Integer, primary_key=True)
- type = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ object_id = Column(Integer, ForeignKey(GenericModelReference.id))
+ object_helper = relationship(GenericModelReference)
+ obj = association_proxy("object_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
user_id = Column(Integer, ForeignKey('core__users.id'), nullable=False,
index=True)
seen = Column(Boolean, default=lambda: False, index=True)
user = relationship(
User,
- backref=backref('notifications', cascade='all, delete-orphan'))
-
- __mapper_args__ = {
- 'polymorphic_identity': 'notification',
- 'polymorphic_on': type
- }
+ backref=backref('notifications', cascade='all, delete-orphan'))
def __repr__(self):
return '<{klass} #{id}: {user}: {subject} ({seen})>'.format(
@@ -1011,42 +1291,9 @@ class Notification(Base):
subject=getattr(self, 'subject', None),
seen='unseen' if not self.seen else 'seen')
-
-class CommentNotification(Notification):
- __tablename__ = 'core__comment_notifications'
- id = Column(Integer, ForeignKey(Notification.id), primary_key=True)
-
- subject_id = Column(Integer, ForeignKey(MediaComment.id))
- subject = relationship(
- MediaComment,
- backref=backref('comment_notifications', cascade='all, delete-orphan'))
-
- __mapper_args__ = {
- 'polymorphic_identity': 'comment_notification'
- }
-
-
-class ProcessingNotification(Notification):
- __tablename__ = 'core__processing_notifications'
-
- id = Column(Integer, ForeignKey(Notification.id), primary_key=True)
-
- subject_id = Column(Integer, ForeignKey(MediaEntry.id))
- subject = relationship(
- MediaEntry,
- backref=backref('processing_notifications',
- cascade='all, delete-orphan'))
-
- __mapper_args__ = {
- 'polymorphic_identity': 'processing_notification'
- }
-
-# the with_polymorphic call has been moved to the bottom above MODELS
-# this is because it causes conflicts with relationship calls.
-
-class ReportBase(Base):
+class Report(Base):
"""
- This is the basic report object which the other reports are based off of.
+ Represents a report that someone might file against Media, Comments, etc.
:keyword reporter_id Holds the id of the user who created
the report, as an Integer column.
@@ -1059,8 +1306,6 @@ class ReportBase(Base):
an Integer column.
     :keyword created Holds a datetime column of when the report was filed.
- :keyword discriminator This column distinguishes between the
- different types of reports.
:keyword resolver_id Holds the id of the moderator/admin who
resolved the report.
:keyword resolved Holds the DateTime object which descri-
@@ -1069,8 +1314,11 @@ class ReportBase(Base):
resolver's reasons for resolving
the report this way. Some of this
is auto-generated
+ :keyword object_id Holds the ID of the GenericModelReference
+ which points to the reported object.
"""
__tablename__ = 'core__reports'
+
id = Column(Integer, primary_key=True)
reporter_id = Column(Integer, ForeignKey(User.id), nullable=False)
reporter = relationship(
@@ -1078,7 +1326,7 @@ class ReportBase(Base):
backref=backref("reports_filed_by",
lazy="dynamic",
cascade="all, delete-orphan"),
- primaryjoin="User.id==ReportBase.reporter_id")
+ primaryjoin="User.id==Report.reporter_id")
report_content = Column(UnicodeText)
reported_user_id = Column(Integer, ForeignKey(User.id), nullable=False)
reported_user = relationship(
@@ -1086,70 +1334,42 @@ class ReportBase(Base):
backref=backref("reports_filed_on",
lazy="dynamic",
cascade="all, delete-orphan"),
- primaryjoin="User.id==ReportBase.reported_user_id")
+ primaryjoin="User.id==Report.reported_user_id")
created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
- discriminator = Column('type', Unicode(50))
resolver_id = Column(Integer, ForeignKey(User.id))
resolver = relationship(
User,
backref=backref("reports_resolved_by",
lazy="dynamic",
cascade="all, delete-orphan"),
- primaryjoin="User.id==ReportBase.resolver_id")
+ primaryjoin="User.id==Report.resolver_id")
resolved = Column(DateTime)
result = Column(UnicodeText)
- __mapper_args__ = {'polymorphic_on': discriminator}
+
+ object_id = Column(Integer, ForeignKey(GenericModelReference.id), nullable=False)
+ object_helper = relationship(GenericModelReference)
+ obj = association_proxy("object_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+
+ def is_archived_report(self):
+ return self.resolved is not None
def is_comment_report(self):
- return self.discriminator=='comment_report'
+ if self.object_id is None:
+ return False
+ return isinstance(self.obj(), TextComment)
def is_media_entry_report(self):
- return self.discriminator=='media_report'
-
- def is_archived_report(self):
- return self.resolved is not None
+ if self.object_id is None:
+ return False
+ return isinstance(self.obj(), MediaEntry)
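Filing a report against any object now goes through the same GMR proxy rather than the old CommentReport/MediaReport subclasses; the is_*_report() helpers above then reduce to isinstance checks. A sketch (hypothetical `reporter` and `media`):

    report = Report()
    report.reporter_id = reporter.id
    report.reported_user_id = media.actor
    report.report_content = u"Looks like spam"
    report.obj = media                 # proxy creates the GenericModelReference
    report.save()

    report.is_media_entry_report()     # True
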
def archive(self,resolver_id, resolved, result):
self.resolver_id = resolver_id
self.resolved = resolved
self.result = result
-
-class CommentReport(ReportBase):
- """
- Reports that have been filed on comments.
- :keyword comment_id Holds the integer value of the reported
- comment's ID
- """
- __tablename__ = 'core__reports_on_comments'
- __mapper_args__ = {'polymorphic_identity': 'comment_report'}
-
- id = Column('id',Integer, ForeignKey('core__reports.id'),
- primary_key=True)
- comment_id = Column(Integer, ForeignKey(MediaComment.id), nullable=True)
- comment = relationship(
- MediaComment, backref=backref("reports_filed_on",
- lazy="dynamic"))
-
-
-class MediaReport(ReportBase):
- """
- Reports that have been filed on media entries
- :keyword media_entry_id Holds the integer value of the reported
- media entry's ID
- """
- __tablename__ = 'core__reports_on_media'
- __mapper_args__ = {'polymorphic_identity': 'media_report'}
-
- id = Column('id',Integer, ForeignKey('core__reports.id'),
- primary_key=True)
- media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=True)
- media_entry = relationship(
- MediaEntry,
- backref=backref("reports_filed_on",
- lazy="dynamic"))
-
class UserBan(Base):
"""
Holds the information on a specific user's ban-state. As long as one of
@@ -1235,6 +1455,8 @@ class Generator(Base):
updated = Column(DateTime, default=datetime.datetime.utcnow)
object_type = Column(Unicode, nullable=False)
+ deletion_mode = Base.SOFT_DELETE
+
def __repr__(self):
return "<{klass} {name}>".format(
klass=self.__class__.__name__,
@@ -1262,62 +1484,6 @@ class Generator(Base):
if "displayName" in data:
self.name = data["displayName"]
-
-class ActivityIntermediator(Base):
- """
- This is used so that objects/targets can have a foreign key back to this
- object and activities can a foreign key to this object. This objects to be
- used multiple times for the activity object or target and also allows for
- different types of objects to be used as an Activity.
- """
- __tablename__ = "core__activity_intermediators"
-
- id = Column(Integer, primary_key=True)
- type = Column(Unicode, nullable=False)
-
- TYPES = {
- "user": User,
- "media": MediaEntry,
- "comment": MediaComment,
- "collection": Collection,
- }
-
- def _find_model(self, obj):
- """ Finds the model for a given object """
- for key, model in self.TYPES.items():
- if isinstance(obj, model):
- return key, model
-
- return None, None
-
- def set(self, obj):
- """ This sets itself as the activity """
- key, model = self._find_model(obj)
- if key is None:
- raise ValueError("Invalid type of object given")
-
- self.type = key
-
- # We need to populate the self.id so we need to save but, we don't
- # want to save this AI in the database (yet) so commit=False.
- self.save(commit=False)
- obj.activity = self.id
- obj.save()
-
- def get(self):
- """ Finds the object for an activity """
- if self.type is None:
- return None
-
- model = self.TYPES[self.type]
- return model.query.filter_by(activity=self.id).first()
-
- @validates("type")
- def validate_type(self, key, value):
- """ Validate that the type set is a valid type """
- assert value in self.TYPES
- return value
-
class Activity(Base, ActivityMixin):
"""
This holds all the metadata about an activity such as uploading an image,
@@ -1326,29 +1492,39 @@ class Activity(Base, ActivityMixin):
__tablename__ = "core__activities"
id = Column(Integer, primary_key=True)
+ public_id = Column(Unicode, unique=True)
actor = Column(Integer,
ForeignKey("core__users.id"),
nullable=False)
published = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
verb = Column(Unicode, nullable=False)
content = Column(Unicode, nullable=True)
title = Column(Unicode, nullable=True)
generator = Column(Integer,
ForeignKey("core__generators.id"),
nullable=True)
- object = Column(Integer,
- ForeignKey("core__activity_intermediators.id"),
- nullable=False)
- target = Column(Integer,
- ForeignKey("core__activity_intermediators.id"),
- nullable=True)
+
+ # Create the generic foreign keys for the object
+ object_id = Column(Integer, ForeignKey(GenericModelReference.id), nullable=False)
+ object_helper = relationship(GenericModelReference, foreign_keys=[object_id])
+ object = association_proxy("object_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
+
+ # Create the generic foreign Key for the target
+ target_id = Column(Integer, ForeignKey(GenericModelReference.id), nullable=True)
+ target_helper = relationship(GenericModelReference, foreign_keys=[target_id])
+ target = association_proxy("target_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
get_actor = relationship(User,
backref=backref("activities",
cascade="all, delete-orphan"))
get_generator = relationship(Generator)
+ deletion_mode = Base.SOFT_DELETE
+
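A sketch of building an activity with the new generic foreign keys (hypothetical `user`, `media` and `album`); assigning through the proxies creates the GenericModelReference rows:

    # Hypothetical: "user added media to album".
    activity = Activity()
    activity.actor = user.id
    activity.verb = u"add"
    activity.object = media
    activity.target = album
    activity.save()              # save() also bumps activity.updated
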
def __repr__(self):
if self.content is None:
return "<{klass} verb:{verb}>".format(
@@ -1361,62 +1537,54 @@ class Activity(Base, ActivityMixin):
content=self.content
)
- @property
- def get_object(self):
- if self.object is None:
- return None
-
- ai = ActivityIntermediator.query.filter_by(id=self.object).first()
- return ai.get()
-
- def set_object(self, obj):
- self.object = self._set_model(obj)
-
- @property
- def get_target(self):
- if self.target is None:
- return None
+ def save(self, set_updated=True, *args, **kwargs):
+ if set_updated:
+ self.updated = datetime.datetime.now()
+ super(Activity, self).save(*args, **kwargs)
- ai = ActivityIntermediator.query.filter_by(id=self.target).first()
- return ai.get()
+class Graveyard(Base):
+ """ Where models come to die """
+ __tablename__ = "core__graveyard"
- def set_target(self, obj):
- self.target = self._set_model(obj)
+ id = Column(Integer, primary_key=True)
+ public_id = Column(Unicode, nullable=True, unique=True)
- def _set_model(self, obj):
- # Firstly can we set obj
- if not hasattr(obj, "activity"):
- raise ValueError(
- "{0!r} is unable to be set on activity".format(obj))
+ deleted = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ object_type = Column(Unicode, nullable=False)
- if obj.activity is None:
- # We need to create a new AI
- ai = ActivityIntermediator()
- ai.set(obj)
- ai.save()
- return ai.id
+    # This could either be a deleted actor or a real actor; this must be
+    # nullable as we shouldn't have it set for a deleted actor.
+ actor_id = Column(Integer, ForeignKey(GenericModelReference.id))
+ actor_helper = relationship(GenericModelReference)
+ actor = association_proxy("actor_helper", "get_object",
+ creator=GenericModelReference.find_or_new)
- # Okay we should have an existing AI
- return ActivityIntermediator.query.filter_by(id=obj.activity).first().id
+ def __repr__(self):
+ return "<{klass} deleted {obj_type}>".format(
+ klass=type(self).__name__,
+ obj_type=self.object_type
+ )
- def save(self, set_updated=True, *args, **kwargs):
- if set_updated:
- self.updated = datetime.datetime.now()
- super(Activity, self).save(*args, **kwargs)
+ def serialize(self, request):
+ deleted = UTC.localize(self.deleted).isoformat()
+ context = {
+ "id": self.public_id,
+ "objectType": self.object_type,
+ "published": deleted,
+ "updated": deleted,
+ "deleted": deleted,
+ }
-with_polymorphic(
- Notification,
- [ProcessingNotification, CommentNotification])
+ if self.actor_id is not None:
+ context["actor"] = self.actor().serialize(request)
+ return context
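Because the tombstone keeps the public_id, a soft-deleted object can still be resolved for API consumers; a sketch (hypothetical `public_id` and `request`):

    tombstone = Graveyard.query.filter_by(public_id=public_id).first()
    if tombstone is not None:
        body = tombstone.serialize(request)   # carries the "deleted" timestamp
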
MODELS = [
- User, MediaEntry, Tag, MediaTag, MediaComment, Collection, CollectionItem,
- MediaFile, FileKeynames, MediaAttachmentFile, ProcessingMetaData,
- Notification, CommentNotification, ProcessingNotification, Client,
- CommentSubscription, ReportBase, CommentReport, MediaReport, UserBan,
- Privilege, PrivilegeUserAssociation,
- RequestToken, AccessToken, NonceTimestamp,
- Activity, ActivityIntermediator, Generator,
- Location]
+ LocalUser, RemoteUser, User, MediaEntry, Tag, MediaTag, Comment, TextComment,
+ Collection, CollectionItem, MediaFile, FileKeynames, MediaAttachmentFile,
+ ProcessingMetaData, Notification, Client, CommentSubscription, Report,
+ UserBan, Privilege, PrivilegeUserAssociation, RequestToken, AccessToken,
+ NonceTimestamp, Activity, Generator, Location, GenericModelReference, Graveyard]
"""
Foundations are the default rows that are created immediately after the tables
diff --git a/mediagoblin/db/util.py b/mediagoblin/db/util.py
index 7c026691..57e6b942 100644
--- a/mediagoblin/db/util.py
+++ b/mediagoblin/db/util.py
@@ -37,7 +37,7 @@ def atomic_update(table, query_dict, update_values):
def check_media_slug_used(uploader_id, slug, ignore_m_id):
- query = MediaEntry.query.filter_by(uploader=uploader_id, slug=slug)
+ query = MediaEntry.query.filter_by(actor=uploader_id, slug=slug)
if ignore_m_id is not None:
query = query.filter(MediaEntry.id != ignore_m_id)
does_exist = query.first() is not None
@@ -67,7 +67,7 @@ def clean_orphan_tags(commit=True):
def check_collection_slug_used(creator_id, slug, ignore_c_id):
- filt = (Collection.creator == creator_id) \
+ filt = (Collection.actor == creator_id) \
& (Collection.slug == slug)
if ignore_c_id is not None:
filt = filt & (Collection.id != ignore_c_id)