Diffstat (limited to 'mediagoblin/db/migrations')
-rw-r--r--  mediagoblin/db/migrations/README                                                        57
-rw-r--r--  mediagoblin/db/migrations/env.py                                                        64
-rw-r--r--  mediagoblin/db/migrations/script.py.mako                                                24
-rw-r--r--  mediagoblin/db/migrations/versions/.gitkeep                                              0
-rw-r--r--  mediagoblin/db/migrations/versions/101510e3a713_removes_graveyard_items_from_.py         60
-rw-r--r--  mediagoblin/db/migrations/versions/228916769bd2_ensure_report_object_id_is_nullable.py   33
-rw-r--r--  mediagoblin/db/migrations/versions/3145accb8fe3_remove_tombstone_comment_wrappers.py     44
-rw-r--r--  mediagoblin/db/migrations/versions/4066b9f8b84a_use_comment_link_ids_notifications.py   103
-rw-r--r--  mediagoblin/db/migrations/versions/52bf0ccbedc1_initial_revision.py                     422
-rw-r--r--  mediagoblin/db/migrations/versions/8429e33fdf7_remove_the_graveyard_objects_from_.py     62
10 files changed, 869 insertions, 0 deletions
diff --git a/mediagoblin/db/migrations/README b/mediagoblin/db/migrations/README
new file mode 100644
index 00000000..93d85eff
--- /dev/null
+++ b/mediagoblin/db/migrations/README
@@ -0,0 +1,57 @@
+Migration Guide
+---------------
+
+Alembic comes with a CLI called ``alembic``.
+
+Create a Migration
+^^^^^^^^^^^^^^^^^^
+
+Let's create our first migration::
+
+ $ alembic revision -m "add favourite_band field"
+ Generating
+ /your/gmg/path/mediagoblin/db/migrations/versions/1e3793de36a_add_favourite_band_field.py ... done
+
+By default, migration files have two methods: ``upgrade`` and ``downgrade``.
+Alembic will invoke these methods to apply the migrations to your current
+database.
+
+Now we need to edit our newly created migration file
+``1e3793de36a_add_favourite_band_field.py`` to add a new ``favourite_band``
+column to the ``core__users`` table::
+
+ def upgrade():
+ op.add_column('core__users', sa.Column('favourite_band', sa.Unicode(100)))
+
+
+ def downgrade():
+ op.drop_column('core__users', 'favourite_band')
+
+.. note::
+
+ Alembic can also generate `automatic migrations <http://alembic.readthedocs.org/en/latest/tutorial.html#auto-generating-migrations>`__.
+
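+For instance, if the new column is already declared on your models, the same
+migration could have been generated for you (always review the generated file
+before applying it)::
+
+    $ alembic revision --autogenerate -m "add favourite_band field"
+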
+Then we can run ``gmg dbupdate`` to apply the new migration::
+
+ $ gmg dbupdate
+ INFO [alembic.migration] Context impl SQLiteImpl.
+ INFO [alembic.migration] Will assume non-transactional DDL.
+ INFO [alembic.migration] Running upgrade None -> 1e3793de36a, add favourite band field
+
+If you want to revert that migration, simply run::
+
+ $ alembic downgrade -1
+
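+You can check which revision the database is currently at, and which
+revisions Alembic knows about, with::
+
+    $ alembic current
+    $ alembic history
+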
+.. warning::
+
+    Currently, Alembic cannot perform ``DROP COLUMN``, ``ALTER COLUMN`` and
+    similar operations on SQLite. Please see
+    https://bitbucket.org/zzzeek/alembic/issue/21/column-renames-not-supported-on-sqlite
+    for detailed information.
+
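+A common workaround is Alembic's batch mode, which recreates the table behind
+the scenes. A minimal sketch, assuming an Alembic version that provides
+``op.batch_alter_table``::
+
+    def downgrade():
+        with op.batch_alter_table('core__users') as batch_op:
+            batch_op.drop_column('favourite_band')
+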
+Glossary
+^^^^^^^^
+
+* ``alembic.ini``: The Alembic configuration file. The ``alembic`` CLI looks
+  for this file every time it is invoked (a rough sketch of such a file
+  follows this list).
+* ``mediagoblin/db/migrations/versions/``: Alembic will add new migration files
+ to this directory.
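+
+A rough idea of what such a configuration file contains (a generic sketch,
+not MediaGoblin's actual ``alembic.ini``; the logging sections which
+``env.py`` reads via ``fileConfig`` are omitted)::
+
+    [alembic]
+    script_location = mediagoblin/db/migrations
+    sqlalchemy.url = sqlite:///mediagoblin.db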
diff --git a/mediagoblin/db/migrations/env.py b/mediagoblin/db/migrations/env.py
new file mode 100644
index 00000000..43b7b247
--- /dev/null
+++ b/mediagoblin/db/migrations/env.py
@@ -0,0 +1,64 @@
+from __future__ import with_statement
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+from logging.config import fileConfig
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line essentially just sets up the loggers.
+fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+from mediagoblin.db.models import Base
+target_metadata = Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(url=url, target_metadata=target_metadata)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+    In this scenario we associate an existing connection, handed to us by
+    the caller, with the context instead of creating a new Engine.
+
+ """
+ connection = config.attributes["session"].get_bind()
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
+
diff --git a/mediagoblin/db/migrations/script.py.mako b/mediagoblin/db/migrations/script.py.mako
new file mode 100644
index 00000000..43c09401
--- /dev/null
+++ b/mediagoblin/db/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"}
diff --git a/mediagoblin/db/migrations/versions/.gitkeep b/mediagoblin/db/migrations/versions/.gitkeep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/.gitkeep
diff --git a/mediagoblin/db/migrations/versions/101510e3a713_removes_graveyard_items_from_.py b/mediagoblin/db/migrations/versions/101510e3a713_removes_graveyard_items_from_.py
new file mode 100644
index 00000000..723100c5
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/101510e3a713_removes_graveyard_items_from_.py
@@ -0,0 +1,60 @@
+"""#5382 Removes graveyard items from collections
+
+Revision ID: 101510e3a713
+Revises: 52bf0ccbedc1
+Create Date: 2016-01-12 10:46:26.486610
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '101510e3a713'
+down_revision = '52bf0ccbedc1'
+
+from alembic import op
+from sqlalchemy import MetaData
+from sqlalchemy.sql import and_
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """
+    When an object is deleted, its GenericModelReference is updated to point
+    at the tombstone object, as we expect. The problem is that collections
+    then still contain the deleted items, which causes problems (for example
+    when rendering them).
+
+ This migration is to remove any Graveyard objects (tombstones) from any
+ Collection.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+ collection_items_table = inspect_table(metadata, "core__collection_items")
+ graveyard_table = inspect_table(metadata, "core__graveyard")
+
+ res = list(db.execute(graveyard_table.select()))
+ for tombstone in res:
+ # Get GMR for tombstone
+ gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == tombstone.id,
+ gmr_table.c.model_type == "core__graveyard"
+ ))).first()
+
+        # If there is no GMR, nothing can reference the tombstone, so it
+        # cannot be in any collection; skip it.
+ if gmr is None:
+ continue
+
+ # Delete all the CollectionItem objects for this GMR
+ db.execute(collection_items_table.delete().where(
+ collection_items_table.c.object_id == gmr.id
+ ))
+
+
+def downgrade():
+ """
+    Nothing to do here: the upgrade only deletes objects from collections.
+    No schema changes have occurred, so this can be reverted without any
+    problems.
+ """
+ pass
diff --git a/mediagoblin/db/migrations/versions/228916769bd2_ensure_report_object_id_is_nullable.py b/mediagoblin/db/migrations/versions/228916769bd2_ensure_report_object_id_is_nullable.py
new file mode 100644
index 00000000..596b87de
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/228916769bd2_ensure_report_object_id_is_nullable.py
@@ -0,0 +1,33 @@
+"""ensure Report.object_id is nullable
+
+Revision ID: 228916769bd2
+Revises: 3145accb8fe3
+Create Date: 2016-02-29 18:54:37.295185
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '228916769bd2'
+down_revision = '3145accb8fe3'
+
+from alembic import op
+from sqlalchemy import MetaData
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """
+    This ensures that the Report.object_id field is nullable. It seems that
+    for a short period of time it could have been created NOT NULL, but that
+    was fixed later.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+ report_table = inspect_table(metadata, "core__reports")
+
+    # Check whether the column is already nullable
+    object_id_field = report_table.columns["object_id"]
+    if not object_id_field.nullable:
+        # It isn't, so alter it through Alembic's op helper.
+        op.alter_column("core__reports", "object_id", nullable=True)
+
+def downgrade():
+ pass
diff --git a/mediagoblin/db/migrations/versions/3145accb8fe3_remove_tombstone_comment_wrappers.py b/mediagoblin/db/migrations/versions/3145accb8fe3_remove_tombstone_comment_wrappers.py
new file mode 100644
index 00000000..1f336048
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/3145accb8fe3_remove_tombstone_comment_wrappers.py
@@ -0,0 +1,44 @@
+"""remove tombstone comment wrappers
+
+Revision ID: 3145accb8fe3
+Revises: 4066b9f8b84a
+Create Date: 2016-02-29 14:38:12.096859
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3145accb8fe3'
+down_revision = '4066b9f8b84a'
+
+from alembic import op
+from sqlalchemy import MetaData, and_
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """
+    Removes the Comment wrappers of comments which have been deleted and now
+    exist only as tombstones.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+ comment_table = inspect_table(metadata, "core__comment_links")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Get the Comment wrappers
+ comment_wrappers = list(db.execute(comment_table.select()))
+
+ for wrapper in comment_wrappers:
+        # Query for a graveyard GMR for this wrapper's comment
+ gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.id == wrapper.comment_id,
+ gmr_table.c.model_type == "core__graveyard"
+ ))).first()
+
+ if gmr is not None:
+            # The wrapper points at a deleted comment, so delete it
+ db.execute(comment_table.delete().where(
+ comment_table.c.id == wrapper.id
+ ))
+
+def downgrade():
+ pass
diff --git a/mediagoblin/db/migrations/versions/4066b9f8b84a_use_comment_link_ids_notifications.py b/mediagoblin/db/migrations/versions/4066b9f8b84a_use_comment_link_ids_notifications.py
new file mode 100644
index 00000000..9dfef18d
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/4066b9f8b84a_use_comment_link_ids_notifications.py
@@ -0,0 +1,103 @@
+"""use_comment_link_ids_notifications
+
+Revision ID: 4066b9f8b84a
+Revises: 8429e33fdf7
+Create Date: 2016-02-29 11:46:13.511318
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '4066b9f8b84a'
+down_revision = '8429e33fdf7'
+
+from alembic import op
+from sqlalchemy import MetaData
+from sqlalchemy import and_
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """"
+ This replaces the Notification.obj with the ID of the Comment (i.e. comment
+ link) ID instead of the TextComment object.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+ notification_table = inspect_table(metadata, "core__notifications")
+ comment_table = inspect_table(metadata, "core__comment_links")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+ # Get the notifications.
+ notifications = list(db.execute(notification_table.select()))
+
+ # Iterate through all the notifications
+ for notification in notifications:
+        # Look up the comment link whose TextComment GMR matches the
+        # notification's object_id
+ comment_link = db.execute(comment_table.select().where(
+ comment_table.c.comment_id == notification.object_id
+ )).first()
+
+ # Find the GMR for this comment or make one if one doesn't exist.
+ gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == comment_link.id,
+ gmr_table.c.model_type == "core__comment_links"
+ ))).first()
+
+ # If it doesn't exist we need to create one.
+ if gmr is None:
+ gmr = db.execute(gmr_table.insert().values(
+ obj_pk=comment_link.id,
+ model_type="core__comment_links"
+ )).inserted_primary_key[0]
+ else:
+ gmr = gmr.id
+
+        # Now update the notification so it points at the comment link's GMR
+        # rather than the TextComment's GMR.
+ db.execute(notification_table.update().values(
+ object_id=gmr
+ ).where(
+ notification_table.c.id == notification.id
+ ))
+
+
+def downgrade():
+ """
+    This puts the TextComment's GMR ID back into notification.object_id,
+    where the upgrade put the GMR ID of the Comment object (i.e. the comment
+    link).
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+    notification_table = inspect_table(metadata, "core__notifications")
+    comment_table = inspect_table(metadata, "core__comment_links")
+    gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+    # Notifications
+ notifications = list(db.execute(notification_table.select()))
+
+ # Iterate through all the notifications
+    for notification in notifications:
+        # The upgrade pointed the notification at the GMR of a comment link;
+        # find that GMR (skip notifications which don't point at one).
+        gmr = db.execute(gmr_table.select().where(and_(
+            gmr_table.c.id == notification.object_id,
+            gmr_table.c.model_type == "core__comment_links"
+        ))).first()
+
+        if gmr is None:
+            continue
+
+        # Look up the Comment link object itself
+        comment_link = db.execute(comment_table.select().where(
+            comment_table.c.id == gmr.obj_pk
+        )).first()
+
+        # Point the notification back at the TextComment's GMR, which the
+        # comment link keeps in its comment_id column.
+        db.execute(notification_table.update().values(
+            object_id=comment_link.comment_id
+        ).where(
+            notification_table.c.id == notification.id
+        ))
+
diff --git a/mediagoblin/db/migrations/versions/52bf0ccbedc1_initial_revision.py b/mediagoblin/db/migrations/versions/52bf0ccbedc1_initial_revision.py
new file mode 100644
index 00000000..964cf5be
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/52bf0ccbedc1_initial_revision.py
@@ -0,0 +1,422 @@
+"""initial revision
+
+Revision ID: 52bf0ccbedc1
+Revises: None
+Create Date: 2015-11-07 17:00:28.191042
+Description: This is an initial Alembic migration
+"""
+
+# revision identifiers, used by Alembic.
+revision = '52bf0ccbedc1'
+down_revision = None
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+    # If the core tables already exist, this database predates our Alembic
+    # migrations (it was created with the old sqlalchemy-migrate ones), so we
+    # can bail out early.
+ if op.get_bind().engine.has_table("core__users"):
+ return
+
+ op.create_table(
+ 'core__clients',
+ sa.Column('id', sa.Unicode(), nullable=True),
+ sa.Column('secret', sa.Unicode(), nullable=False),
+ sa.Column('expirey', sa.DateTime(), nullable=True),
+ sa.Column('application_type', sa.Unicode(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('redirect_uri', sa.UnicodeText(),
+ nullable=True),
+ sa.Column('logo_url', sa.Unicode(), nullable=True),
+ sa.Column('application_name', sa.Unicode(), nullable=True),
+ sa.Column('contacts', sa.UnicodeText(),
+ nullable=True),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__file_keynames',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=True),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('name'))
+
+ op.create_table(
+ 'core__generators',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=False),
+ sa.Column('published', sa.DateTime(), nullable=True),
+ sa.Column('updated', sa.DateTime(), nullable=True),
+ sa.Column('object_type', sa.Unicode(), nullable=False),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__generic_model_reference',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('obj_pk', sa.Integer(), nullable=False),
+ sa.Column('model_type', sa.Unicode(), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('model_type', 'obj_pk'))
+
+ op.create_table(
+ 'core__locations',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=True),
+ sa.Column('position', sa.UnicodeText(),
+ nullable=True),
+ sa.Column('address', sa.UnicodeText(),
+ nullable=True),
+ sa.PrimaryKeyConstraint('id'))
+
+ # We should remove this in a future migration, though
+ op.create_table(
+ 'core__migrations',
+ sa.Column('name', sa.Unicode(), nullable=False),
+ sa.Column('version', sa.Integer(), nullable=False),
+ sa.PrimaryKeyConstraint('name'))
+
+ op.create_table(
+ 'core__nonce_timestamps',
+ sa.Column('nonce', sa.Unicode(), nullable=False),
+ sa.Column('timestamp', sa.DateTime(), nullable=False),
+ sa.PrimaryKeyConstraint('nonce', 'timestamp'))
+
+ op.create_table(
+ 'core__privileges',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('privilege_name', sa.Unicode(), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('privilege_name'))
+
+ op.create_table(
+ 'core__tags',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('slug', sa.Unicode(), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('slug'))
+
+ op.create_table(
+ 'core__comment_links',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('target_id', sa.Integer(), nullable=False),
+ sa.Column('comment_id', sa.Integer(), nullable=False),
+ sa.Column('added', sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(['comment_id'],
+ ['core__generic_model_reference.id']),
+ sa.ForeignKeyConstraint(['target_id'],
+ ['core__generic_model_reference.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__graveyard',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('deleted', sa.DateTime(), nullable=False),
+ sa.Column('object_type', sa.Unicode(), nullable=False),
+ sa.Column('actor_id', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['actor_id'],
+ ['core__generic_model_reference.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_table(
+ 'core__users',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('url', sa.Unicode(), nullable=True),
+ sa.Column('bio', sa.UnicodeText(), nullable=True),
+ sa.Column('name', sa.Unicode(), nullable=True),
+ sa.Column('type', sa.Unicode(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('location', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['location'], ['core__locations.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__activities',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=False),
+ sa.Column('published', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('verb', sa.Unicode(), nullable=False),
+ sa.Column('content', sa.Unicode(), nullable=True),
+ sa.Column('title', sa.Unicode(), nullable=True),
+ sa.Column('generator', sa.Integer(), nullable=True),
+ sa.Column('object_id', sa.Integer(), nullable=False),
+ sa.Column('target_id', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['generator'], ['core__generators.id']),
+ sa.ForeignKeyConstraint(['object_id'],
+ ['core__generic_model_reference.id']),
+ sa.ForeignKeyConstraint(['target_id'],
+ ['core__generic_model_reference.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_table(
+ 'core__collections',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('title', sa.Unicode(), nullable=False),
+ sa.Column('slug', sa.Unicode(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('description', sa.UnicodeText(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=False),
+ sa.Column('num_items', sa.Integer(), nullable=True),
+ sa.Column('type', sa.Unicode(), nullable=False),
+ sa.Column('location', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['location'], ['core__locations.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('actor', 'slug'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_index(
+ op.f('ix_core__collections_created'),
+ 'core__collections', ['created'], unique=False)
+
+ op.create_table(
+ 'core__local_users',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('username', sa.Unicode(), nullable=False),
+ sa.Column('email', sa.Unicode(), nullable=False),
+ sa.Column('pw_hash', sa.Unicode(), nullable=True),
+ sa.Column('wants_comment_notification', sa.Boolean(), nullable=True),
+ sa.Column('wants_notifications', sa.Boolean(), nullable=True),
+ sa.Column('license_preference', sa.Unicode(), nullable=True),
+ sa.Column('uploaded', sa.Integer(), nullable=True),
+ sa.Column('upload_limit', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('username'))
+
+ op.create_table(
+ 'core__media_comments',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('content', sa.UnicodeText(), nullable=False),
+ sa.Column('location', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['location'], ['core__locations.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_table(
+ 'core__media_entries',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.Unicode(), nullable=True),
+ sa.Column('remote', sa.Boolean(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=False),
+ sa.Column('title', sa.Unicode(), nullable=False),
+ sa.Column('slug', sa.Unicode(), nullable=True),
+ sa.Column('description', sa.UnicodeText(), nullable=True),
+ sa.Column('media_type', sa.Unicode(), nullable=False),
+ sa.Column('state', sa.Unicode(), nullable=False),
+ sa.Column('license', sa.Unicode(), nullable=True),
+ sa.Column('file_size', sa.Integer(), nullable=True),
+ sa.Column('location', sa.Integer(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.Column('fail_error', sa.Unicode(), nullable=True),
+ sa.Column('fail_metadata', sa.UnicodeText(), nullable=True),
+ sa.Column('transcoding_progress', sa.SmallInteger(), nullable=True),
+ sa.Column('queued_media_file', sa.Unicode(), nullable=True),
+ sa.Column('queued_task_id', sa.Unicode(), nullable=True),
+ sa.Column('media_metadata', sa.UnicodeText(), nullable=True),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['location'], ['core__locations.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('actor', 'slug'),
+ sa.UniqueConstraint('public_id'))
+
+ op.create_index(
+ op.f('ix_core__media_entries_actor'),
+ 'core__media_entries', ['actor'], unique=False)
+ op.create_index(
+ op.f('ix_core__media_entries_created'),
+ 'core__media_entries', ['created'], unique=False)
+
+ op.create_table(
+ 'core__notifications',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('object_id', sa.Integer(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('user_id', sa.Integer(), nullable=False),
+ sa.Column('seen', sa.Boolean(), nullable=True),
+ sa.ForeignKeyConstraint(['object_id'],
+ ['core__generic_model_reference.id']),
+ sa.ForeignKeyConstraint(['user_id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_index(
+ op.f('ix_core__notifications_seen'),
+ 'core__notifications', ['seen'], unique=False)
+
+ op.create_index(
+ op.f('ix_core__notifications_user_id'),
+ 'core__notifications', ['user_id'], unique=False)
+
+ op.create_table(
+ 'core__privileges_users',
+ sa.Column('user', sa.Integer(), nullable=False),
+ sa.Column('privilege', sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(['privilege'], ['core__privileges.id']),
+ sa.ForeignKeyConstraint(['user'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('user', 'privilege'))
+
+ op.create_table(
+ 'core__remote_users',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('webfinger', sa.Unicode(), nullable=True),
+ sa.ForeignKeyConstraint(['id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('webfinger'))
+
+ op.create_table(
+ 'core__reports',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('reporter_id', sa.Integer(), nullable=False),
+ sa.Column('report_content', sa.UnicodeText(), nullable=True),
+ sa.Column('reported_user_id', sa.Integer(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('resolver_id', sa.Integer(), nullable=True),
+ sa.Column('resolved', sa.DateTime(), nullable=True),
+ sa.Column('result', sa.UnicodeText(), nullable=True),
+ sa.Column('object_id', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['object_id'],
+ ['core__generic_model_reference.id']),
+ sa.ForeignKeyConstraint(['reported_user_id'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['reporter_id'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['resolver_id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'))
+ op.create_table(
+ 'core__request_tokens',
+ sa.Column('token', sa.Unicode(), nullable=False),
+ sa.Column('secret', sa.Unicode(), nullable=False),
+ sa.Column('client', sa.Unicode(), nullable=True),
+ sa.Column('actor', sa.Integer(), nullable=True),
+ sa.Column('used', sa.Boolean(), nullable=True),
+ sa.Column('authenticated', sa.Boolean(), nullable=True),
+ sa.Column('verifier', sa.Unicode(), nullable=True),
+ sa.Column('callback', sa.Unicode(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['client'], ['core__clients.id']),
+ sa.PrimaryKeyConstraint('token'))
+
+ op.create_table(
+ 'core__user_bans',
+ sa.Column('user_id', sa.Integer(), nullable=False),
+ sa.Column('expiration_date', sa.Date(), nullable=True),
+ sa.Column('reason', sa.UnicodeText(), nullable=False),
+ sa.ForeignKeyConstraint(['user_id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('user_id'))
+
+ op.create_table(
+ 'core__access_tokens',
+ sa.Column('token', sa.Unicode(), nullable=False),
+ sa.Column('secret', sa.Unicode(), nullable=False),
+ sa.Column('actor', sa.Integer(), nullable=True),
+ sa.Column('request_token', sa.Unicode(), nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('updated', sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(['actor'], ['core__users.id']),
+ sa.ForeignKeyConstraint(['request_token'],
+ ['core__request_tokens.token']),
+ sa.PrimaryKeyConstraint('token'))
+
+ op.create_table(
+ 'core__attachment_files',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('media_entry', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=False),
+ sa.Column('filepath', sa.Unicode(),
+ nullable=True),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.ForeignKeyConstraint(['media_entry'], ['core__media_entries.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__collection_items',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('collection', sa.Integer(), nullable=False),
+ sa.Column('note', sa.UnicodeText(), nullable=True),
+ sa.Column('added', sa.DateTime(), nullable=False),
+ sa.Column('position', sa.Integer(), nullable=True),
+ sa.Column('object_id', sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(['collection'], ['core__collections.id']),
+ sa.ForeignKeyConstraint(['object_id'],
+ ['core__generic_model_reference.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('collection', 'object_id'))
+
+ op.create_index(
+ op.f('ix_core__collection_items_object_id'), 'core__collection_items',
+ ['object_id'], unique=False)
+
+ op.create_table(
+ 'core__comment_subscriptions',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('created', sa.DateTime(), nullable=False),
+ sa.Column('media_entry_id', sa.Integer(), nullable=False),
+ sa.Column('user_id', sa.Integer(), nullable=False),
+ sa.Column('notify', sa.Boolean(), nullable=False),
+ sa.Column('send_email', sa.Boolean(), nullable=False),
+ sa.ForeignKeyConstraint(['media_entry_id'], ['core__media_entries.id']),
+ sa.ForeignKeyConstraint(['user_id'], ['core__users.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_table(
+ 'core__media_tags',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('media_entry', sa.Integer(), nullable=False),
+ sa.Column('tag', sa.Integer(), nullable=False),
+ sa.Column('name', sa.Unicode(), nullable=True),
+ sa.ForeignKeyConstraint(['media_entry'], ['core__media_entries.id']),
+ sa.ForeignKeyConstraint(['tag'], ['core__tags.id']),
+ sa.PrimaryKeyConstraint('id'),
+ sa.UniqueConstraint('tag', 'media_entry'))
+
+ op.create_index(
+ op.f('ix_core__media_tags_media_entry'), 'core__media_tags',
+ ['media_entry'], unique=False)
+
+ op.create_index(
+ op.f('ix_core__media_tags_tag'), 'core__media_tags',
+ ['tag'], unique=False)
+
+ op.create_table(
+ 'core__mediafiles',
+ sa.Column('media_entry', sa.Integer(), nullable=False),
+ sa.Column('name_id', sa.SmallInteger(), nullable=False),
+ sa.Column('file_path', sa.Unicode(), nullable=True),
+ sa.Column('file_metadata', sa.UnicodeText(),
+ nullable=True),
+ sa.ForeignKeyConstraint(['media_entry'], ['core__media_entries.id']),
+ sa.ForeignKeyConstraint(['name_id'], ['core__file_keynames.id']),
+ sa.PrimaryKeyConstraint('media_entry', 'name_id'))
+
+ op.create_table(
+ 'core__processing_metadata',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('media_entry_id', sa.Integer(), nullable=False),
+ sa.Column('callback_url', sa.Unicode(), nullable=True),
+ sa.ForeignKeyConstraint(['media_entry_id'], ['core__media_entries.id']),
+ sa.PrimaryKeyConstraint('id'))
+
+ op.create_index(
+ op.f('ix_core__processing_metadata_media_entry_id'),
+ 'core__processing_metadata', ['media_entry_id'], unique=False)
+
+def downgrade():
+ # Downgrading from a first revision is nonsense.
+ pass
diff --git a/mediagoblin/db/migrations/versions/8429e33fdf7_remove_the_graveyard_objects_from_.py b/mediagoblin/db/migrations/versions/8429e33fdf7_remove_the_graveyard_objects_from_.py
new file mode 100644
index 00000000..978260df
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/8429e33fdf7_remove_the_graveyard_objects_from_.py
@@ -0,0 +1,62 @@
+"""Remove the Graveyard objects from CommentNotification objects
+
+Revision ID: 8429e33fdf7
+Revises: 101510e3a713
+Create Date: 2016-01-19 08:01:21.577274
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '8429e33fdf7'
+down_revision = '101510e3a713'
+
+from alembic import op
+from sqlalchemy import MetaData
+from sqlalchemy.sql import and_
+from mediagoblin.db.migration_tools import inspect_table
+
+def upgrade():
+ """
+    This migration is very similar to 101510e3a713. It removes Notification
+    objects which point at Graveyard (tombstone) objects. It also iterates
+    through any reports which might have been filed against such objects and
+    sets their object_id to None.
+ """
+ db = op.get_bind()
+ metadata = MetaData(bind=db)
+ notification_table = inspect_table(metadata, "core__notifications")
+ report_table = inspect_table(metadata, "core__reports")
+ graveyard_table = inspect_table(metadata, "core__graveyard")
+ gmr_table = inspect_table(metadata, "core__generic_model_reference")
+
+    res = list(db.execute(graveyard_table.select()))
+    for tombstone in res:
+        # Look up the GMR for the tombstone
+ gmr = db.execute(gmr_table.select().where(and_(
+ gmr_table.c.obj_pk == tombstone.id,
+ gmr_table.c.model_type == "core__graveyard"
+ ))).first()
+
+        # If there is no GMR, nothing (notification or report) can point at
+        # this tombstone, so skip it.
+ if gmr is None:
+ continue
+
+ # Delete all notifications which link to the GMR as that's invalid.
+ db.execute(notification_table.delete().where(
+ notification_table.c.object_id == gmr.id
+ ))
+
+        # Deal with reports. We don't want to delete these: a report should
+        # still exist even if the reported object was deleted, as that can be
+        # part of the resolution. Just set its object to None.
+ db.execute(report_table.update().where(
+ report_table.c.object_id == gmr.id
+ ).values(object_id=None))
+
+
+def downgrade():
+ """
+    There is nothing to do here as this was a data migration; it will
+    downgrade just fine without any steps. The deletions cannot be undone
+    anyway.
+ """
+ pass