Diffstat (limited to 'mediagoblin/db/migrations.py')
-rw-r--r--  mediagoblin/db/migrations.py  369
1 file changed, 364 insertions(+), 5 deletions(-)
diff --git a/mediagoblin/db/migrations.py b/mediagoblin/db/migrations.py
index 04588ad1..74c1194f 100644
--- a/mediagoblin/db/migrations.py
+++ b/mediagoblin/db/migrations.py
@@ -17,19 +17,26 @@
import datetime
import uuid
+import six
+
+if six.PY2:
+ import migrate
+
+import pytz
+import dateutil.tz
from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
Integer, Unicode, UnicodeText, DateTime,
ForeignKey, Date, Index)
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import and_
-from migrate.changeset.constraint import UniqueConstraint
+from sqlalchemy.schema import UniqueConstraint
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
from mediagoblin.db.migration_tools import (
RegisterMigration, inspect_table, replace_table_hack)
from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
- Privilege)
+ Privilege, Generator)
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
@@ -249,7 +256,7 @@ def mediaentry_new_slug_era(db):
for row in db.execute(media_table.select()):
# no slug, try setting to an id
if not row.slug:
- append_garbage_till_unique(row, unicode(row.id))
+ append_garbage_till_unique(row, six.text_type(row.id))
# has "=" or ":" in it... we're getting rid of those
elif u"=" in row.slug or u":" in row.slug:
append_garbage_till_unique(
@@ -278,7 +285,7 @@ def unique_collections_slug(db):
existing_slugs[row.creator].append(row.slug)
for row_id in slugs_to_change:
- new_slug = unicode(uuid.uuid4())
+ new_slug = six.text_type(uuid.uuid4())
db.execute(collection_table.update().
where(collection_table.c.id == row_id).
values(slug=new_slug))
@@ -578,7 +585,6 @@ PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':u'admin'},
{'privilege_name':u'commenter'},
{'privilege_name':u'active'}]
-
# vR1 stands for "version Rename 1". This only exists because we need
# to deal with dropping some booleans and it's otherwise impossible
# with sqlite.
@@ -890,3 +896,356 @@ def revert_username_index(db):
db.rollback()
db.commit()
+
+class Generator_R0(declarative_base()):
+ __tablename__ = "core__generators"
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode, nullable=False)
+ published = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ object_type = Column(Unicode, nullable=False)
+
+class ActivityIntermediator_R0(declarative_base()):
+ __tablename__ = "core__activity_intermediators"
+ id = Column(Integer, primary_key=True)
+ type = Column(Unicode, nullable=False)
+
+class Activity_R0(declarative_base()):
+ __tablename__ = "core__activities"
+ id = Column(Integer, primary_key=True)
+ actor = Column(Integer, ForeignKey(User.id), nullable=False)
+ published = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ verb = Column(Unicode, nullable=False)
+ content = Column(Unicode, nullable=True)
+ title = Column(Unicode, nullable=True)
+ generator = Column(Integer, ForeignKey(Generator_R0.id), nullable=True)
+ object = Column(Integer,
+ ForeignKey(ActivityIntermediator_R0.id),
+ nullable=False)
+ target = Column(Integer,
+ ForeignKey(ActivityIntermediator_R0.id),
+ nullable=True)
+
+@RegisterMigration(24, MIGRATIONS)
+def activity_migration(db):
+ """
+ Creates everything needed to support activities in GMG
+ - Adds the Activity, ActivityIntermediator and Generator tables
+ - Creates the GMG service Generator for activities produced by the server
+ - Adds an activity foreign key column to each object/target model
+ - Retroactively adds activities for what we can accurately work out
+ """
+ # Set constants we'll use later
+ FOREIGN_KEY = "core__activity_intermediators.id"
+ ACTIVITY_COLUMN = "activity"
+
+ # Create the new tables.
+ ActivityIntermediator_R0.__table__.create(db.bind)
+ Generator_R0.__table__.create(db.bind)
+ Activity_R0.__table__.create(db.bind)
+ db.commit()
+
+ # Inspect the tables we want to use later
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+ activity_table = inspect_table(metadata, "core__activities")
+ generator_table = inspect_table(metadata, "core__generators")
+ collection_table = inspect_table(metadata, "core__collections")
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+ media_comments_table = inspect_table(metadata, "core__media_comments")
+ ai_table = inspect_table(metadata, "core__activity_intermediators")
+
+
+ # Create the foundations for Generator
+ db.execute(generator_table.insert().values(
+ name="GNU Mediagoblin",
+ object_type="service",
+ published=datetime.datetime.now(),
+ updated=datetime.datetime.now()
+ ))
+ db.commit()
+
+ # Get the ID of that generator
+ gmg_generator = db.execute(generator_table.select(
+ generator_table.c.name==u"GNU Mediagoblin")).first()
+
+
+ # Now we want to modify the tables which MAY have an activity at some point
+ media_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ media_col.create(media_entry_table)
+
+ user_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ user_col.create(user_table)
+
+ comments_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ comments_col.create(media_comments_table)
+
+ collection_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ collection_col.create(collection_table)
+ db.commit()
+
+
+ # Now we want to retroactively add what activities we can.
+ # First we'll add activities for when people uploaded media.
+ # These can't have content as it's not feasible to recover the
+ # correct content strings.
+ for media in db.execute(media_entry_table.select()):
+ # Now we want to create the intermediator
+ db_ai = db.execute(ai_table.insert().values(
+ type="media",
+ ))
+ db_ai = db.execute(ai_table.select(
+ ai_table.c.id==db_ai.inserted_primary_key[0]
+ )).first()
+
+ # Add the activity
+ activity = {
+ "verb": "create",
+ "actor": media.uploader,
+ "published": media.created,
+ "updated": media.created,
+ "generator": gmg_generator.id,
+ "object": db_ai.id
+ }
+ db.execute(activity_table.insert().values(**activity))
+
+ # Add the AI to the media.
+ db.execute(media_entry_table.update().values(
+ activity=db_ai.id
+ ).where(media_entry_table.c.id==media.id))
+
+ # Now we want to add activities for all the comments people made
+ for comment in db.execute(media_comments_table.select()):
+ # Get the MediaEntry for the comment
+ media_entry = db.execute(
+ media_entry_table.select(
+ media_entry_table.c.id==comment.media_entry
+ )).first()
+
+ # Get the AI of the comment's parent media entry (the target)
+ db_ai_media = db.execute(ai_table.select(
+ ai_table.c.id==media_entry.activity
+ )).first().id
+
+ db.execute(
+ media_comments_table.update().values(
+ activity=db_ai_media
+ ).where(media_comments_table.c.id==comment.id))
+
+ # Now create the AI for the comment
+ db_ai_comment = db.execute(ai_table.insert().values(
+ type="comment"
+ )).inserted_primary_key[0]
+
+ activity = {
+ "verb": "comment",
+ "actor": comment.author,
+ "published": comment.created,
+ "updated": comment.created,
+ "generator": gmg_generator.id,
+ "object": db_ai_comment,
+ "target": db_ai_media,
+ }
+
+ # Now insert the comment activity
+ db.execute(activity_table.insert().values(**activity))
+
+ # Now add the activity to the comment
+ db.execute(media_comments_table.update().values(
+ activity=db_ai_comment
+ ).where(media_comments_table.c.id==comment.id))
+
+ # Create 'create' activities for all collections
+ for collection in db.execute(collection_table.select()):
+ # create AI
+ db_ai = db.execute(ai_table.insert().values(
+ type="collection"
+ ))
+ db_ai = db.execute(ai_table.select(
+ ai_table.c.id==db_ai.inserted_primary_key[0]
+ )).first()
+
+ # Now link the collection to the AI
+ db.execute(collection_table.update().values(
+ activity=db_ai.id
+ ).where(collection_table.c.id==collection.id))
+
+ activity = {
+ "verb": "create",
+ "actor": collection.creator,
+ "published": collection.created,
+ "updated": collection.created,
+ "generator": gmg_generator.id,
+ "object": db_ai.id,
+ }
+
+ db.execute(activity_table.insert().values(**activity))
+
+ # Now add the activity to the collection
+ db.execute(collection_table.update().values(
+ activity=db_ai.id
+ ).where(collection_table.c.id==collection.id))
+
+ db.commit()
+
+class Location_V0(declarative_base()):
+ __tablename__ = "core__locations"
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode)
+ position = Column(MutationDict.as_mutable(JSONEncoded))
+ address = Column(MutationDict.as_mutable(JSONEncoded))
+
+@RegisterMigration(25, MIGRATIONS)
+def add_location_model(db):
+ """ Add location model """
+ metadata = MetaData(bind=db.bind)
+
+ # Create location table
+ Location_V0.__table__.create(db.bind)
+ db.commit()
+
+ # Inspect the tables we need
+ user = inspect_table(metadata, "core__users")
+ collections = inspect_table(metadata, "core__collections")
+ media_entry = inspect_table(metadata, "core__media_entries")
+ media_comments = inspect_table(metadata, "core__media_comments")
+
+ # Now add location support to the various models
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(user)
+
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(collections)
+
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(media_entry)
+
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(media_comments)
+
+ db.commit()
+
+@RegisterMigration(26, MIGRATIONS)
+def datetime_to_utc(db):
+ """ Convert datetime stamps to UTC """
+ # Get the server's timezone; this is the zone the stored timestamps are in
+ server_timezone = dateutil.tz.tzlocal()
+
+ ##
+ # Look up all the timestamps and convert them to UTC
+ ##
+ metadata = MetaData(bind=db.bind)
+
+ def dt_to_utc(dt):
+ # Attach the server's timezone
+ dt = dt.replace(tzinfo=server_timezone)
+
+ # Convert to UTC
+ return dt.astimezone(pytz.UTC)
+
+ # Convert the User model
+ user_table = inspect_table(metadata, "core__users")
+ for user in db.execute(user_table.select()):
+ db.execute(user_table.update().values(
+ created=dt_to_utc(user.created)
+ ).where(user_table.c.id==user.id))
+
+ # Convert Client
+ client_table = inspect_table(metadata, "core__clients")
+ for client in db.execute(client_table.select()):
+ db.execute(client_table.update().values(
+ created=dt_to_utc(client.created),
+ updated=dt_to_utc(client.updated)
+ ).where(client_table.c.id==client.id))
+
+ # Convert RequestToken
+ rt_table = inspect_table(metadata, "core__request_tokens")
+ for request_token in db.execute(rt_table.select()):
+ db.execute(rt_table.update().values(
+ created=dt_to_utc(request_token.created),
+ updated=dt_to_utc(request_token.updated)
+ ).where(rt_table.c.token==request_token.token))
+
+ # Convert AccessToken
+ at_table = inspect_table(metadata, "core__access_tokens")
+ for access_token in db.execute(at_table.select()):
+ db.execute(at_table.update().values(
+ created=dt_to_utc(access_token.created),
+ updated=dt_to_utc(access_token.updated)
+ ).where(at_table.c.token==access_token.token))
+
+ # Convert MediaEntry
+ media_table = inspect_table(metadata, "core__media_entries")
+ for media in db.execute(media_table.select()):
+ db.execute(media_table.update().values(
+ created=dt_to_utc(media.created)
+ ).where(media_table.c.id==media.id))
+
+ # Convert Media Attachment File
+ media_attachment_table = inspect_table(metadata, "core__attachment_files")
+ for ma in db.execute(media_attachment_table.select()):
+ db.execute(media_attachment_table.update().values(
+ created=dt_to_utc(ma.created)
+ ).where(media_attachment_table.c.id==ma.id))
+
+ # Convert MediaComment
+ comment_table = inspect_table(metadata, "core__media_comments")
+ for comment in db.execute(comment_table.select()):
+ db.execute(comment_table.update().values(
+ created=dt_to_utc(comment.created)
+ ).where(comment_table.c.id==comment.id))
+
+ # Convert Collection
+ collection_table = inspect_table(metadata, "core__collections")
+ for collection in db.execute(collection_table.select()):
+ db.execute(collection_table.update().values(
+ created=dt_to_utc(collection.created)
+ ).where(collection_table.c.id==collection.id))
+
+ # Convert Collection Item
+ collection_item_table = inspect_table(metadata, "core__collection_items")
+ for ci in db.execute(collection_item_table.select()):
+ db.execute(collection_item_table.update().values(
+ added=dt_to_utc(ci.added)
+ ).where(collection_item_table.c.id==ci.id))
+
+ # Convert Comment subscription
+ comment_sub = inspect_table(metadata, "core__comment_subscriptions")
+ for sub in db.execute(comment_sub.select()):
+ db.execute(comment_sub.update().values(
+ created=dt_to_utc(sub.created)
+ ).where(comment_sub.c.id==sub.id))
+
+ # Convert Notification
+ notification_table = inspect_table(metadata, "core__notifications")
+ for notification in db.execute(notification_table.select()):
+ db.execute(notification_table.update().values(
+ created=dt_to_utc(notification.created)
+ ).where(notification_table.c.id==notification.id))
+
+ # Convert ReportBase
+ reportbase_table = inspect_table(metadata, "core__reports")
+ for report in db.execute(reportbase_table.select()):
+ db.execute(reportbase_table.update().values(
+ created=dt_to_utc(report.created)
+ ).where(reportbase_table.c.id==report.id))
+
+ # Convert Generator
+ generator_table = inspect_table(metadata, "core__generators")
+ for generator in db.execute(generator_table.select()):
+ db.execute(generator_table.update().values(
+ published=dt_to_utc(generator.published),
+ updated=dt_to_utc(generator.updated)
+ ).where(generator_table.c.id==generator.id))
+
+ # Convert Activity
+ activity_table = inspect_table(metadata, "core__activities")
+ for activity in db.execute(activity_table.select()):
+ db.execute(activity_table.update().values(
+ published=dt_to_utc(activity.published),
+ updated=dt_to_utc(activity.updated)
+ ).where(activity_table.c.id==activity.id))
+
+ # Commit this to the database
+ db.commit()
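
For reference, migration 24 adds its new "activity" foreign key columns through
Column(...).create(table) calls, which come from sqlalchemy-migrate's changeset
extension (hence the Python-2-only "import migrate" at the top of the file). A
minimal sketch of roughly what each of those calls boils down to, assuming a
hypothetical standalone engine URL and a backend that accepts ALTER TABLE ...
ADD COLUMN with a REFERENCES clause:

    from sqlalchemy import create_engine, text

    # Hypothetical database URL; substitute the instance's real one.
    engine = create_engine("sqlite:///mediagoblin.db")

    with engine.begin() as conn:
        # Roughly what media_col.create(media_entry_table) performs: add a
        # nullable "activity" column pointing at core__activity_intermediators.id.
        conn.execute(text(
            "ALTER TABLE core__media_entries "
            "ADD COLUMN activity INTEGER "
            "REFERENCES core__activity_intermediators (id)"
        ))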
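
Migration 25's Location_V0 table stores position and address as JSON-encoded
dicts and is referenced through a new nullable "location" column on users,
collections, media entries and comments. A rough illustration of the resulting
schema, linking a location row to a user with plain SQLAlchemy Core against an
already-migrated database (the engine URL and the user id are illustrative
assumptions):

    import json

    from sqlalchemy import MetaData, create_engine

    # Hypothetical engine; point it at a database that has run migration 25.
    engine = create_engine("sqlite:///mediagoblin.db")
    metadata = MetaData()
    metadata.reflect(bind=engine)

    locations = metadata.tables["core__locations"]
    users = metadata.tables["core__users"]

    with engine.begin() as conn:
        # JSONEncoded columns hold JSON text, so serialise the dict by hand
        # here because reflection does not know about the custom type.
        result = conn.execute(locations.insert().values(
            name=u"Example venue",
            position=json.dumps({"latitude": 0.0, "longitude": 0.0}),
        ))
        location_id = result.inserted_primary_key[0]

        # Point an existing user (id 1, purely illustrative) at the location.
        conn.execute(users.update().values(location=location_id)
                     .where(users.c.id == 1))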
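
The dt_to_utc() helper in migration 26 assumes every stored timestamp is a
naive datetime written in the server's local zone, attaches that zone with
replace(), and then converts it to UTC. The same logic in isolation, with an
explicit example zone standing in for dateutil.tz.tzlocal():

    import datetime

    import dateutil.tz
    import pytz

    # Stand-in for the server zone; the migration uses dateutil.tz.tzlocal().
    server_timezone = dateutil.tz.gettz("Europe/Paris")

    # A naive timestamp, as read back from the database.
    naive = datetime.datetime(2014, 7, 1, 12, 0, 0)

    # Attach the server zone, then convert to UTC, mirroring dt_to_utc().
    aware = naive.replace(tzinfo=server_timezone)
    utc = aware.astimezone(pytz.UTC)

    print(utc)  # 2014-07-01 10:00:00+00:00 (Paris is UTC+2 in July)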