aboutsummaryrefslogtreecommitdiffstats
path: root/mediagoblin/db
diff options
context:
space:
mode:
Diffstat (limited to 'mediagoblin/db')
-rw-r--r--mediagoblin/db/base.py33
-rw-r--r--mediagoblin/db/migration_tools.py58
-rw-r--r--mediagoblin/db/migrations.py369
-rw-r--r--mediagoblin/db/migrations/README57
-rw-r--r--mediagoblin/db/migrations/env.py71
-rw-r--r--mediagoblin/db/migrations/script.py.mako22
-rw-r--r--mediagoblin/db/migrations/versions/.gitkeep0
-rw-r--r--mediagoblin/db/mixin.py201
-rw-r--r--mediagoblin/db/models.py500
-rw-r--r--mediagoblin/db/open.py134
-rw-r--r--mediagoblin/db/util.py6
11 files changed, 1313 insertions, 138 deletions
diff --git a/mediagoblin/db/base.py b/mediagoblin/db/base.py
index c0cefdc2..6acb0b79 100644
--- a/mediagoblin/db/base.py
+++ b/mediagoblin/db/base.py
@@ -16,13 +16,26 @@
from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import scoped_session, sessionmaker, object_session
+from sqlalchemy import inspect
-Session = scoped_session(sessionmaker())
+from mediagoblin.tools.transition import DISABLE_GLOBALS
+
+if not DISABLE_GLOBALS:
+ from sqlalchemy.orm import scoped_session, sessionmaker
+ Session = scoped_session(sessionmaker())
class GMGTableBase(object):
- query = Session.query_property()
+ @property
+ def _session(self):
+ return inspect(self).session
+
+ @property
+ def _app(self):
+ return self._session.bind.app
+
+ if not DISABLE_GLOBALS:
+ query = Session.query_property()
def get(self, key):
return getattr(self, key)
@@ -31,16 +44,20 @@ class GMGTableBase(object):
# The key *has* to exist on sql.
return getattr(self, key)
- def save(self):
- sess = object_session(self)
- if sess is None:
+ def save(self, commit=True):
+ sess = self._session
+ if sess is None and not DISABLE_GLOBALS:
sess = Session()
+ assert sess is not None, "Can't save, %r has a detached session" % self
sess.add(self)
- sess.commit()
+ if commit:
+ sess.commit()
+ else:
+ sess.flush()
def delete(self, commit=True):
"""Delete the object and commit the change immediately by default"""
- sess = object_session(self)
+ sess = self._session
assert sess is not None, "Not going to delete detached %r" % self
sess.delete(self)
if commit:
diff --git a/mediagoblin/db/migration_tools.py b/mediagoblin/db/migration_tools.py
index e39070c3..fae98643 100644
--- a/mediagoblin/db/migration_tools.py
+++ b/mediagoblin/db/migration_tools.py
@@ -14,14 +14,68 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from __future__ import unicode_literals
+
+import logging
+import os
+
+from alembic import command
+from alembic.config import Config
+from alembic.migration import MigrationContext
+
+from mediagoblin.db.base import Base
from mediagoblin.tools.common import simple_printer
from sqlalchemy import Table
from sqlalchemy.sql import select
+log = logging.getLogger(__name__)
+
+
class TableAlreadyExists(Exception):
pass
+class AlembicMigrationManager(object):
+
+ def __init__(self, session):
+ root_dir = os.path.abspath(os.path.dirname(os.path.dirname(
+ os.path.dirname(__file__))))
+ alembic_cfg_path = os.path.join(root_dir, 'alembic.ini')
+ self.alembic_cfg = Config(alembic_cfg_path)
+ self.session = session
+
+ def get_current_revision(self):
+ context = MigrationContext.configure(self.session.bind)
+ return context.get_current_revision()
+
+ def upgrade(self, version):
+ return command.upgrade(self.alembic_cfg, version or 'head')
+
+ def downgrade(self, version):
+ if isinstance(version, int) or version is None or version.isdigit():
+ version = 'base'
+ return command.downgrade(self.alembic_cfg, version)
+
+ def stamp(self, revision):
+ return command.stamp(self.alembic_cfg, revision=revision)
+
+ def init_tables(self):
+ Base.metadata.create_all(self.session.bind)
+ # load the Alembic configuration and generate the
+ # version table, "stamping" it with the most recent rev:
+ # XXX: we need to find a better way to detect current installations
+ # using sqlalchemy-migrate because we don't have to create all table
+ # for them
+ command.stamp(self.alembic_cfg, 'head')
+
+ def init_or_migrate(self, version=None):
+ # XXX: we need to call this method when we ditch
+ # sqlalchemy-migrate entirely
+ # if self.get_current_revision() is None:
+ # self.init_tables()
+ self.upgrade(version)
+
+
class MigrationManager(object):
"""
Migration handling tool.
@@ -39,7 +93,7 @@ class MigrationManager(object):
- migration_registry: where we should find all migrations to
run
"""
- self.name = unicode(name)
+ self.name = name
self.models = models
self.foundations = foundations
self.session = session
@@ -230,7 +284,7 @@ class MigrationManager(object):
for migration_number, migration_func in migrations_to_run:
self.printer(
u' + Running migration %s, "%s"... ' % (
- migration_number, migration_func.func_name))
+ migration_number, migration_func.__name__))
migration_func(self.session)
self.set_current_migration(migration_number)
self.printer('done.\n')
diff --git a/mediagoblin/db/migrations.py b/mediagoblin/db/migrations.py
index 04588ad1..74c1194f 100644
--- a/mediagoblin/db/migrations.py
+++ b/mediagoblin/db/migrations.py
@@ -17,19 +17,26 @@
import datetime
import uuid
+import six
+
+if six.PY2:
+ import migrate
+
+import pytz
+import dateutil.tz
from sqlalchemy import (MetaData, Table, Column, Boolean, SmallInteger,
Integer, Unicode, UnicodeText, DateTime,
ForeignKey, Date, Index)
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.sql import and_
-from migrate.changeset.constraint import UniqueConstraint
+from sqlalchemy.schema import UniqueConstraint
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
from mediagoblin.db.migration_tools import (
RegisterMigration, inspect_table, replace_table_hack)
from mediagoblin.db.models import (MediaEntry, Collection, MediaComment, User,
- Privilege)
+ Privilege, Generator)
from mediagoblin.db.extratypes import JSONEncoded, MutationDict
@@ -249,7 +256,7 @@ def mediaentry_new_slug_era(db):
for row in db.execute(media_table.select()):
# no slug, try setting to an id
if not row.slug:
- append_garbage_till_unique(row, unicode(row.id))
+ append_garbage_till_unique(row, six.text_type(row.id))
# has "=" or ":" in it... we're getting rid of those
elif u"=" in row.slug or u":" in row.slug:
append_garbage_till_unique(
@@ -278,7 +285,7 @@ def unique_collections_slug(db):
existing_slugs[row.creator].append(row.slug)
for row_id in slugs_to_change:
- new_slug = unicode(uuid.uuid4())
+ new_slug = six.text_type(uuid.uuid4())
db.execute(collection_table.update().
where(collection_table.c.id == row_id).
values(slug=new_slug))
@@ -578,7 +585,6 @@ PRIVILEGE_FOUNDATIONS_v0 = [{'privilege_name':u'admin'},
{'privilege_name':u'commenter'},
{'privilege_name':u'active'}]
-
# vR1 stands for "version Rename 1". This only exists because we need
# to deal with dropping some booleans and it's otherwise impossible
# with sqlite.
@@ -890,3 +896,356 @@ def revert_username_index(db):
db.rollback()
db.commit()
+
+class Generator_R0(declarative_base()):
+ __tablename__ = "core__generators"
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode, nullable=False)
+ published = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ object_type = Column(Unicode, nullable=False)
+
+class ActivityIntermediator_R0(declarative_base()):
+ __tablename__ = "core__activity_intermediators"
+ id = Column(Integer, primary_key=True)
+ type = Column(Unicode, nullable=False)
+
+class Activity_R0(declarative_base()):
+ __tablename__ = "core__activities"
+ id = Column(Integer, primary_key=True)
+ actor = Column(Integer, ForeignKey(User.id), nullable=False)
+ published = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ verb = Column(Unicode, nullable=False)
+ content = Column(Unicode, nullable=True)
+ title = Column(Unicode, nullable=True)
+ generator = Column(Integer, ForeignKey(Generator_R0.id), nullable=True)
+ object = Column(Integer,
+ ForeignKey(ActivityIntermediator_R0.id),
+ nullable=False)
+ target = Column(Integer,
+ ForeignKey(ActivityIntermediator_R0.id),
+ nullable=True)
+
+@RegisterMigration(24, MIGRATIONS)
+def activity_migration(db):
+ """
+ Creates everything to create activities in GMG
+ - Adds Activity, ActivityIntermediator and Generator table
+ - Creates GMG service generator for activities produced by the server
+ - Adds the activity_as_object and activity_as_target to objects/targets
+ - Retroactively adds activities for what we can accurately work out
+ """
+ # Set constants we'll use later
+ FOREIGN_KEY = "core__activity_intermediators.id"
+ ACTIVITY_COLUMN = "activity"
+
+ # Create the new tables.
+ ActivityIntermediator_R0.__table__.create(db.bind)
+ Generator_R0.__table__.create(db.bind)
+ Activity_R0.__table__.create(db.bind)
+ db.commit()
+
+ # Initiate the tables we want to use later
+ metadata = MetaData(bind=db.bind)
+ user_table = inspect_table(metadata, "core__users")
+ activity_table = inspect_table(metadata, "core__activities")
+ generator_table = inspect_table(metadata, "core__generators")
+ collection_table = inspect_table(metadata, "core__collections")
+ media_entry_table = inspect_table(metadata, "core__media_entries")
+ media_comments_table = inspect_table(metadata, "core__media_comments")
+ ai_table = inspect_table(metadata, "core__activity_intermediators")
+
+
+ # Create the foundations for Generator
+ db.execute(generator_table.insert().values(
+ name="GNU Mediagoblin",
+ object_type="service",
+ published=datetime.datetime.now(),
+ updated=datetime.datetime.now()
+ ))
+ db.commit()
+
+ # Get the ID of that generator
+ gmg_generator = db.execute(generator_table.select(
+ generator_table.c.name==u"GNU Mediagoblin")).first()
+
+
+ # Now we want to modify the tables which MAY have an activity at some point
+ media_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ media_col.create(media_entry_table)
+
+ user_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ user_col.create(user_table)
+
+ comments_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ comments_col.create(media_comments_table)
+
+ collection_col = Column(ACTIVITY_COLUMN, Integer, ForeignKey(FOREIGN_KEY))
+ collection_col.create(collection_table)
+ db.commit()
+
+
+ # Now we want to retroactively add what activities we can
+ # first we'll add activities when people uploaded media.
+ # these can't have content as it's not feasible to get the
+ # correct content strings.
+ for media in db.execute(media_entry_table.select()):
+ # Now we want to create the intermediary
+ db_ai = db.execute(ai_table.insert().values(
+ type="media",
+ ))
+ db_ai = db.execute(ai_table.select(
+ ai_table.c.id==db_ai.inserted_primary_key[0]
+ )).first()
+
+ # Add the activity
+ activity = {
+ "verb": "create",
+ "actor": media.uploader,
+ "published": media.created,
+ "updated": media.created,
+ "generator": gmg_generator.id,
+ "object": db_ai.id
+ }
+ db.execute(activity_table.insert().values(**activity))
+
+ # Add the AI to the media.
+ db.execute(media_entry_table.update().values(
+ activity=db_ai.id
+ ).where(media_entry_table.c.id==media.id))
+
+ # Now we want to add all the comments people made
+ for comment in db.execute(media_comments_table.select()):
+ # Get the MediaEntry for the comment
+ media_entry = db.execute(
+ media_entry_table.select(
+ media_entry_table.c.id==comment.media_entry
+ )).first()
+
+ # Create an AI for target
+ db_ai_media = db.execute(ai_table.select(
+ ai_table.c.id==media_entry.activity
+ )).first().id
+
+ db.execute(
+ media_comments_table.update().values(
+ activity=db_ai_media
+ ).where(media_comments_table.c.id==media_entry.id))
+
+ # Now create the AI for the comment
+ db_ai_comment = db.execute(ai_table.insert().values(
+ type="comment"
+ )).inserted_primary_key[0]
+
+ activity = {
+ "verb": "comment",
+ "actor": comment.author,
+ "published": comment.created,
+ "updated": comment.created,
+ "generator": gmg_generator.id,
+ "object": db_ai_comment,
+ "target": db_ai_media,
+ }
+
+ # Now add the comment object
+ db.execute(activity_table.insert().values(**activity))
+
+ # Now add activity to comment
+ db.execute(media_comments_table.update().values(
+ activity=db_ai_comment
+ ).where(media_comments_table.c.id==comment.id))
+
+ # Create 'create' activities for all collections
+ for collection in db.execute(collection_table.select()):
+ # create AI
+ db_ai = db.execute(ai_table.insert().values(
+ type="collection"
+ ))
+ db_ai = db.execute(ai_table.select(
+ ai_table.c.id==db_ai.inserted_primary_key[0]
+ )).first()
+
+ # Now link the collection to the AI
+ db.execute(collection_table.update().values(
+ activity=db_ai.id
+ ).where(collection_table.c.id==collection.id))
+
+ activity = {
+ "verb": "create",
+ "actor": collection.creator,
+ "published": collection.created,
+ "updated": collection.created,
+ "generator": gmg_generator.id,
+ "object": db_ai.id,
+ }
+
+ db.execute(activity_table.insert().values(**activity))
+
+ # Now add the activity to the collection
+ db.execute(collection_table.update().values(
+ activity=db_ai.id
+ ).where(collection_table.c.id==collection.id))
+
+ db.commit()
+
+class Location_V0(declarative_base()):
+ __tablename__ = "core__locations"
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode)
+ position = Column(MutationDict.as_mutable(JSONEncoded))
+ address = Column(MutationDict.as_mutable(JSONEncoded))
+
+@RegisterMigration(25, MIGRATIONS)
+def add_location_model(db):
+ """ Add location model """
+ metadata = MetaData(bind=db.bind)
+
+ # Create location table
+ Location_V0.__table__.create(db.bind)
+ db.commit()
+
+ # Inspect the tables we need
+ user = inspect_table(metadata, "core__users")
+ collections = inspect_table(metadata, "core__collections")
+ media_entry = inspect_table(metadata, "core__media_entries")
+ media_comments = inspect_table(metadata, "core__media_comments")
+
+ # Now add location support to the various models
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(user)
+
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(collections)
+
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(media_entry)
+
+ col = Column("location", Integer, ForeignKey(Location_V0.id))
+ col.create(media_comments)
+
+ db.commit()
+
+@RegisterMigration(26, MIGRATIONS)
+def datetime_to_utc(db):
+ """ Convert datetime stamps to UTC """
+ # Get the server's timezone, this is what the database has stored
+ server_timezone = dateutil.tz.tzlocal()
+
+ ##
+ # Look up all the timestamps and convert them to UTC
+ ##
+ metadata = MetaData(bind=db.bind)
+
+ def dt_to_utc(dt):
+ # Add the current timezone
+ dt = dt.replace(tzinfo=server_timezone)
+
+ # Convert to UTC
+ return dt.astimezone(pytz.UTC)
+
+ # Convert the User model
+ user_table = inspect_table(metadata, "core__users")
+ for user in db.execute(user_table.select()):
+ db.execute(user_table.update().values(
+ created=dt_to_utc(user.created)
+ ).where(user_table.c.id==user.id))
+
+ # Convert Client
+ client_table = inspect_table(metadata, "core__clients")
+ for client in db.execute(client_table.select()):
+ db.execute(client_table.update().values(
+ created=dt_to_utc(client.created),
+ updated=dt_to_utc(client.updated)
+ ).where(client_table.c.id==client.id))
+
+ # Convert RequestToken
+ rt_table = inspect_table(metadata, "core__request_tokens")
+ for request_token in db.execute(rt_table.select()):
+ db.execute(rt_table.update().values(
+ created=dt_to_utc(request_token.created),
+ updated=dt_to_utc(request_token.updated)
+ ).where(rt_table.c.token==request_token.token))
+
+ # Convert AccessToken
+ at_table = inspect_table(metadata, "core__access_tokens")
+ for access_token in db.execute(at_table.select()):
+ db.execute(at_table.update().values(
+ created=dt_to_utc(access_token.created),
+ updated=dt_to_utc(access_token.updated)
+ ).where(at_table.c.token==access_token.token))
+
+ # Convert MediaEntry
+ media_table = inspect_table(metadata, "core__media_entries")
+ for media in db.execute(media_table.select()):
+ db.execute(media_table.update().values(
+ created=dt_to_utc(media.created)
+ ).where(media_table.c.id==media.id))
+
+ # Convert Media Attachment File
+ media_attachment_table = inspect_table(metadata, "core__attachment_files")
+ for ma in db.execute(media_attachment_table.select()):
+ db.execute(media_attachment_table.update().values(
+ created=dt_to_utc(ma.created)
+ ).where(media_attachment_table.c.id==ma.id))
+
+ # Convert MediaComment
+ comment_table = inspect_table(metadata, "core__media_comments")
+ for comment in db.execute(comment_table.select()):
+ db.execute(comment_table.update().values(
+ created=dt_to_utc(comment.created)
+ ).where(comment_table.c.id==comment.id))
+
+ # Convert Collection
+ collection_table = inspect_table(metadata, "core__collections")
+ for collection in db.execute(collection_table.select()):
+ db.execute(collection_table.update().values(
+ created=dt_to_utc(collection.created)
+ ).where(collection_table.c.id==collection.id))
+
+ # Convert Collection Item
+ collection_item_table = inspect_table(metadata, "core__collection_items")
+ for ci in db.execute(collection_item_table.select()):
+ db.execute(collection_item_table.update().values(
+ added=dt_to_utc(ci.added)
+ ).where(collection_item_table.c.id==ci.id))
+
+ # Convert Comment subscription
+ comment_sub = inspect_table(metadata, "core__comment_subscriptions")
+ for sub in db.execute(comment_sub.select()):
+ db.execute(comment_sub.update().values(
+ created=dt_to_utc(sub.created)
+ ).where(comment_sub.c.id==sub.id))
+
+ # Convert Notification
+ notification_table = inspect_table(metadata, "core__notifications")
+ for notification in db.execute(notification_table.select()):
+ db.execute(notification_table.update().values(
+ created=dt_to_utc(notification.created)
+ ).where(notification_table.c.id==notification.id))
+
+ # Convert ReportBase
+ reportbase_table = inspect_table(metadata, "core__reports")
+ for report in db.execute(reportbase_table.select()):
+ db.execute(reportbase_table.update().values(
+ created=dt_to_utc(report.created)
+ ).where(reportbase_table.c.id==report.id))
+
+ # Convert Generator
+ generator_table = inspect_table(metadata, "core__generators")
+ for generator in db.execute(generator_table.select()):
+ db.execute(generator_table.update().values(
+ published=dt_to_utc(generator.published),
+ updated=dt_to_utc(generator.updated)
+ ).where(generator_table.c.id==generator.id))
+
+ # Convert Activity
+ activity_table = inspect_table(metadata, "core__activities")
+ for activity in db.execute(activity_table.select()):
+ db.execute(activity_table.update().values(
+ published=dt_to_utc(activity.published),
+ updated=dt_to_utc(activity.updated)
+ ).where(activity_table.c.id==activity.id))
+
+ # Commit this to the database
+ db.commit()
diff --git a/mediagoblin/db/migrations/README b/mediagoblin/db/migrations/README
new file mode 100644
index 00000000..93d85eff
--- /dev/null
+++ b/mediagoblin/db/migrations/README
@@ -0,0 +1,57 @@
+Migration Guide
+---------------
+
+Alembic comes with a CLI called ``alembic``.
+
+Create a Migration
+^^^^^^^^^^^^^^^^^^
+
+Let's create our first migration::
+
+ $ alembic revision -m "add favourite_band field"
+ Generating
+ /your/gmg/path/mediagoblin/db/migrations/versions/1e3793de36a_add_favourite_band_field.py ... done
+
+By default, migration files have two methods: ``upgrade`` and ``downgrade``.
+Alembic will invoke these methods to apply the migrations to your current
+database.
+
+Now, we need to edit our newly created migration file
+``1e3793de36a_add_favourite_band_field.py`` to add a new column ``favourite_band``
+to ``core__users`` table::
+
+ def upgrade():
+ op.add_column('core__users', sa.Column('favourite_band', sa.Unicode(100)))
+
+
+ def downgrade():
+ op.drop_column('core__users', 'favourite_band')
+
+.. note::
+
+ Alembic can also generate `automatic migrations <http://alembic.readthedocs.org/en/latest/tutorial.html#auto-generating-migrations>`__.
+
+Then we can run ``gmg dbupdate`` to apply the new migration::
+
+ $ gmg dbupdate
+ INFO [alembic.migration] Context impl SQLiteImpl.
+ INFO [alembic.migration] Will assume non-transactional DDL.
+ INFO [alembic.migration] Running upgrade None -> 1e3793de36a, add favourite band field
+
+If you want to revert that migration, simply run::
+
+ $ alembic downgrade -1
+
+.. warning::
+
+ Currently, Alembic cannot do ``DROP COLUMN``, ``ALTER COLUMN`` etc.
+ operations in SQLite. Please see https://bitbucket.org/zzzeek/alembic/issue/21/column-renames-not-supported-on-sqlite
+ for detailed information.
+
+Glossary
+^^^^^^^^
+
+* ``alembic.ini``: The Alembic configuration file. The ``alembic`` CLI will
+ look for that file every time it is invoked.
+* ``mediagoblin/db/migrations/versions/``: Alembic will add new migration files
+ to this directory.
diff --git a/mediagoblin/db/migrations/env.py b/mediagoblin/db/migrations/env.py
new file mode 100644
index 00000000..712b6164
--- /dev/null
+++ b/mediagoblin/db/migrations/env.py
@@ -0,0 +1,71 @@
+from __future__ import with_statement
+from alembic import context
+from sqlalchemy import engine_from_config, pool
+from logging.config import fileConfig
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = None
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+def run_migrations_offline():
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(url=url, target_metadata=target_metadata)
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+def run_migrations_online():
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ engine = engine_from_config(
+ config.get_section(config.config_ini_section),
+ prefix='sqlalchemy.',
+ poolclass=pool.NullPool)
+
+ connection = engine.connect()
+ context.configure(
+ connection=connection,
+ target_metadata=target_metadata
+ )
+
+ try:
+ with context.begin_transaction():
+ context.run_migrations()
+ finally:
+ connection.close()
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
+
diff --git a/mediagoblin/db/migrations/script.py.mako b/mediagoblin/db/migrations/script.py.mako
new file mode 100644
index 00000000..95702017
--- /dev/null
+++ b/mediagoblin/db/migrations/script.py.mako
@@ -0,0 +1,22 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision}
+Create Date: ${create_date}
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+def upgrade():
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+ ${downgrades if downgrades else "pass"}
diff --git a/mediagoblin/db/migrations/versions/.gitkeep b/mediagoblin/db/migrations/versions/.gitkeep
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/mediagoblin/db/migrations/versions/.gitkeep
diff --git a/mediagoblin/db/mixin.py b/mediagoblin/db/mixin.py
index 1f2e7ec3..4602c709 100644
--- a/mediagoblin/db/mixin.py
+++ b/mediagoblin/db/mixin.py
@@ -31,17 +31,20 @@ import uuid
import re
from datetime import datetime
+from pytz import UTC
from werkzeug.utils import cached_property
-from mediagoblin import mg_globals
from mediagoblin.media_types import FileTypeNotSupported
from mediagoblin.tools import common, licenses
from mediagoblin.tools.pluginapi import hook_handle
from mediagoblin.tools.text import cleaned_markdown_conversion
from mediagoblin.tools.url import slugify
+from mediagoblin.tools.translate import pass_to_ugettext as _
class UserMixin(object):
+ object_type = "person"
+
@property
def bio_html(self):
return cleaned_markdown_conversion(self.bio)
@@ -84,42 +87,45 @@ class GenerateSlugMixin(object):
generated bits until it's unique. That'll be a little bit of junk,
but at least it has the basis of a nice slug.
"""
+
#Is already a slug assigned? Check if it is valid
if self.slug:
- self.slug = slugify(self.slug)
+ slug = slugify(self.slug)
# otherwise, try to use the title.
elif self.title:
# assign slug based on title
- self.slug = slugify(self.title)
+ slug = slugify(self.title)
- # We don't want any empty string slugs
- if self.slug == u"":
- self.slug = None
+ else:
+ # We don't have any information to set a slug
+ return
- # Do we have anything at this point?
- # If not, we're not going to get a slug
- # so just return... we're not going to force one.
- if not self.slug:
- return # giving up!
+ # We don't want any empty string slugs
+ if slug == u"":
+ return
# Otherwise, let's see if this is unique.
- if self.check_slug_used(self.slug):
+ if self.check_slug_used(slug):
# It looks like it's being used... lame.
# Can we just append the object's id to the end?
if self.id:
- slug_with_id = u"%s-%s" % (self.slug, self.id)
+ slug_with_id = u"%s-%s" % (slug, self.id)
if not self.check_slug_used(slug_with_id):
self.slug = slug_with_id
return # success!
# okay, still no success;
# let's whack junk on there till it's unique.
- self.slug += '-' + uuid.uuid4().hex[:4]
+ slug += '-' + uuid.uuid4().hex[:4]
# keep going if necessary!
- while self.check_slug_used(self.slug):
- self.slug += uuid.uuid4().hex[:4]
+ while self.check_slug_used(slug):
+ slug += uuid.uuid4().hex[:4]
+
+ # self.check_slug_used(slug) must be False now so we have a slug that
+ # we can use now.
+ self.slug = slug
class MediaEntryMixin(GenerateSlugMixin):
@@ -131,6 +137,11 @@ class MediaEntryMixin(GenerateSlugMixin):
return check_media_slug_used(self.uploader, slug, self.id)
@property
+ def object_type(self):
+ """ Converts media_type to pump-like type - don't use internally """
+ return self.media_type.split(".")[-1]
+
+ @property
def description_html(self):
"""
Rendered version of the description, run through
@@ -192,14 +203,14 @@ class MediaEntryMixin(GenerateSlugMixin):
# TODO: implement generic fallback in case MEDIA_MANAGER does
# not specify one?
if u'thumb' in self.media_files:
- thumb_url = mg_globals.app.public_store.file_url(
+ thumb_url = self._app.public_store.file_url(
self.media_files[u'thumb'])
else:
# No thumbnail in media available. Get the media's
# MEDIA_MANAGER for the fallback icon and return static URL
# Raises FileTypeNotSupported in case no such manager is enabled
manager = self.media_manager
- thumb_url = mg_globals.app.staticdirector(manager[u'default_thumb'])
+ thumb_url = self._app.staticdirector(manager[u'default_thumb'])
return thumb_url
@property
@@ -208,8 +219,8 @@ class MediaEntryMixin(GenerateSlugMixin):
will return self.thumb_url if original url doesn't exist"""
if u"original" not in self.media_files:
return self.thumb_url
-
- return mg_globals.app.public_store.file_url(
+
+ return self._app.public_store.file_url(
self.media_files[u"original"]
)
@@ -297,6 +308,8 @@ class MediaEntryMixin(GenerateSlugMixin):
class MediaCommentMixin(object):
+ object_type = "comment"
+
@property
def content_html(self):
"""
@@ -321,6 +334,8 @@ class MediaCommentMixin(object):
class CollectionMixin(GenerateSlugMixin):
+ object_type = "collection"
+
def check_slug_used(self, slug):
# import this here due to a cyclic import issue
# (db.models -> db.mixin -> db.util -> db.models)
@@ -363,3 +378,149 @@ class CollectionItemMixin(object):
Run through Markdown and the HTML cleaner.
"""
return cleaned_markdown_conversion(self.note)
+
+class ActivityMixin(object):
+ object_type = "activity"
+
+ VALID_VERBS = ["add", "author", "create", "delete", "dislike", "favorite",
+ "follow", "like", "post", "share", "unfavorite", "unfollow",
+ "unlike", "unshare", "update", "tag"]
+
+ def get_url(self, request):
+ return request.urlgen(
+ "mediagoblin.user_pages.activity_view",
+ username=self.get_actor.username,
+ id=self.id,
+ qualified=True
+ )
+
+ def generate_content(self):
+ """ Produces a HTML content for object """
+ # some of these have simple and targetted. If self.target is set
+ # it will pick the targetted. If they DON'T have a targetted version
+ # the information in targetted won't be added to the content.
+ verb_to_content = {
+ "add": {
+ "simple" : _("{username} added {object}"),
+ "targetted": _("{username} added {object} to {target}"),
+ },
+ "author": {"simple": _("{username} authored {object}")},
+ "create": {"simple": _("{username} created {object}")},
+ "delete": {"simple": _("{username} deleted {object}")},
+ "dislike": {"simple": _("{username} disliked {object}")},
+ "favorite": {"simple": _("{username} favorited {object}")},
+ "follow": {"simple": _("{username} followed {object}")},
+ "like": {"simple": _("{username} liked {object}")},
+ "post": {
+ "simple": _("{username} posted {object}"),
+ "targetted": _("{username} posted {object} to {target}"),
+ },
+ "share": {"simple": _("{username} shared {object}")},
+ "unfavorite": {"simple": _("{username} unfavorited {object}")},
+ "unfollow": {"simple": _("{username} stopped following {object}")},
+ "unlike": {"simple": _("{username} unliked {object}")},
+ "unshare": {"simple": _("{username} unshared {object}")},
+ "update": {"simple": _("{username} updated {object}")},
+ "tag": {"simple": _("{username} tagged {object}")},
+ }
+
+ object_map = {
+ "image": _("an image"),
+ "comment": _("a comment"),
+ "collection": _("a collection"),
+ "video": _("a video"),
+ "audio": _("audio"),
+ "person": _("a person"),
+ }
+
+ obj = self.get_object
+ target = self.get_target
+ actor = self.get_actor
+ content = verb_to_content.get(self.verb, None)
+
+ if content is None or obj is None:
+ return
+
+ # Decide what to fill the object with
+ if hasattr(obj, "title") and obj.title.strip(" "):
+ object_value = obj.title
+ elif obj.object_type in object_map:
+ object_value = object_map[obj.object_type]
+ else:
+ object_value = _("an object")
+
+ # Do we want to add a target (indirect object) to content?
+ if target is not None and "targetted" in content:
+ if hasattr(target, "title") and target.title.strip(" "):
+ target_value = target.title
+ elif target.object_type in object_map:
+ target_value = object_map[target.object_type]
+ else:
+ target_value = _("an object")
+
+ self.content = content["targetted"].format(
+ username=actor.username,
+ object=object_value,
+ target=target_value
+ )
+ else:
+ self.content = content["simple"].format(
+ username=actor.username,
+ object=object_value
+ )
+
+ return self.content
+
+ def serialize(self, request):
+ href = request.urlgen(
+ "mediagoblin.api.object",
+ object_type=self.object_type,
+ id=self.id,
+ qualified=True
+ )
+ published = UTC.localize(self.published)
+ updated = UTC.localize(self.updated)
+ obj = {
+ "id": href,
+ "actor": self.get_actor.serialize(request),
+ "verb": self.verb,
+ "published": published.isoformat(),
+ "updated": updated.isoformat(),
+ "content": self.content,
+ "url": self.get_url(request),
+ "object": self.get_object.serialize(request),
+ "objectType": self.object_type,
+ "links": {
+ "self": {
+ "href": href,
+ },
+ },
+ }
+
+ if self.generator:
+ obj["generator"] = self.get_generator.serialize(request)
+
+ if self.title:
+ obj["title"] = self.title
+
+ target = self.get_target
+ if target is not None:
+ obj["target"] = target.serialize(request)
+
+ return obj
+
+ def unseralize(self, data):
+ """
+ Takes the given data and sets it on this activity.
+
+ Several pieces of data are not written on because of security
+ reasons. For example changing the author or id of an activity.
+ """
+ if "verb" in data:
+ self.verb = data["verb"]
+
+ if "title" in data:
+ self.title = data["title"]
+
+ if "content" in data:
+ self.content = data["content"]
diff --git a/mediagoblin/db/models.py b/mediagoblin/db/models.py
index 2ff30d22..e8fb17a7 100644
--- a/mediagoblin/db/models.py
+++ b/mediagoblin/db/models.py
@@ -18,13 +18,15 @@
TODO: indexes on foreignkeys, where useful.
"""
+from __future__ import print_function
+
import logging
import datetime
from sqlalchemy import Column, Integer, Unicode, UnicodeText, DateTime, \
Boolean, ForeignKey, UniqueConstraint, PrimaryKeyConstraint, \
SmallInteger, Date
-from sqlalchemy.orm import relationship, backref, with_polymorphic
+from sqlalchemy.orm import relationship, backref, with_polymorphic, validates
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.sql.expression import desc
from sqlalchemy.ext.associationproxy import association_proxy
@@ -34,20 +36,90 @@ from mediagoblin.db.extratypes import (PathTupleWithSlashes, JSONEncoded,
MutationDict)
from mediagoblin.db.base import Base, DictReadAttrProxy
from mediagoblin.db.mixin import UserMixin, MediaEntryMixin, \
- MediaCommentMixin, CollectionMixin, CollectionItemMixin
+ MediaCommentMixin, CollectionMixin, CollectionItemMixin, \
+ ActivityMixin
from mediagoblin.tools.files import delete_media_files
from mediagoblin.tools.common import import_component
+from mediagoblin.tools.routing import extract_url_arguments
-# It's actually kind of annoying how sqlalchemy-migrate does this, if
-# I understand it right, but whatever. Anyway, don't remove this :P
-#
-# We could do migration calls more manually instead of relying on
-# this import-based meddling...
-from migrate import changeset
+import six
+from pytz import UTC
_log = logging.getLogger(__name__)
+class Location(Base):
+ """ Represents a physical location """
+ __tablename__ = "core__locations"
+
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode)
+
+ # GPS coordinates
+ position = Column(MutationDict.as_mutable(JSONEncoded))
+ address = Column(MutationDict.as_mutable(JSONEncoded))
+
+ @classmethod
+ def create(cls, data, obj):
+ location = cls()
+ location.unserialize(data)
+ location.save()
+ obj.location = location.id
+ return location
+
+ def serialize(self, request):
+ location = {"objectType": "place"}
+
+ if self.name is not None:
+ location["displayName"] = self.name
+
+ if self.position:
+ location["position"] = self.position
+ if self.address:
+ location["address"] = self.address
+
+ return location
+
+ def unserialize(self, data):
+ if "displayName" in data:
+ self.name = data["displayName"]
+
+ self.position = {}
+ self.address = {}
+
+ # nicer way to do this?
+ if "position" in data:
+ # TODO: deal with ISO 9709 formatted string as position
+ if "altitude" in data["position"]:
+ self.position["altitude"] = data["position"]["altitude"]
+
+ if "direction" in data["position"]:
+ self.position["direction"] = data["position"]["direction"]
+
+ if "longitude" in data["position"]:
+ self.position["longitude"] = data["position"]["longitude"]
+
+ if "latitude" in data["position"]:
+ self.position["latitude"] = data["position"]["latitude"]
+
+ if "address" in data:
+ if "formatted" in data["address"]:
+ self.address["formatted"] = data["address"]["formatted"]
+
+ if "streetAddress" in data["address"]:
+ self.address["streetAddress"] = data["address"]["streetAddress"]
+
+ if "locality" in data["address"]:
+ self.address["locality"] = data["address"]["locality"]
+
+ if "region" in data["address"]:
+ self.address["region"] = data["address"]["region"]
+
+ if "postalCode" in data["address"]:
+ self.address["postalCode"] = data["address"]["postalCode"]
+
+ if "country" in data["address"]:
+ self.address["country"] = data["address"]["country"]
class User(Base, UserMixin):
"""
@@ -64,7 +136,7 @@ class User(Base, UserMixin):
# point.
email = Column(Unicode, nullable=False)
pw_hash = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
# Intented to be nullable=False, but migrations would not work for it
# set to nullable=True implicitly.
wants_comment_notification = Column(Boolean, default=True)
@@ -74,6 +146,10 @@ class User(Base, UserMixin):
bio = Column(UnicodeText) # ??
uploaded = Column(Integer, default=0)
upload_limit = Column(Integer)
+ location = Column(Integer, ForeignKey("core__locations.id"))
+ get_location = relationship("Location", lazy="joined")
+
+ activity = Column(Integer, ForeignKey("core__activity_intermediators.id"))
## TODO
# plugin data would be in a separate model
@@ -138,11 +214,13 @@ class User(Base, UserMixin):
def serialize(self, request):
+ published = UTC.localize(self.created)
user = {
"id": "acct:{0}@{1}".format(self.username, request.host),
+ "published": published.isoformat(),
"preferredUsername": self.username,
"displayName": "{0}@{1}".format(self.username, request.host),
- "objectType": "person",
+ "objectType": self.object_type,
"pump_io": {
"shared": False,
"followed": False,
@@ -150,21 +228,21 @@ class User(Base, UserMixin):
"links": {
"self": {
"href": request.urlgen(
- "mediagoblin.federation.user.profile",
+ "mediagoblin.api.user.profile",
username=self.username,
qualified=True
),
},
"activity-inbox": {
"href": request.urlgen(
- "mediagoblin.federation.inbox",
+ "mediagoblin.api.inbox",
username=self.username,
qualified=True
)
},
"activity-outbox": {
"href": request.urlgen(
- "mediagoblin.federation.feed",
+ "mediagoblin.api.feed",
username=self.username,
qualified=True
)
@@ -176,9 +254,18 @@ class User(Base, UserMixin):
user.update({"summary": self.bio})
if self.url:
user.update({"url": self.url})
+ if self.location:
+ user.update({"location": self.get_location.serialize(request)})
return user
+ def unserialize(self, data):
+ if "summary" in data:
+ self.bio = data["summary"]
+
+ if "location" in data:
+ Location.create(data, self)
+
class Client(Base):
"""
Model representing a client - Used for API Auth
@@ -189,8 +276,8 @@ class Client(Base):
secret = Column(Unicode, nullable=False)
expirey = Column(DateTime, nullable=True)
application_type = Column(Unicode, nullable=False)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
- updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
# optional stuff
redirect_uri = Column(JSONEncoded, nullable=True)
@@ -218,8 +305,10 @@ class RequestToken(Base):
authenticated = Column(Boolean, default=False)
verifier = Column(Unicode, nullable=True)
callback = Column(Unicode, nullable=False, default=u"oob")
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
- updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
+ get_client = relationship(Client)
class AccessToken(Base):
"""
@@ -231,8 +320,10 @@ class AccessToken(Base):
secret = Column(Unicode, nullable=False)
user = Column(Integer, ForeignKey(User.id))
request_token = Column(Unicode, ForeignKey(RequestToken.token))
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
- updated = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+
+ get_requesttoken = relationship(RequestToken)
class NonceTimestamp(Base):
@@ -254,7 +345,7 @@ class MediaEntry(Base, MediaEntryMixin):
uploader = Column(Integer, ForeignKey(User.id), nullable=False, index=True)
title = Column(Unicode, nullable=False)
slug = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now,
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow,
index=True)
description = Column(UnicodeText) # ??
media_type = Column(Unicode, nullable=False)
@@ -262,6 +353,8 @@ class MediaEntry(Base, MediaEntryMixin):
# or use sqlalchemy.types.Enum?
license = Column(Unicode)
file_size = Column(Integer, default=0)
+ location = Column(Integer, ForeignKey("core__locations.id"))
+ get_location = relationship("Location", lazy="joined")
fail_error = Column(Unicode)
fail_metadata = Column(JSONEncoded)
@@ -309,6 +402,8 @@ class MediaEntry(Base, MediaEntryMixin):
media_metadata = Column(MutationDict.as_mutable(JSONEncoded),
default=MutationDict())
+ activity = Column(Integer, ForeignKey("core__activity_intermediators.id"))
+
## TODO
# fail_error
@@ -344,7 +439,7 @@ class MediaEntry(Base, MediaEntryMixin):
return the value of the key.
"""
media_file = MediaFile.query.filter_by(media_entry=self.id,
- name=unicode(file_key)).first()
+ name=six.text_type(file_key)).first()
if media_file:
if metadata_key:
@@ -357,11 +452,11 @@ class MediaEntry(Base, MediaEntryMixin):
Update the file_metadata of a MediaFile.
"""
media_file = MediaFile.query.filter_by(media_entry=self.id,
- name=unicode(file_key)).first()
+ name=six.text_type(file_key)).first()
file_metadata = media_file.file_metadata or {}
- for key, value in kwargs.iteritems():
+ for key, value in six.iteritems(kwargs):
file_metadata[key] = value
media_file.file_metadata = file_metadata
@@ -386,7 +481,7 @@ class MediaEntry(Base, MediaEntryMixin):
media_data.get_media_entry = self
else:
# Update old media data
- for field, value in kwargs.iteritems():
+ for field, value in six.iteritems(kwargs):
setattr(media_data, field, value)
@memoized_property
@@ -394,7 +489,11 @@ class MediaEntry(Base, MediaEntryMixin):
return import_component(self.media_type + '.models:BACKREF_NAME')
def __repr__(self):
- safe_title = self.title.encode('ascii', 'replace')
+ if six.PY2:
+ # obj.__repr__() should return a str on Python 2
+ safe_title = self.title.encode('utf-8', 'replace')
+ else:
+ safe_title = self.title
return '<{classname} {id}: {title}>'.format(
classname=self.__class__.__name__,
@@ -415,7 +514,7 @@ class MediaEntry(Base, MediaEntryMixin):
# Delete all related files/attachments
try:
delete_media_files(self)
- except OSError, error:
+ except OSError as error:
# Returns list of files we failed to delete
_log.error('No such files from the user "{1}" to delete: '
'{0}'.format(str(error), self.get_uploader))
@@ -430,38 +529,36 @@ class MediaEntry(Base, MediaEntryMixin):
# pass through commit=False/True in kwargs
super(MediaEntry, self).delete(**kwargs)
- @property
- def objectType(self):
- """ Converts media_type to pump-like type - don't use internally """
- return self.media_type.split(".")[-1]
-
def serialize(self, request, show_comments=True):
""" Unserialize MediaEntry to object """
+ href = request.urlgen(
+ "mediagoblin.api.object",
+ object_type=self.object_type,
+ id=self.id,
+ qualified=True
+ )
author = self.get_uploader
+ published = UTC.localize(self.created)
+ updated = UTC.localize(self.created)
context = {
- "id": self.id,
+ "id": href,
"author": author.serialize(request),
- "objectType": self.objectType,
- "url": self.url_for_self(request.urlgen),
+ "objectType": self.object_type,
+ "url": self.url_for_self(request.urlgen, qualified=True),
"image": {
"url": request.host_url + self.thumb_url[1:],
},
"fullImage":{
"url": request.host_url + self.original_url[1:],
},
- "published": self.created.isoformat(),
- "updated": self.created.isoformat(),
+ "published": published.isoformat(),
+ "updated": updated.isoformat(),
"pump_io": {
"shared": False,
},
"links": {
"self": {
- "href": request.urlgen(
- "mediagoblin.federation.object",
- objectType=self.objectType,
- id=self.id,
- qualified=True
- ),
+ "href": href,
},
}
@@ -476,20 +573,40 @@ class MediaEntry(Base, MediaEntryMixin):
if self.license:
context["license"] = self.license
+ if self.location:
+ context["location"] = self.get_location.serialize(request)
+
if show_comments:
- comments = [comment.serialize(request) for comment in self.get_comments()]
+ comments = [
+ comment.serialize(request) for comment in self.get_comments()]
total = len(comments)
context["replies"] = {
"totalItems": total,
"items": comments,
"url": request.urlgen(
- "mediagoblin.federation.object.comments",
- objectType=self.objectType,
+ "mediagoblin.api.object.comments",
+ object_type=self.object_type,
id=self.id,
qualified=True
),
}
+ # Add image height and width if possible. We didn't use to store this
+ # data and we're not able (and maybe not willing) to re-process all
+ # images so it's possible this might not exist.
+ if self.get_file_metadata("thumb", "height"):
+ height = self.get_file_metadata("thumb", "height")
+ context["image"]["height"] = height
+ if self.get_file_metadata("thumb", "width"):
+ width = self.get_file_metadata("thumb", "width")
+ context["image"]["width"] = width
+ if self.get_file_metadata("original", "height"):
+ height = self.get_file_metadata("original", "height")
+ context["fullImage"]["height"] = height
+ if self.get_file_metadata("original", "width"):
+ width = self.get_file_metadata("original", "width")
+ context["fullImage"]["width"] = width
+
return context
def unserialize(self, data):
@@ -503,6 +620,9 @@ class MediaEntry(Base, MediaEntryMixin):
if "license" in data:
self.license = data["license"]
+ if "location" in data:
+ Location.create(data["location"], self)
+
return True
class FileKeynames(Base):
@@ -561,7 +681,7 @@ class MediaAttachmentFile(Base):
nullable=False)
name = Column(Unicode, nullable=False)
filepath = Column(PathTupleWithSlashes)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
@property
def dict_view(self):
@@ -595,7 +715,7 @@ class MediaTag(Base):
nullable=False, index=True)
tag = Column(Integer, ForeignKey(Tag.id), nullable=False, index=True)
name = Column(Unicode)
- # created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ # created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
__table_args__ = (
UniqueConstraint('tag', 'media_entry'),
@@ -626,8 +746,10 @@ class MediaComment(Base, MediaCommentMixin):
media_entry = Column(
Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
author = Column(Integer, ForeignKey(User.id), nullable=False)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
content = Column(UnicodeText, nullable=False)
+ location = Column(Integer, ForeignKey("core__locations.id"))
+ get_location = relationship("Location", lazy="joined")
# Cascade: Comments are owned by their creator. So do the full thing.
# lazy=dynamic: People might post a *lot* of comments,
@@ -650,44 +772,60 @@ class MediaComment(Base, MediaCommentMixin):
lazy="dynamic",
cascade="all, delete-orphan"))
+
+ activity = Column(Integer, ForeignKey("core__activity_intermediators.id"))
+
def serialize(self, request):
""" Unserialize to python dictionary for API """
+ href = request.urlgen(
+ "mediagoblin.api.object",
+ object_type=self.object_type,
+ id=self.id,
+ qualified=True
+ )
media = MediaEntry.query.filter_by(id=self.media_entry).first()
author = self.get_author
+ published = UTC.localize(self.created)
context = {
- "id": self.id,
- "objectType": "comment",
+ "id": href,
+ "objectType": self.object_type,
"content": self.content,
"inReplyTo": media.serialize(request, show_comments=False),
- "author": author.serialize(request)
+ "author": author.serialize(request),
+ "published": published.isoformat(),
+ "updated": published.isoformat(),
}
+ if self.location:
+ context["location"] = self.get_location.serialize(request)
+
return context
- def unserialize(self, data):
+ def unserialize(self, data, request):
""" Takes API objects and unserializes on existing comment """
- # Do initial checks to verify the object is correct
- required_attributes = ["content", "inReplyTo"]
- for attr in required_attributes:
- if attr not in data:
+ # Handle changing the reply ID
+ if "inReplyTo" in data:
+ # Validate that the ID is correct
+ try:
+ media_id = int(extract_url_arguments(
+ url=data["inReplyTo"]["id"],
+ urlmap=request.app.url_map
+ )["id"])
+ except ValueError:
return False
- # Validate inReplyTo has ID
- if "id" not in data["inReplyTo"]:
- return False
+ media = MediaEntry.query.filter_by(id=media_id).first()
+ if media is None:
+ return False
- # Validate that the ID is correct
- try:
- media_id = int(data["inReplyTo"]["id"])
- except ValueError:
- return False
+ self.media_entry = media.id
- media = MediaEntry.query.filter_by(id=media_id).first()
- if media is None:
- return False
+ if "content" in data:
+ self.content = data["content"]
+
+ if "location" in data:
+ Location.create(data["location"], self)
- self.media_entry = media.id
- self.content = data["content"]
return True
@@ -702,10 +840,13 @@ class Collection(Base, CollectionMixin):
id = Column(Integer, primary_key=True)
title = Column(Unicode, nullable=False)
slug = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now,
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow,
index=True)
description = Column(UnicodeText)
creator = Column(Integer, ForeignKey(User.id), nullable=False)
+ location = Column(Integer, ForeignKey("core__locations.id"))
+ get_location = relationship("Location", lazy="joined")
+
# TODO: No of items in Collection. Badly named, can we migrate to num_items?
items = Column(Integer, default=0)
@@ -714,6 +855,8 @@ class Collection(Base, CollectionMixin):
backref=backref("collections",
cascade="all, delete-orphan"))
+ activity = Column(Integer, ForeignKey("core__activity_intermediators.id"))
+
__table_args__ = (
UniqueConstraint('creator', 'slug'),
{})
@@ -734,6 +877,18 @@ class Collection(Base, CollectionMixin):
creator=self.creator,
title=safe_title)
+ def serialize(self, request):
+ # Get all serialized output in a list
+ items = []
+ for item in self.get_collection_items():
+ items.append(item.serialize(request))
+
+ return {
+ "totalItems": self.items,
+ "url": self.url_for_self(request.urlgen, qualified=True),
+ "items": items,
+ }
+
class CollectionItem(Base, CollectionItemMixin):
__tablename__ = "core__collection_items"
@@ -743,7 +898,7 @@ class CollectionItem(Base, CollectionItemMixin):
Integer, ForeignKey(MediaEntry.id), nullable=False, index=True)
collection = Column(Integer, ForeignKey(Collection.id), nullable=False)
note = Column(UnicodeText, nullable=True)
- added = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ added = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
position = Column(Integer)
# Cascade: CollectionItems are owned by their Collection. So do the full thing.
@@ -770,6 +925,9 @@ class CollectionItem(Base, CollectionItemMixin):
collection=self.collection,
entry=self.media_entry)
+ def serialize(self, request):
+ return self.get_media_entry.serialize(request)
+
class ProcessingMetaData(Base):
__tablename__ = 'core__processing_metadata'
@@ -792,7 +950,7 @@ class CommentSubscription(Base):
__tablename__ = 'core__comment_subscriptions'
id = Column(Integer, primary_key=True)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
media_entry_id = Column(Integer, ForeignKey(MediaEntry.id), nullable=False)
media_entry = relationship(MediaEntry,
@@ -823,7 +981,7 @@ class Notification(Base):
id = Column(Integer, primary_key=True)
type = Column(Unicode)
- created = Column(DateTime, nullable=False, default=datetime.datetime.now)
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
user_id = Column(Integer, ForeignKey('core__users.id'), nullable=False,
index=True)
@@ -883,9 +1041,8 @@ class ProcessingNotification(Notification):
'polymorphic_identity': 'processing_notification'
}
-with_polymorphic(
- Notification,
- [ProcessingNotification, CommentNotification])
+# the with_polymorphic call has been moved to the bottom above MODELS
+# this is because it causes conflicts with relationship calls.
class ReportBase(Base):
"""
@@ -930,7 +1087,7 @@ class ReportBase(Base):
lazy="dynamic",
cascade="all, delete-orphan"),
primaryjoin="User.id==ReportBase.reported_user_id")
- created = Column(DateTime, nullable=False, default=datetime.datetime.now())
+ created = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
discriminator = Column('type', Unicode(50))
resolver_id = Column(Integer, ForeignKey(User.id))
resolver = relationship(
@@ -1068,13 +1225,198 @@ class PrivilegeUserAssociation(Base):
ForeignKey(Privilege.id),
primary_key=True)
+class Generator(Base):
+ """ Information about what created an activity """
+ __tablename__ = "core__generators"
+
+ id = Column(Integer, primary_key=True)
+ name = Column(Unicode, nullable=False)
+ published = Column(DateTime, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, default=datetime.datetime.utcnow)
+ object_type = Column(Unicode, nullable=False)
+
+ def __repr__(self):
+ return "<{klass} {name}>".format(
+ klass=self.__class__.__name__,
+ name=self.name
+ )
+
+ def serialize(self, request):
+ href = request.urlgen(
+ "mediagoblin.api.object",
+ object_type=self.object_type,
+ id=self.id,
+ qualified=True
+ )
+ published = UTC.localize(self.published)
+ updated = UTC.localize(self.updated)
+ return {
+ "id": href,
+ "displayName": self.name,
+ "published": published.isoformat(),
+ "updated": updated.isoformat(),
+ "objectType": self.object_type,
+ }
+
+ def unserialize(self, data):
+ if "displayName" in data:
+ self.name = data["displayName"]
+
+
+class ActivityIntermediator(Base):
+ """
+ This is used so that objects/targets can have a foreign key back to this
+ object and activities can have a foreign key to this object. This allows
+ the object to be used multiple times for the activity object or target
+ and also allows for different types of objects to be used as an Activity.
+ """
+ __tablename__ = "core__activity_intermediators"
+
+ id = Column(Integer, primary_key=True)
+ type = Column(Unicode, nullable=False)
+
+ TYPES = {
+ "user": User,
+ "media": MediaEntry,
+ "comment": MediaComment,
+ "collection": Collection,
+ }
+
+ def _find_model(self, obj):
+ """ Finds the model for a given object """
+ for key, model in self.TYPES.items():
+ if isinstance(obj, model):
+ return key, model
+
+ return None, None
+
+ def set(self, obj):
+ """ This sets itself as the activity """
+ key, model = self._find_model(obj)
+ if key is None:
+ raise ValueError("Invalid type of object given")
+
+ self.type = key
+
+ # We need to populate the self.id so we need to save but, we don't
+ # want to save this AI in the database (yet) so commit=False.
+ self.save(commit=False)
+ obj.activity = self.id
+ obj.save()
+
+ def get(self):
+ """ Finds the object for an activity """
+ if self.type is None:
+ return None
+
+ model = self.TYPES[self.type]
+ return model.query.filter_by(activity=self.id).first()
+
+ @validates("type")
+ def validate_type(self, key, value):
+ """ Validate that the type set is a valid type """
+ assert value in self.TYPES
+ return value
+
+class Activity(Base, ActivityMixin):
+ """
+ This holds all the metadata about an activity such as uploading an image,
+ posting a comment, etc.
+ """
+ __tablename__ = "core__activities"
+
+ id = Column(Integer, primary_key=True)
+ actor = Column(Integer,
+ ForeignKey("core__users.id"),
+ nullable=False)
+ published = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ updated = Column(DateTime, nullable=False, default=datetime.datetime.utcnow)
+ verb = Column(Unicode, nullable=False)
+ content = Column(Unicode, nullable=True)
+ title = Column(Unicode, nullable=True)
+ generator = Column(Integer,
+ ForeignKey("core__generators.id"),
+ nullable=True)
+ object = Column(Integer,
+ ForeignKey("core__activity_intermediators.id"),
+ nullable=False)
+ target = Column(Integer,
+ ForeignKey("core__activity_intermediators.id"),
+ nullable=True)
+
+ get_actor = relationship(User,
+ backref=backref("activities",
+ cascade="all, delete-orphan"))
+ get_generator = relationship(Generator)
+
+ def __repr__(self):
+ if self.content is None:
+ return "<{klass} verb:{verb}>".format(
+ klass=self.__class__.__name__,
+ verb=self.verb
+ )
+ else:
+ return "<{klass} {content}>".format(
+ klass=self.__class__.__name__,
+ content=self.content
+ )
+
+ @property
+ def get_object(self):
+ if self.object is None:
+ return None
+
+ ai = ActivityIntermediator.query.filter_by(id=self.object).first()
+ return ai.get()
+
+ def set_object(self, obj):
+ self.object = self._set_model(obj)
+
+ @property
+ def get_target(self):
+ if self.target is None:
+ return None
+
+ ai = ActivityIntermediator.query.filter_by(id=self.target).first()
+ return ai.get()
+
+ def set_target(self, obj):
+ self.target = self._set_model(obj)
+
+ def _set_model(self, obj):
+ # Firstly can we set obj
+ if not hasattr(obj, "activity"):
+ raise ValueError(
+ "{0!r} is unable to be set on activity".format(obj))
+
+ if obj.activity is None:
+ # We need to create a new AI
+ ai = ActivityIntermediator()
+ ai.set(obj)
+ ai.save()
+ return ai.id
+
+ # Okay we should have an existing AI
+ return ActivityIntermediator.query.filter_by(id=obj.activity).first().id
+
+ def save(self, set_updated=True, *args, **kwargs):
+ if set_updated:
+ self.updated = datetime.datetime.utcnow()
+ super(Activity, self).save(*args, **kwargs)
+
+with_polymorphic(
+ Notification,
+ [ProcessingNotification, CommentNotification])
+
MODELS = [
User, MediaEntry, Tag, MediaTag, MediaComment, Collection, CollectionItem,
MediaFile, FileKeynames, MediaAttachmentFile, ProcessingMetaData,
Notification, CommentNotification, ProcessingNotification, Client,
CommentSubscription, ReportBase, CommentReport, MediaReport, UserBan,
Privilege, PrivilegeUserAssociation,
- RequestToken, AccessToken, NonceTimestamp]
+ RequestToken, AccessToken, NonceTimestamp,
+ Activity, ActivityIntermediator, Generator,
+ Location]
"""
Foundations are the default rows that are created immediately after the tables
@@ -1125,7 +1467,7 @@ def show_table_init(engine_uri):
if __name__ == '__main__':
from sys import argv
- print repr(argv)
+ print(repr(argv))
if len(argv) == 2:
uri = argv[1]
else:
diff --git a/mediagoblin/db/open.py b/mediagoblin/db/open.py
index 4ff0945f..8f81c8d9 100644
--- a/mediagoblin/db/open.py
+++ b/mediagoblin/db/open.py
@@ -15,38 +15,117 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from sqlalchemy import create_engine, event
+from contextlib import contextmanager
import logging
-from mediagoblin.db.base import Base, Session
+import six
+from sqlalchemy import create_engine, event
+
from mediagoblin import mg_globals
+from mediagoblin.db.base import Base
_log = logging.getLogger(__name__)
+from mediagoblin.tools.transition import DISABLE_GLOBALS
+
+def set_models_as_attributes(obj):
+ """
+ Set all models as attributes on this object, for convenience
+
+ TODO: This should eventually be deprecated.
+ """
+ for k, v in six.iteritems(Base._decl_class_registry):
+ setattr(obj, k, v)
+
+
+if not DISABLE_GLOBALS:
+ from mediagoblin.db.base import Session
+
+ class DatabaseMaster(object):
+ def __init__(self, engine):
+ self.engine = engine
+
+ set_models_as_attributes(self)
+
+ def commit(self):
+ Session.commit()
-class DatabaseMaster(object):
- def __init__(self, engine):
- self.engine = engine
+ def save(self, obj):
+ Session.add(obj)
+ Session.flush()
- for k, v in Base._decl_class_registry.iteritems():
- setattr(self, k, v)
+ def check_session_clean(self):
+ for dummy in Session():
+ _log.warn("STRANGE: There are elements in the sql session. "
+ "Please report this and help us track this down.")
+ break
- def commit(self):
- Session.commit()
+ def reset_after_request(self):
+ Session.rollback()
+ Session.remove()
- def save(self, obj):
- Session.add(obj)
- Session.flush()
+ @property
+ def query(self):
+ return Session.query
- def check_session_clean(self):
- for dummy in Session():
- _log.warn("STRANGE: There are elements in the sql session. "
- "Please report this and help us track this down.")
- break
+else:
+ from sqlalchemy.orm import sessionmaker
+
+ class DatabaseManager(object):
+ """
+ Manage database connections.
+
+ The main method here is session_scope which can be used with a
+ "with" statement to get a session that is properly torn down
+ by the end of execution.
+ """
+ def __init__(self, engine):
+ self.engine = engine
+ self.Session = sessionmaker(bind=engine)
+ set_models_as_attributes(self)
+
+ @contextmanager
+ def session_scope(self):
+ """
+ This is a context manager, use like::
+
+ with dbmanager.session_scope() as request.db:
+ some_view(request)
+ """
+ session = self.Session()
+
+ #####################################
+ # Functions to emulate DatabaseMaster
+ #####################################
+ def save(obj):
+ session.add(obj)
+ session.flush()
+
+ def check_session_clean():
+ # Is this implemented right?
+ for dummy in session:
+ _log.warn("STRANGE: There are elements in the sql session. "
+ "Please report this and help us track this down.")
+ break
+
+ def reset_after_request():
+ session.rollback()
+ session.remove()
+
+ # now attach
+ session.save = save
+ session.check_session_clean = check_session_clean
+ session.reset_after_request = reset_after_request
+
+ set_models_as_attributes(session)
+ #####################################
+
+ try:
+ yield session
+ finally:
+ session.rollback()
+ session.close()
- def reset_after_request(self):
- Session.rollback()
- Session.remove()
def load_models(app_config):
@@ -75,9 +154,14 @@ def _sqlite_disable_fk_pragma_on_connect(dbapi_con, con_record):
dbapi_con.execute('pragma foreign_keys=off')
-def setup_connection_and_db_from_config(app_config, migrations=False):
+def setup_connection_and_db_from_config(app_config, migrations=False, app=None):
engine = create_engine(app_config['sql_engine'])
+ # @@: Maybe make a weak-ref so an engine can get garbage
+ # collected? Not that we expect to make a lot of MediaGoblinApp
+ # instances in a single process...
+ engine.app = app
+
# Enable foreign key checking for sqlite
if app_config['sql_engine'].startswith('sqlite://'):
if migrations:
@@ -88,9 +172,13 @@ def setup_connection_and_db_from_config(app_config, migrations=False):
# logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
- Session.configure(bind=engine)
+ if DISABLE_GLOBALS:
+ return DatabaseManager(engine)
+
+ else:
+ Session.configure(bind=engine)
- return DatabaseMaster(engine)
+ return DatabaseMaster(engine)
def check_db_migrations_current(db):
diff --git a/mediagoblin/db/util.py b/mediagoblin/db/util.py
index 515fd6cd..7c026691 100644
--- a/mediagoblin/db/util.py
+++ b/mediagoblin/db/util.py
@@ -17,10 +17,14 @@
import sys
from mediagoblin import mg_globals as mgg
-from mediagoblin.db.base import Session
from mediagoblin.db.models import MediaEntry, Tag, MediaTag, Collection
from mediagoblin.gmg_commands.dbupdate import gather_database_data
+from mediagoblin.tools.transition import DISABLE_GLOBALS
+
+if not DISABLE_GLOBALS:
+ from mediagoblin.db.base import Session
+
##########################
# Random utility functions
##########################