Diffstat (limited to 'mediagoblin/submit/lib.py')
-rw-r--r--  mediagoblin/submit/lib.py  83
1 file changed, 67 insertions(+), 16 deletions(-)
diff --git a/mediagoblin/submit/lib.py b/mediagoblin/submit/lib.py
index d54591d6..08a603e9 100644
--- a/mediagoblin/submit/lib.py
+++ b/mediagoblin/submit/lib.py
@@ -18,11 +18,15 @@ import logging
import uuid
from os.path import splitext
+import six
+
from werkzeug.utils import secure_filename
from werkzeug.datastructures import FileStorage
from mediagoblin import mg_globals
+from mediagoblin.tools.response import json_response
from mediagoblin.tools.text import convert_to_tag_list_of_dicts
+from mediagoblin.tools.federation import create_activity, create_generator
from mediagoblin.db.models import Collection, MediaEntry, ProcessingMetaData
from mediagoblin.processing import mark_entry_failed
from mediagoblin.processing.task import ProcessMedia
@@ -49,7 +53,7 @@ def new_upload_entry(user):
Create a new MediaEntry for uploading
"""
entry = MediaEntry()
- entry.uploader = user.id
+ entry.actor = user.id
entry.license = user.license_preference
return entry
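
With the uploader field renamed to actor, a minimal sketch of what a caller now gets back (request.user is assumed here purely for illustration):

entry = new_upload_entry(request.user)
assert entry.actor == request.user.id                  # formerly entry.uploader
assert entry.license == request.user.license_preference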
@@ -58,7 +62,7 @@ def get_upload_file_limits(user):
"""
Get the upload_limit and max_file_size for this user
"""
- if user.upload_limit >= 0:
+ if user.upload_limit is not None and user.upload_limit >= 0: # TODO: debug this
upload_limit = user.upload_limit
else:
upload_limit = mg_globals.app_config.get('upload_limit', None)
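
For reference, submit_media (below) consumes the returned pair like this; both values may be None when no limit is configured, and a None upload_limit is simply falsy in the check:

upload_limit, max_file_size = get_upload_file_limits(user)
if upload_limit and user.uploaded >= upload_limit:
    raise UserPastUploadLimit()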
@@ -100,10 +104,7 @@ class UserPastUploadLimit(UploadLimitError):
def submit_media(mg_app, user, submitted_file, filename,
title=None, description=None, collection_slug=None,
license=None, metadata=None, tags_string=u"",
- upload_limit=None, max_file_size=None,
- callback_url=None,
- # If provided we'll do the feed_url update, otherwise ignore
- urlgen=None,):
+ callback_url=None, urlgen=None,):
"""
Args:
- mg_app: The MediaGoblinApp instantiated for this process
@@ -119,17 +120,17 @@ def submit_media(mg_app, user, submitted_file, filename,
- license: license for this media entry
- tags_string: comma separated string of tags to be associated
with this entry
- - upload_limit: size in megabytes that's the per-user upload limit
- - max_file_size: maximum size each file can be that's uploaded
- callback_url: possible post-hook to call after submission
- - urlgen: if provided, used to do the feed_url update
+ - urlgen: if provided, used to do the feed_url update and assign a public
+ ID used in the API (very important).
"""
+ upload_limit, max_file_size = get_upload_file_limits(user)
if upload_limit and user.uploaded >= upload_limit:
raise UserPastUploadLimit()
# If the filename contains non ascii generate a unique name
if not all(ord(c) < 128 for c in filename):
- filename = unicode(uuid.uuid4()) + splitext(filename)[-1]
+ filename = six.text_type(uuid.uuid4()) + splitext(filename)[-1]
# Sniff the submitted media to determine which
# media plugin should handle processing
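
A minimal sketch of a caller under the new signature; `request`, the form field names, and reading the upload into raw bytes up front (implied by the queue_file.write() change further down) are assumptions for illustration, not MediaGoblin's actual view code:

upload = request.files['file']
entry = submit_media(
    mg_app=request.app,
    user=request.user,
    submitted_file=upload.stream.read(),   # raw bytes; see queue_file.write() below
    filename=upload.filename,
    title=request.form.get('title'),
    description=request.form.get('description'),
    tags_string=request.form.get('tags', u""),
    urlgen=request.urlgen)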
@@ -138,7 +139,7 @@ def submit_media(mg_app, user, submitted_file, filename,
# create entry and save in database
entry = new_upload_entry(user)
entry.media_type = media_type
- entry.title = (title or unicode(splitext(filename)[0]))
+ entry.title = (title or six.text_type(splitext(filename)[0]))
entry.description = description or u""
@@ -155,7 +156,7 @@ def submit_media(mg_app, user, submitted_file, filename,
queue_file = prepare_queue_task(mg_app, entry, filename)
with queue_file:
- queue_file.write(submitted_file.read())
+ queue_file.write(submitted_file)
# Get file size and round to 2 decimal places
file_size = mg_app.queue_store.get_file_size(
@@ -187,15 +188,27 @@ def submit_media(mg_app, user, submitted_file, filename,
metadata.save()
if urlgen:
+ # Generate the public_id; this is very important, especially relating
+ # to deletion, since it allows the shell to be accessible post-delete!
+ entry.get_public_id(urlgen)
+
+ # Generate the feed URL
feed_url = urlgen(
'mediagoblin.user_pages.atom_feed',
qualified=True, user=user.username)
else:
feed_url = None
+ add_comment_subscription(user, entry)
+
+ # Create activity
+ create_activity("post", entry, entry.actor)
+ entry.save()
+
# add to collection
if collection_slug:
- collection = Collection.query.filter_by(slug=collection_slug, creator=user.id).first()
+ collection = Collection.query.filter_by(slug=collection_slug,
+ actor=user.id).first()
if collection:
add_media_to_collection(collection, entry)
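
The lookup now filters on actor instead of creator; the same pattern works outside submit_media, with the slug value below being just an example:

collection = Collection.query.filter_by(
    slug=u'vacation-photos', actor=request.user.id).first()
if collection:
    add_media_to_collection(collection, entry)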
@@ -205,8 +218,6 @@ def submit_media(mg_app, user, submitted_file, filename,
# conditions with changes to the document via processing code)
run_process_media(entry, feed_url)
- add_comment_subscription(user, entry)
-
return entry
@@ -220,7 +231,7 @@ def prepare_queue_task(app, entry, filename):
# (If we got it off the task's auto-generation, there'd be
# a risk of a race condition when we'd save after sending
# off the task)
- task_id = unicode(uuid.uuid4())
+ task_id = six.text_type(uuid.uuid4())
entry.queued_task_id = task_id
# Now store generate the queueing related filename
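
The queued task id is now built with six.text_type so it is unicode on both Python 2 and 3; the concrete UUID below is only an example value:

task_id = six.text_type(uuid.uuid4())
# e.g. u'16fd2706-8baf-433b-82eb-8c7fada847da'
entry.queued_task_id = task_id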
@@ -267,3 +278,43 @@ def run_process_media(entry, feed_url=None,
mark_entry_failed(entry.id, exc)
# re-raise the exception
raise
+
+
+def api_upload_request(request, file_data, entry):
+ """ This handles a image upload request """
+ # Use the same kind of method from mediagoblin/submit/views:submit_start
+ entry.title = file_data.filename
+
+ # This will be set later but currently we just don't have enough information
+ entry.slug = None
+
+ # This is a MUST.
+ entry.get_public_id(request.urlgen)
+
+ queue_file = prepare_queue_task(request.app, entry, file_data.filename)
+ with queue_file:
+ queue_file.write(request.data)
+
+ entry.save()
+ return json_response(entry.serialize(request))
+
+def api_add_to_feed(request, entry):
+ """ Add media to Feed """
+ feed_url = request.urlgen(
+ 'mediagoblin.user_pages.atom_feed',
+ qualified=True, user=request.user.username
+ )
+
+ add_comment_subscription(request.user, entry)
+
+ # Create activity
+ activity = create_activity(
+ verb="post",
+ obj=entry,
+ actor=entry.actor,
+ generator=create_generator(request)
+ )
+ entry.save()
+ run_process_media(entry, feed_url)
+
+ return activity
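
Taken together, a hedged sketch of how an API view could drive the two helpers above: stage one stashes the raw upload and returns the serialized entry, stage two subscribes the user, records the "post" activity and kicks off processing. The view name, the media_type string, and running both stages in a single request are assumptions for illustration only; in the federation API they would normally be separate requests. FileStorage, six, and new_upload_entry are already imported or defined in this module.

def api_image_upload_view(request):
    """Hypothetical endpoint sketching the call order of the helpers above."""
    # request.data is assumed to hold the raw image bytes; api_upload_request
    # writes request.data into the queue store and only reads the filename
    # off the FileStorage object.
    entry = new_upload_entry(request.user)
    entry.media_type = u'mediagoblin.media_types.image'  # assumed type string

    file_data = FileStorage(filename=u'upload.jpg')

    # Stage 1: stash the bytes, assign the public_id, return JSON.
    response = api_upload_request(request, file_data, entry)

    # Stage 2: subscribe, create the "post" activity, queue processing.
    api_add_to_feed(request, entry)

    return response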