Diffstat (limited to 'mediagoblin/submit/lib.py')
-rw-r--r--  mediagoblin/submit/lib.py  155
1 file changed, 154 insertions(+), 1 deletion(-)
diff --git a/mediagoblin/submit/lib.py b/mediagoblin/submit/lib.py
index 1bbf2cb8..c70e2731 100644
--- a/mediagoblin/submit/lib.py
+++ b/mediagoblin/submit/lib.py
@@ -16,12 +16,18 @@
 import logging
 import uuid
 
+from os.path import splitext
+
 from werkzeug.utils import secure_filename
 from werkzeug.datastructures import FileStorage
 
-from mediagoblin.db.models import MediaEntry
+from mediagoblin import mg_globals
+from mediagoblin.tools.text import convert_to_tag_list_of_dicts
+from mediagoblin.db.models import MediaEntry, ProcessingMetaData
 from mediagoblin.processing import mark_entry_failed
 from mediagoblin.processing.task import ProcessMedia
+from mediagoblin.notifications import add_comment_subscription
+from mediagoblin.media_types import sniff_media
 
 
 _log = logging.getLogger(__name__)
@@ -47,6 +53,153 @@ def new_upload_entry(user):
 return entry
 
 
+def get_upload_file_limits(user):
+    """
+    Get the upload_limit and max_file_size for this user
+    """
+    if user.upload_limit >= 0:
+        upload_limit = user.upload_limit
+    else:
+        upload_limit = mg_globals.app_config.get('upload_limit', None)
+
+    max_file_size = mg_globals.app_config.get('max_file_size', None)
+
+    return upload_limit, max_file_size
+
+
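A quick usage sketch of the helper above (illustrative only, not part of this patch; `some_user` is a hypothetical User row, and a negative upload_limit on the user means "fall back to the site-wide setting"):

    upload_limit, max_file_size = get_upload_file_limits(some_user)
    # upload_limit: per-user cap in megabytes, or the app_config
    # 'upload_limit' value when the user has no override.
    # max_file_size: site-wide per-file cap in megabytes.
    # Either value may be None if no limit is configured.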
+class UploadLimitError(Exception):
+    """
+    General exception for when an upload will be over some upload limit
+    """
+    pass
+
+
+class FileUploadLimit(UploadLimitError):
+    """
+    This file is over the site upload limit
+    """
+    pass
+
+
+class UserUploadLimit(UploadLimitError):
+    """
+    This file is over the user's particular upload limit
+    """
+    pass
+
+
+class UserPastUploadLimit(UploadLimitError):
+    """
+    The user is *already* past their upload limit!
+    """
+    pass
+
+
+
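All three concrete classes above derive from UploadLimitError, so callers can handle any limit violation with a single except clause, or single out UserPastUploadLimit (which submit_media raises before anything is written to the queue store). A minimal sketch of the hierarchy, not part of the patch:

    try:
        raise FileUploadLimit()
    except UploadLimitError:
        # catches FileUploadLimit, UserUploadLimit and
        # UserPastUploadLimit alike
        pass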
+def submit_media(mg_app, user, submitted_file, filename,
+                 title=None, description=None,
+                 license=None, tags_string=u"",
+                 upload_limit=None, max_file_size=None,
+                 callback_url=None,
+                 # If provided, we'll do the feed_url update; otherwise ignored
+                 urlgen=None):
+    """
+    Args:
+     - mg_app: The MediaGoblinApp instantiated for this process
+     - user: the user object this media entry should be associated with
+     - submitted_file: the file-like object containing the submitted
+       file's data (this object should really have a .name attribute
+       which is the filename on disk!)
+     - filename: the *original* filename of this media; not necessarily
+       the name on disk referenced by submitted_file.
+     - title: title for this media entry
+     - description: description for this media entry
+     - license: license for this media entry
+     - tags_string: comma separated string of tags to be associated
+       with this entry
+     - upload_limit: the per-user upload limit, in megabytes
+     - max_file_size: the maximum size (in megabytes) of a single file
+     - callback_url: possible post-hook to call after submission
+     - urlgen: if provided, used to do the feed_url update
+    """
+    if upload_limit and user.uploaded >= upload_limit:
+        raise UserPastUploadLimit()
+
+    # If the filename contains non-ASCII characters, generate a unique name
+    if not all(ord(c) < 128 for c in filename):
+        filename = unicode(uuid.uuid4()) + splitext(filename)[-1]
+
+    # Sniff the submitted media to determine which
+    # media plugin should handle processing
+    media_type, media_manager = sniff_media(submitted_file, filename)
+
+    # create entry and save in database
+    entry = new_upload_entry(user)
+    entry.media_type = media_type
+    entry.title = (title or unicode(splitext(filename)[0]))
+
+    entry.description = description or u""
+
+    entry.license = license or None
+
+    # Process the user's folksonomy "tags"
+    entry.tags = convert_to_tag_list_of_dicts(tags_string)
+
+    # Generate a slug from the title
+    entry.generate_slug()
+
+    queue_file = prepare_queue_task(mg_app, entry, filename)
+
+    with queue_file:
+        queue_file.write(submitted_file.read())
+
+    # Get file size and round to 2 decimal places
+    file_size = mg_app.queue_store.get_file_size(
+        entry.queued_media_file) / (1024.0 * 1024)
+    file_size = float('{0:.2f}'.format(file_size))
+
+    # Check if file size is over the limit
+    if max_file_size and file_size >= max_file_size:
+        raise FileUploadLimit()
+
+    # Check if user is over upload limit
+    if upload_limit and (user.uploaded + file_size) >= upload_limit:
+        raise UserUploadLimit()
+
+    user.uploaded = user.uploaded + file_size
+    user.save()
+
+    entry.file_size = file_size
+
+    # Save now so we have this data before kicking off processing
+    entry.save()
+
+    # Post-submission hooks: record the callback_url (if any) and build
+    # the atom feed_url when a urlgen was provided
+    if callback_url:
+        metadata = ProcessingMetaData()
+        metadata.media_entry = entry
+        metadata.callback_url = callback_url
+        metadata.save()
+
+    if urlgen:
+        feed_url = urlgen(
+            'mediagoblin.user_pages.atom_feed',
+            qualified=True, user=user.username)
+    else:
+        feed_url = None
+
+    # Pass off to processing
+    #
+    # (... don't change entry after this point to avoid race
+    # conditions with changes to the document via processing code)
+    run_process_media(entry, feed_url)
+
+    add_comment_subscription(user, entry)
+
+    return entry
+
+
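Putting the pieces together, here is how a submission view might drive the function above; a minimal sketch under stated assumptions (`app`, `request` and `uploaded_file` are hypothetical names standing in for the running MediaGoblinApp, the current request and a werkzeug FileStorage object; none of them are defined by this patch):

    upload_limit, max_file_size = get_upload_file_limits(request.user)
    try:
        entry = submit_media(
            mg_app=app, user=request.user,
            submitted_file=uploaded_file, filename=uploaded_file.filename,
            title=u"Some title", tags_string=u"cats, photos",
            upload_limit=upload_limit, max_file_size=max_file_size)
    except UploadLimitError:
        # covers FileUploadLimit, UserUploadLimit and UserPastUploadLimit
        entry = None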
 def prepare_queue_task(app, entry, filename):
     """
     Prepare a MediaEntry for the processing queue and get a queue file