From 9a2c66ca9ef763fa68dc09a483c02fe2ee02d78f Mon Sep 17 00:00:00 2001
From: Rodney Ewing
Date: Fri, 2 Aug 2013 11:40:41 -0700
Subject: added image reprocessing

---
 mediagoblin/submit/lib.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

(limited to 'mediagoblin/submit')

diff --git a/mediagoblin/submit/lib.py b/mediagoblin/submit/lib.py
index 7e85696b..3619a329 100644
--- a/mediagoblin/submit/lib.py
+++ b/mediagoblin/submit/lib.py
@@ -76,17 +76,19 @@ def prepare_queue_task(app, entry, filename):
     return queue_file
 
 
-def run_process_media(entry, feed_url=None):
+def run_process_media(entry, feed_url=None, reprocess_info=None):
     """Process the media asynchronously
 
     :param entry: MediaEntry() instance to be processed.
     :param feed_url: A string indicating the feed_url that the PuSH servers
         should be notified of. This will be sth like: `request.urlgen(
             'mediagoblin.user_pages.atom_feed',qualified=True,
-            user=request.user.username)`"""
+            user=request.user.username)`
+    :param reprocess: A dict containing all of the necessary reprocessing
+        info for the given media_type"""
     try:
         process_media.apply_async(
-            [entry.id, feed_url], {},
+            [entry.id, feed_url, reprocess_info], {},
             task_id=entry.queued_task_id)
     except BaseException as exc:
         # The purpose of this section is because when running in "lazy"
--
cgit v1.2.3

From 77ea4c9bd1e8372fb7206596ca5125738033ced5 Mon Sep 17 00:00:00 2001
From: Christopher Allan Webber
Date: Sun, 11 Aug 2013 14:34:45 -0500
Subject: Updating to the point where we can allllmost run with the new reprocessing code
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This commit sponsored by Odin Hørthe Omdal. Thank you!
---
 mediagoblin/submit/lib.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

(limited to 'mediagoblin/submit')

diff --git a/mediagoblin/submit/lib.py b/mediagoblin/submit/lib.py
index 3619a329..ad37203d 100644
--- a/mediagoblin/submit/lib.py
+++ b/mediagoblin/submit/lib.py
@@ -76,7 +76,8 @@ def prepare_queue_task(app, entry, filename):
     return queue_file
 
 
-def run_process_media(entry, feed_url=None, reprocess_info=None):
+def run_process_media(entry, feed_url=None,
+                      reprocess_action="inital", reprocess_info=None):
     """Process the media asynchronously
 
     :param entry: MediaEntry() instance to be processed.
@@ -84,11 +85,12 @@ def run_process_media(entry, feed_url=None, reprocess_info=None):
         should be notified of. This will be sth like: `request.urlgen(
             'mediagoblin.user_pages.atom_feed',qualified=True,
             user=request.user.username)`
-    :param reprocess: A dict containing all of the necessary reprocessing
+    :param reprocess_action: What particular action should be run.
+    :param reprocess_info: A dict containing all of the necessary reprocessing
         info for the given media_type"""
     try:
         process_media.apply_async(
-            [entry.id, feed_url, reprocess_info], {},
+            [entry.id, feed_url, reprocess_action, reprocess_info], {},
             task_id=entry.queued_task_id)
     except BaseException as exc:
         # The purpose of this section is because when running in "lazy"
--
cgit v1.2.3

From 98d1fa3beddfc602c541fe7f538ca882ad6c7e9c Mon Sep 17 00:00:00 2001
From: Christopher Allan Webber
Date: Mon, 12 Aug 2013 11:00:15 -0500
Subject: Fixing normal submission of media (well for images anyway)

---
 mediagoblin/submit/lib.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'mediagoblin/submit')

diff --git a/mediagoblin/submit/lib.py b/mediagoblin/submit/lib.py
index ad37203d..1a45e447 100644
--- a/mediagoblin/submit/lib.py
+++ b/mediagoblin/submit/lib.py
@@ -77,7 +77,7 @@ def prepare_queue_task(app, entry, filename):
 
 
 def run_process_media(entry, feed_url=None,
-                      reprocess_action="inital", reprocess_info=None):
+                      reprocess_action="initial", reprocess_info=None):
     """Process the media asynchronously
 
     :param entry: MediaEntry() instance to be processed.
--
cgit v1.2.3

From bf2dafd1a04ef8050ebf08bb512862a1592998c0 Mon Sep 17 00:00:00 2001
From: Sebastian Spaeth
Date: Thu, 20 Dec 2012 13:42:37 +0100
Subject: Tweak Celery Task

- Make sure Exceptions are pickleable (not sure if this was not the case
  but this is the pattern as documented in the celery docs.
- Don't create a task_id in the GMG code, but save the one implicitely
  created by celery.
- Don't create a task-id directory per upload. Just store queued uploads
  in a single directory (this is the most controversial change and might
  need discussion!!!)

Signed-off-by: Sebastian Spaeth
---
 mediagoblin/submit/views.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

(limited to 'mediagoblin/submit')

diff --git a/mediagoblin/submit/views.py b/mediagoblin/submit/views.py
index 3f9d5b2d..6bb95ecb 100644
--- a/mediagoblin/submit/views.py
+++ b/mediagoblin/submit/views.py
@@ -89,7 +89,7 @@ def submit_start(request):
                 # Save now so we have this data before kicking off processing
                 entry.save()
 
-                # Pass off to processing
+                # Pass off to async processing
                 #
                 # (... don't change entry after this point to avoid race
                 # conditions with changes to the document via processing code)
@@ -97,6 +97,7 @@ def submit_start(request):
                     'mediagoblin.user_pages.atom_feed', qualified=True,
                     user=request.user.username)
                 run_process_media(entry, feed_url)
+
                 add_message(request, SUCCESS, _('Woohoo! Submitted!'))
 
                 add_comment_subscription(request.user, entry)
--
cgit v1.2.3

From b505952508f717e0d4f59d24c87a54ef42673c3f Mon Sep 17 00:00:00 2001
From: Rodney Ewing
Date: Mon, 19 Aug 2013 12:58:00 -0700
Subject: -update to latest master - have mg generate task_id remove

---
 mediagoblin/submit/lib.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'mediagoblin/submit')

diff --git a/mediagoblin/submit/lib.py b/mediagoblin/submit/lib.py
index 7e85696b..33687a72 100644
--- a/mediagoblin/submit/lib.py
+++ b/mediagoblin/submit/lib.py
@@ -21,7 +21,7 @@ from werkzeug.datastructures import FileStorage
 
 from mediagoblin.db.models import MediaEntry
 from mediagoblin.processing import mark_entry_failed
-from mediagoblin.processing.task import process_media
+from mediagoblin.processing.task import ProcessMedia
 
 
 _log = logging.getLogger(__name__)
@@ -85,7 +85,7 @@ def run_process_media(entry, feed_url=None):
             'mediagoblin.user_pages.atom_feed',qualified=True,
             user=request.user.username)`"""
     try:
-        process_media.apply_async(
+        ProcessMedia().apply_async(
            [entry.id, feed_url], {},
             task_id=entry.queued_task_id)
     except BaseException as exc:
--
cgit v1.2.3
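
Taken together, the patches above change how media processing is kicked off: run_process_media() grows optional reprocess_action (defaulting to "initial") and reprocess_info arguments, and the work is handed to the Celery task via apply_async(). The sketch below shows how a caller might use the resulting helper. It assumes the reprocess-aware signature from the first three patches; the 'resize' action name and the keys in the example reprocess_info dict are illustrative placeholders, not values taken from these commits.

from mediagoblin.submit.lib import run_process_media

def queue_initial_processing(request, entry):
    # Initial submission, mirroring submit_start() above: build the atom
    # feed URL so PuSH servers can be notified, then queue the entry.
    # reprocess_action defaults to "initial" and reprocess_info to None.
    feed_url = request.urlgen(
        'mediagoblin.user_pages.atom_feed',
        qualified=True, user=request.user.username)
    run_process_media(entry, feed_url)

def queue_reprocessing(entry, reprocess_info):
    # Re-run processing for an already-submitted entry.  No feed_url is
    # needed; reprocess_info carries the media-type-specific parameters.
    # The 'resize' action and the example keys below are hypothetical.
    run_process_media(entry, reprocess_action='resize',
                      reprocess_info=reprocess_info)

# Example call with hypothetical parameters for an image media type:
# queue_reprocessing(entry, {'max_width': 640, 'max_height': 480})

Passing reprocess_info through as an opaque dict keeps the Celery task signature stable while letting each media type define its own reprocessing parameters.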