author     vijeth-aradhya <vijthaaa@gmail.com>  2017-06-21 10:00:56 +0530
committer  vijeth-aradhya <vijthaaa@gmail.com>  2017-06-21 10:03:40 +0530
commit     602cfcb78916ac54bb415d9c2023124b6f3ad5b6 (patch)
tree       042053d534d102679291020a035c56e2714e01cd
parent     5161533a6fb66e24f6906d6476069277d594e738 (diff)
Use tuple as default return type of workflow()
If workflow() returns None, schedule the old ProcessMedia Celery call as before; otherwise it returns a tuple containing a task group and a single follow-up task, which makes a Celery chord the ideal way to schedule them. Closes #4
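
A minimal sketch of the intended contract follows (the Celery app, task bodies,
resolutions and entry id are hypothetical placeholders; only the tuple return
from workflow() and the chord scheduling mirror this commit):

    from celery import Celery, chord, group

    # Broker/result-backend configuration omitted; a running worker is
    # needed to actually execute the chord.
    app = Celery('workflow_sketch')

    @app.task
    def transcode(entry_id, resolution):
        # Placeholder for one per-resolution transcoding task.
        return '%s -> %s' % (entry_id, resolution)

    @app.task
    def processing_cleanup(entry_id):
        # Placeholder for the final cleanup task.
        return 'cleanup %s' % entry_id

    def workflow(entry_id):
        # Build the tasks but do not schedule them; return them as a tuple.
        transcoding_tasks = group(
            transcode.signature(args=(entry_id, res), immutable=True)
            for res in ('480p', '720p'))
        cleanup_task = processing_cleanup.signature(
            args=(entry_id,), immutable=True)
        return (transcoding_tasks, cleanup_task)

    # Caller side (as in run_process_media below): run the group, then fire
    # the cleanup task once every member of the group has finished.
    wf = workflow(42)
    if wf is None:
        pass  # fall back to the old single ProcessMedia task
    else:
        chord(wf[0])(wf[1])

Returning the signatures instead of scheduling them inside workflow() leaves
the scheduling decision to the caller, which can still fall back to the old
single-task path when a media type provides no workflow.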
-rw-r--r--  mediagoblin/media_types/video/processing.py  7
-rw-r--r--  mediagoblin/submit/lib.py                     4
2 files changed, 6 insertions, 5 deletions
diff --git a/mediagoblin/media_types/video/processing.py b/mediagoblin/media_types/video/processing.py
index 2a6a716f..ee2aa443 100644
--- a/mediagoblin/media_types/video/processing.py
+++ b/mediagoblin/media_types/video/processing.py
@@ -22,7 +22,7 @@ import celery
 import six
-from celery import group, chord
+from celery import group
 from mediagoblin import mg_globals as mgg
 from mediagoblin.processing import (
     FilenameBuilder, BaseProcessingFail,
@@ -585,7 +585,4 @@ class VideoProcessingManager(ProcessingManager):
         cleanup_task = processing_cleanup.signature(args=(entry.id,),
                                                     queue='default', immutable=True)
-        chord(transcoding_tasks)(cleanup_task)
-
-        # Not sure what to return since we are scheduling the task here itself
-        return 1
+        return (transcoding_tasks, cleanup_task)
diff --git a/mediagoblin/submit/lib.py b/mediagoblin/submit/lib.py
index b228dbd1..9ec96923 100644
--- a/mediagoblin/submit/lib.py
+++ b/mediagoblin/submit/lib.py
@@ -20,6 +20,8 @@ from os.path import splitext
 import six
+from celery import chord
+
 from werkzeug.utils import secure_filename
 from werkzeug.datastructures import FileStorage
@@ -271,6 +273,8 @@ def run_process_media(entry, feed_url=None,
             ProcessMedia().apply_async(
                 [entry.id, feed_url, reprocess_action, reprocess_info], {},
                 task_id=entry.queued_task_id)
+        else:
+            chord(wf[0])(wf[1])
     except BaseException as exc:
         # The purpose of this section is because when running in "lazy"
         # or always-eager-with-exceptions-propagated celery mode that