oncall-engine/engine/common/custom_celery_tasks/dedicated_queue_retry_task.py
teslaedison 335d7a8f7f
chore: fix some comments (#4055)
# What this PR does

Fix typos in comments

## Checklist

- [ ] Unit, integration, and e2e (if applicable) tests updated
- [x] Documentation added (or `pr:no public docs` PR label added if not
required)
- [x] Added the relevant release notes label (see labels prefixed w/
`release:`). These labels dictate how your PR will
    show up in the autogenerated release notes.

Signed-off-by: teslaedison <qingchengqiushuang@gmail.com>
Co-authored-by: Joey Orlando <joey.orlando@grafana.com>
2024-03-14 15:36:00 +00:00

37 lines
1.1 KiB
Python

from celery import shared_task
from celery.utils.log import get_task_logger
from common.custom_celery_tasks.log_exception_on_failure_task import LogExceptionOnFailureTask
# Name of the dedicated celery queue that all retried tasks are routed to.
RETRY_QUEUE = "retry"

# Module-level logger (celery's task logger, keyed by this module's name).
# NOTE: the original had a redundant double assignment (`logger = logger = ...`);
# a single assignment is equivalent.
logger = get_task_logger(__name__)
class DedicatedQueueRetryTask(LogExceptionOnFailureTask):
    """
    Custom task base class that sends all retried tasks to the dedicated retry queue.

    This keeps the regular (high, medium, low) queues from being overloaded
    with retried tasks.
    """

    def retry(
        self, args=None, kwargs=None, exc=None, throw=True, eta=None, countdown=None, max_retries=None, **options
    ):
        # Log the exception that triggered the retry (if any) for visibility.
        logger.warning("Retrying celery task", exc_info=exc)
        # Delegate to the standard celery retry, forcing the dedicated retry queue.
        return super().retry(
            args=args,
            kwargs=kwargs,
            exc=exc,
            throw=throw,
            eta=eta,
            countdown=countdown,
            max_retries=max_retries,
            queue=RETRY_QUEUE,
            **options,
        )
def shared_dedicated_queue_retry_task(*args, **kwargs):
    """
    Drop-in replacement for celery's ``shared_task`` decorator that sets
    ``DedicatedQueueRetryTask`` as the task base class, so any retries of the
    decorated task are routed to the dedicated retry queue.

    All positional and keyword arguments are forwarded to ``shared_task``
    unchanged (apart from the injected ``base``).
    """
    return shared_task(*args, base=DedicatedQueueRetryTask, **kwargs)