# Python
import logging

from django.conf import settings

# Celery
from celery import Task, shared_task, current_app

from elasticsearch import Elasticsearch

# CyBorgBackup
from cyborgbackup.main.utils.task_manager import TaskManager

logger = logging.getLogger('cyborgbackup.main.utils.task_manager')


class LogErrorsTask(Task):
    """Celery base task that logs the traceback of any failure.

    Intended for use as ``base=`` on the shared tasks in this module so
    every failure is recorded through the module logger before Celery's
    default failure handling runs.
    """

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        # Record the failing task's name together with the exception's
        # traceback, then defer to the standard Celery handling.
        message = 'Task {} encountered exception.'.format(self.name)
        logger.exception(message, exc_info=exc)
        super(LogErrorsTask, self).on_failure(exc, task_id, args, kwargs, einfo)


def catalog_is_running():
    """Best-effort health check of the Elasticsearch catalog backend.

    Reads the ``host:port`` string from ``ELASTICSEARCH_DSL['default']['hosts']``
    and issues a cluster-state request with retries disabled.

    Returns:
        bool: True when the cluster answers, False on any error
        (connection failure, bad configuration, unparsable host string).
    """
    try:
        parts = settings.ELASTICSEARCH_DSL['default']['hosts'].split(':')
        client = Elasticsearch(
            [{'host': parts[0], 'port': int(parts[1])}],
            max_retries=0,
        )
        client.cluster.state()
    except Exception:
        # Deliberately broad: any failure simply means "not running".
        return False
    return True
31

32

33 3
def celery_worker_is_running():
    """Return True when at least one Celery worker answers a broadcast ping.

    ``current_app.control.ping()`` returns one reply per responding worker;
    an empty list therefore means no workers are online.

    Returns:
        bool: True if any worker replied, False otherwise.
    """
    # Idiomatic truthiness test instead of `if len(...) > 0: return True
    # else: return False` — an empty reply list is falsy.
    return bool(current_app.control.ping())


@shared_task(base=LogErrorsTask)
def run_job_launch(job_id):
    """Run a task-manager scheduling pass in response to a job launch.

    The ``job_id`` argument is accepted for the task signature but is not
    used here; scheduling is delegated entirely to ``TaskManager``.
    """
    manager = TaskManager()
    manager.schedule()


@shared_task(base=LogErrorsTask)
def run_job_complete(job_id):
    """Run a task-manager scheduling pass after a job completes.

    The ``job_id`` argument is accepted for the task signature but is not
    used here; scheduling is delegated entirely to ``TaskManager``.
    """
    manager = TaskManager()
    manager.schedule()


@shared_task(base=LogErrorsTask)
def run_task_manager():
    """Periodic entry point: log the invocation and run one scheduling pass."""
    logger.debug("Running CyBorgBackup task manager.")
    manager = TaskManager()
    manager.schedule()

Read our documentation on viewing source code .

Loading