# coding: utf-8
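"""Celery tasks for the ops app: running jobs and job executions, plus periodic maintenance tasks."""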

import datetime
import time

from celery import shared_task
from celery.exceptions import SoftTimeLimitExceeded
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django_celery_beat.models import PeriodicTask

from common.const.crontab import CRONTAB_AT_AM_TWO
from common.utils import get_logger, get_object_or_none, get_log_keep_day
from ops.celery import app
from orgs.utils import tmp_to_org, tmp_to_root_org
from .celery.decorator import (
    register_as_period_task, after_app_ready_start, after_app_shutdown_clean_periodic
)
from .celery.utils import (
    create_or_update_celery_periodic_tasks, get_celery_periodic_task,
    disable_celery_periodic_task, delete_celery_periodic_task
)
from .models import Job, JobExecution
from .notifications import ServerPerformanceCheckUtil

logger = get_logger(__file__)


def job_task_activity_callback(self, job_id, *args, **kwargs):
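    """Return the resource ids and org id used to record this task's activity log."""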
    job = get_object_or_none(Job, id=job_id)
    if not job:
        return
    resource_ids = [job.id]
    org_id = job.org_id
    return resource_ids, org_id


@shared_task(
    soft_time_limit=60, queue="ansible", verbose_name=_("Run ansible task"),
    activity_callback=job_task_activity_callback
)
def run_ops_job(job_id):
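    """Create an execution for the given Job and run it in the job's own org."""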
    with tmp_to_root_org():
        job = get_object_or_none(Job, id=job_id)
        if not job:
            logger.error("Did not get the job: {}".format(job_id))
            return

    with tmp_to_org(job.org):
        execution = job.create_execution()
        execution.creator = job.creator
        run_ops_job_execution(execution.id)
        try:
            execution.start()
        except SoftTimeLimitExceeded:
            execution.set_error('Run timeout')
            logger.error("Run adhoc timeout")
        except Exception as e:
            execution.set_error(e)
            logger.error("Start adhoc execution error: {}".format(e))


def job_execution_task_activity_callback(self, execution_id, *args, **kwargs):
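    """Return the resource ids and org id used to record this execution's activity log."""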
    execution = get_object_or_none(JobExecution, id=execution_id)
    if not execution:
        return
    resource_ids = [execution.id]
    org_id = execution.org_id
    return resource_ids, org_id


@shared_task(
    soft_time_limit=60, queue="ansible", verbose_name=_("Run ansible task execution"),
    activity_callback=job_execution_task_activity_callback
)
def run_ops_job_execution(execution_id, **kwargs):
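    """Fetch the JobExecution under the root org and start it inside its own org."""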
    with tmp_to_root_org():
        execution = get_object_or_none(JobExecution, id=execution_id)

    if not execution:
        logger.error("Did not get the execution: {}".format(execution_id))
        return

    try:
        with tmp_to_org(execution.org):
            execution.start()
    except SoftTimeLimitExceeded:
        execution.set_error('Run timeout')
        logger.error("Run adhoc timeout")
    except Exception as e:
        execution.set_error(e)
        logger.error("Start adhoc execution error: {}".format(e))


@shared_task(verbose_name=_('Clear celery periodic tasks'))
@after_app_ready_start
def clean_celery_periodic_tasks():
    """Clean up celery periodic tasks that are no longer registered with the app."""
    logger.info('Start clean celery periodic tasks.')
    registered_tasks = PeriodicTask.objects.all()
    for task in registered_tasks:
        if task.task in app.tasks:
            continue

        task_name = task.name
        logger.info('Start clean task: {}'.format(task_name))
        disable_celery_periodic_task(task_name)
        delete_celery_periodic_task(task_name)
        task = get_celery_periodic_task(task_name)
        if task is None:
            logger.info('Clean task success: {}'.format(task_name))
        else:
            logger.info('Clean task failure: {}'.format(task))


@shared_task(verbose_name=_('Create or update periodic tasks'))
@after_app_ready_start
def create_or_update_registered_periodic_tasks():
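    """Sync every task registered via the period-task decorators into the beat scheduler."""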
    from .celery.decorator import get_register_period_tasks
    for task in get_register_period_tasks():
        create_or_update_celery_periodic_tasks(task)


@shared_task(verbose_name=_("Periodic check service performance"))
@register_as_period_task(interval=3600)
def check_server_performance_period():
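    """Check server performance hourly and publish any alerts."""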
    ServerPerformanceCheckUtil().check_and_publish()


@shared_task(verbose_name=_("Clean up unexpected jobs"))
@register_as_period_task(interval=3600)
def clean_up_unexpected_jobs():
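    """Clean up job executions left in an unexpected state, across all orgs."""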
    with tmp_to_root_org():
        JobExecution.clean_unexpected_execution()


@shared_task(verbose_name=_('Clean job_execution db record'))
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
def clean_job_execution_period():
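    """Delete JobExecution records older than JOB_EXECUTION_KEEP_DAYS."""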
    logger.info("Start clean job_execution db record")
    now = timezone.now()
    days = get_log_keep_day('JOB_EXECUTION_KEEP_DAYS')
    expired_day = now - datetime.timedelta(days=days)
    with tmp_to_root_org():
        del_res = JobExecution.objects.filter(date_created__lt=expired_day).delete()
    logger.info(f"Clean job_execution db records success: deleted {del_res[0]} records older than {days} days")


@shared_task
def longtime_add(x, y):
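    """Example long-running task: sleep for 50 seconds, then return x + y."""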
    print('long time task begins')
    time.sleep(50)
    print('long time task finished')
    return x + y