[Update] Migrate celery to ops

pull/1156/head
ibuler 2018-04-02 13:19:31 +08:00
parent df80e8047a
commit a4c843ff13
21 changed files with 224 additions and 135 deletions

View File

@@ -3,15 +3,15 @@ import json
import re
import os
import paramiko
from celery import shared_task
from django.core.cache import cache
from django.utils.translation import ugettext as _
from common.utils import get_object_or_none, capacity_convert, \
sum_capacity, encrypt_password, get_logger
from common.celery import register_as_period_task, after_app_shutdown_clean, \
after_app_ready_start, app as celery_app
from ops.celery.utils import register_as_period_task, after_app_shutdown_clean, \
after_app_ready_start
from ops.celery import app as celery_app
from .models import SystemUser, AdminUser, Asset
from . import const

View File

@@ -2,4 +2,4 @@ from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app

View File

@@ -5,8 +5,8 @@ import json
import uuid
from django.core.cache import cache
from rest_framework.views import APIView
from rest_framework.views import Response
from rest_framework.generics import RetrieveAPIView
from rest_framework.views import Response, APIView
from ldap3 import Server, Connection
from django.core.mail import get_connection, send_mail
from django.utils.translation import ugettext_lazy as _
@@ -14,7 +14,8 @@ from django.conf import settings
from .permissions import IsSuperUser, IsAppUser
from .serializers import MailTestSerializer, LDAPTestSerializer
from .const import FILE_END_GUARD
from .celery import FINISHED
from .const import FILE_END_GUARD, celery_task_pre_key
class MailTestingAPI(APIView):
@@ -111,28 +112,27 @@ class DjangoSettingsAPI(APIView):
return Response(configs)
class FileTailApi(APIView):
class CeleryTaskLogApi(APIView):
permission_classes = (IsSuperUser,)
default_buff_size = 1024 * 10
buff_size = 1024 * 10
end = False
buff_size = None
def get(self, request, *args, **kwargs):
file_path = request.query_params.get("file")
self.buff_size = request.query_params.get('buffer') or self.default_buff_size
task_id = kwargs.get('pk')
info = cache.get(celery_task_pre_key + task_id, {})
log_path = info.get("log_path")
mark = request.query_params.get("mark") or str(uuid.uuid4())
if not os.path.isfile(file_path):
if not log_path or not os.path.isfile(log_path):
return Response({"data": _("Waiting ...")}, status=203)
with open(file_path, 'r') as f:
with open(log_path, 'r') as f:
offset = cache.get(mark, 0)
f.seek(offset)
data = f.read(self.buff_size).replace('\n', '\r\n')
mark = str(uuid.uuid4())
cache.set(mark, f.tell(), 5)
if FILE_END_GUARD in data:
data = data.replace(FILE_END_GUARD, '')
if data == '' and info["status"] == FINISHED:
self.end = True
return Response({"data": data, 'end': self.end, 'mark': mark})
return Response({"data": data, 'end': self.end, 'mark': mark})
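For orientation, the new endpoint reads the log incrementally: each response carries a fresh mark that the client echoes back to resume from the saved offset, and end flips to true once the cached task status is finished. A rough polling client is sketched below; the host, URL prefix and authentication handling are assumptions, not part of this commit.

import time
import requests

task_id = "00000000-0000-0000-0000-000000000000"   # id of a published celery task
# host and URL prefix assumed; the route itself is v1/celery/task/<pk>/log/ (see urls.py below)
url = "http://localhost:8080/api/common/v1/celery/task/{}/log/".format(task_id)
cookies = {}                                       # a superuser session is required (IsSuperUser)
mark = ""
end = False
while not end:
    payload = requests.get(url, params={"mark": mark}, cookies=cookies).json()
    print(payload["data"], end="")                 # '\n' is already normalised to '\r\n' by the API
    end = payload.get("end", False)                # absent while the API still answers 203 "Waiting ..."
    mark = payload.get("mark", mark)
    time.sleep(1)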

View File

@@ -1,8 +1,9 @@
# -*- coding: utf-8 -*-
#
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy as _
create_success_msg = _("<b>%(name)s</b> was created successfully")
update_success_msg = _("<b>%(name)s</b> was updated successfully")
FILE_END_GUARD = ">>> Content End <<<"
celery_task_pre_key = "CELERY_"

View File

@@ -79,3 +79,12 @@ class Setting(models.Model):
    class Meta:
        db_table = "settings"


class CeleryTask(models.Model):
    id = models.UUIDField()
    name = models.CharField(max_length=1024)
    status = models.CharField(max_length=128)
    date_published = models.DateTimeField(auto_now_add=True)
    date_start = models.DateTimeField(null=True)
    date_finished = models.DateTimeField(null=True)

View File

@@ -1,13 +1,13 @@
from django.core.mail import send_mail
from django.conf import settings
from .celery import app
from celery import shared_task
from .utils import get_logger
logger = get_logger(__file__)
@app.task
@shared_task
def send_mail_async(*args, **kwargs):
""" Using celery to send email async

View File

@@ -35,7 +35,7 @@
var rowHeight = 1;
var colWidth = 1;
var mark = '';
var url = "{% url 'api-common:tail-file' %}?file={{ file_path }}";
var url = "{% url 'api-common:celery-task-log' pk=task_id %}";
var term;
var end = false;
var error = false;
@@ -54,7 +54,7 @@
function requestAndWrite() {
if (!end) {
$.ajax({
url: url + '&mark=' + mark,
url: url + '?mark=' + mark,
method: "GET",
contentType: "application/json; charset=utf-8"
}).done(function(data, textStatue, jqXHR) {

View File

@@ -10,5 +10,5 @@ urlpatterns = [
url(r'^v1/mail/testing/$', api.MailTestingAPI.as_view(), name='mail-testing'),
url(r'^v1/ldap/testing/$', api.LDAPTestingAPI.as_view(), name='ldap-testing'),
url(r'^v1/django-settings/$', api.DjangoSettingsAPI.as_view(), name='django-settings'),
url(r'^v1/tail-file/$', api.FileTailApi.as_view(), name='tail-file'),
url(r'^v1/celery/task/(?P<pk>[0-9a-zA-Z\-]{36})/log/$', api.CeleryTaskLogApi.as_view(), name='celery-task-log'),
]

View File

@@ -12,6 +12,5 @@ urlpatterns = [
url(r'^ldap/$', views.LDAPSettingView.as_view(), name='ldap-setting'),
url(r'^terminal/$', views.TerminalSettingView.as_view(), name='terminal-setting'),
url(r'^tail-file/$', views.TailFileView.as_view(), name='tail-file'),
url(r'^celery/task/log/$', views.CeleryTaskLogView.as_view(), name='celery-task-log'),
url(r'^celery/task/(?P<pk>[0-9a-zA-Z\-]{36})/log/$', views.CeleryTaskLogView.as_view(), name='celery-task-log'),
]

View File

@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
#
import re
import sys
from collections import OrderedDict
from six import string_types
import base64
@@ -360,3 +361,20 @@ def get_signer():
    signer = Signer(settings.SECRET_KEY)
    return signer


class TeeObj:
    origin_stdout = sys.stdout

    def __init__(self, file_obj):
        self.file_obj = file_obj

    def write(self, msg):
        self.origin_stdout.write(msg)
        self.file_obj.write(msg.replace('*', ''))

    def flush(self):
        self.origin_stdout.flush()
        self.file_obj.flush()

    def close(self):
        self.file_obj.close()
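A minimal illustration of the tee behaviour (the file path is assumed): everything written to stdout is mirrored into the log file, which is how the celery signal handlers further down capture task output.

import sys
log_file = open("/tmp/example.log", "w")      # path assumed for the sketch
sys.stdout = tee = TeeObj(log_file)
print("task output ...")                      # reaches both the console and the file ('*' is stripped from the file copy)
sys.stdout = tee.origin_stdout                # restore, as post_run_task_signal_handler does below
tee.close()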

View File

@@ -1,6 +1,6 @@
from django.core.cache import cache
from django.views.generic import TemplateView, View
from django.views.generic import TemplateView, View, DetailView
from django.shortcuts import render, redirect, Http404, reverse
from django.contrib import messages
from django.utils.translation import ugettext as _
@@ -8,7 +8,6 @@ from django.conf import settings
from .forms import EmailSettingForm, LDAPSettingForm, BasicSettingForm, \
TerminalSettingForm
from .models import Setting
from .mixins import AdminUserRequiredMixin
from .signals import ldap_auth_enable
@@ -123,33 +122,18 @@ class TerminalSettingView(AdminUserRequiredMixin, TemplateView):
return render(request, self.template_name, context)
class TailFileView(AdminUserRequiredMixin, TemplateView):
template_name = 'common/tail_file.html'
def get_context_data(self, **kwargs):
file_path = self.request.GET.get("file")
context = super().get_context_data(**kwargs)
context.update({"file_path": file_path})
return context
class CeleryTaskLogView(AdminUserRequiredMixin, TemplateView):
template_name = 'common/tail_file.html'
template_name = 'common/celery_task_log.html'
task_log_path = None
def get(self, request, *args, **kwargs):
task = self.request.GET.get('task')
if not task:
raise Http404("Not found task")
self.task_log_path = cache.get(task)
if not self.task_log_path:
raise Http404("Not found task log file")
return super().get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
task_id = self.kwargs.get("pk")
if cache.get(celery_task_pre_key+task_id) is None:
raise Http404()
context.update({
'file_path': self.task_log_path
"task_id": self.kwargs.get("pk")
})
return context

View File

@@ -1 +1 @@
from .celery import app as celery_app

View File

@@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jumpserver.settings')
from django.conf import settings
app = Celery('jumpserver')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks(lambda: [app_config.split('.')[0] for app_config in settings.INSTALLED_APPS])
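As a sanity check, a minimal interpreter sketch (assumptions: run from the project root with the Django settings importable, and a CELERY_BROKER_URL defined in settings) showing that the relocated app is importable and reads its configuration through the CELERY_ namespace declared above:

import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jumpserver.settings')  # mirrors the module above
from ops.celery import app

print(app.main)               # 'jumpserver'
print(app.conf.broker_url)    # resolved from settings.CELERY_BROKER_URL via the CELERY_ namespace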

3
apps/ops/celery/const.py Normal file
View File

@@ -0,0 +1,3 @@
# -*- coding: utf-8 -*-
#

View File

@@ -0,0 +1,89 @@
# -*- coding: utf-8 -*-
#
import os
import datetime
import sys
from django.conf import settings
from django.core.cache import cache
from celery import subtask
from celery.signals import worker_ready, worker_shutdown, task_prerun, \
task_postrun, after_task_publish
from django_celery_beat.models import PeriodicTask
from common.utils import get_logger, TeeObj
from common.const import celery_task_pre_key
from .utils import get_after_app_ready_tasks, get_after_app_shutdown_clean_tasks
logger = get_logger(__file__)
WAITING = "waiting"
RUNNING = "running"
FINISHED = "finished"
EXPIRE_TIME = 3600
@worker_ready.connect
def on_app_ready(sender=None, headers=None, body=None, **kwargs):
    if cache.get("CELERY_APP_READY", 0) == 1:
        return
    cache.set("CELERY_APP_READY", 1, 10)
    logger.debug("App ready signal recv")
    tasks = get_after_app_ready_tasks()
    logger.debug("Start need start task: [{}]".format(
        ", ".join(tasks))
    )
    for task in tasks:
        subtask(task).delay()


@worker_shutdown.connect
def after_app_shutdown(sender=None, headers=None, body=None, **kwargs):
    if cache.get("CELERY_APP_SHUTDOWN", 0) == 1:
        return
    cache.set("CELERY_APP_SHUTDOWN", 1, 10)
    tasks = get_after_app_shutdown_clean_tasks()
    logger.debug("App shutdown signal recv")
    logger.debug("Clean need cleaned period tasks: [{}]".format(
        ', '.join(tasks))
    )
    PeriodicTask.objects.filter(name__in=tasks).delete()


@task_prerun.connect
def pre_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
    task_key = celery_task_pre_key + task_id
    info = cache.get(task_key, {})
    now = datetime.datetime.now().strftime("%Y-%m-%d")
    log_dir = os.path.join(settings.PROJECT_DIR, "data", "celery", now)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    log_path = os.path.join(log_dir, task_id + '.log')
    info.update({"status": RUNNING, "log_path": log_path})
    cache.set(task_key, info, EXPIRE_TIME)
    f = open(log_path, 'w')
    tee = TeeObj(f)
    sys.stdout = tee
    task.log_f = tee


@task_postrun.connect
def post_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
    task_key = celery_task_pre_key + task_id
    info = cache.get(task_key, {})
    info.update({"status": FINISHED})
    cache.set(task_key, info, EXPIRE_TIME)
    task.log_f.flush()
    sys.stdout = task.log_f.origin_stdout
    task.log_f.close()


@after_task_publish.connect
def after_task_publish_signal_handler(sender, headers=None, **kwargs):
    task_id = headers["id"]
    key = celery_task_pre_key + task_id
    cache.set(key, {"status": WAITING}, EXPIRE_TIME)
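Taken together, these handlers maintain one cache entry per task id. A sketch of the lifecycle as seen by a caller (the task module and name are placeholders, not from this commit):

from django.core.cache import cache
from common.const import celery_task_pre_key
from some_app.tasks import some_task          # placeholder task

result = some_task.delay()
info = cache.get(celery_task_pre_key + result.id, {})
# after_task_publish:  {"status": "waiting"}
# task_prerun:         {"status": "running", "log_path": "<PROJECT_DIR>/data/celery/<YYYY-MM-DD>/<task_id>.log"}
# task_postrun:        {"status": "finished", "log_path": ...}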

View File

@@ -1,33 +1,50 @@
# ~*~ coding: utf-8 ~*~
import os
# -*- coding: utf-8 -*-
#
import json
from functools import wraps
from celery import Celery, subtask
from celery.signals import worker_ready, worker_shutdown, task_prerun, task_postrun
from django.db.utils import ProgrammingError, OperationalError
from .utils import get_logger
logger = get_logger(__file__)
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jumpserver.settings')
from django.conf import settings
from django.core.cache import cache
from django_celery_beat.models import PeriodicTask, IntervalSchedule, CrontabSchedule
app = Celery('jumpserver')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks(lambda: [app_config.split('.')[0] for app_config in settings.INSTALLED_APPS])
def add_register_period_task(name):
    key = "__REGISTER_PERIODIC_TASKS"
    value = cache.get(key, [])
    value.append(name)
    cache.set(key, value)


def get_register_period_tasks():
    key = "__REGISTER_PERIODIC_TASKS"
    return cache.get(key, [])


def add_after_app_shutdown_clean_task(name):
    key = "__AFTER_APP_SHUTDOWN_CLEAN_TASKS"
    value = cache.get(key, [])
    value.append(name)
    cache.set(key, value)


def get_after_app_shutdown_clean_tasks():
    key = "__AFTER_APP_SHUTDOWN_CLEAN_TASKS"
    return cache.get(key, [])


def add_after_app_ready_task(name):
    key = "__AFTER_APP_READY_RUN_TASKS"
    value = cache.get(key, [])
    value.append(name)
    cache.set(key, value)


def get_after_app_ready_tasks():
    key = "__AFTER_APP_READY_RUN_TASKS"
    return cache.get(key, [])
def create_or_update_celery_periodic_tasks(tasks):
from django_celery_beat.models import PeriodicTask, IntervalSchedule, CrontabSchedule
"""
:param tasks: {
'add-every-monday-morning': {
@@ -106,11 +123,6 @@ def delete_celery_periodic_task(task_name):
PeriodicTask.objects.filter(name=task_name).delete()
__REGISTER_PERIODIC_TASKS = []
__AFTER_APP_SHUTDOWN_CLEAN_TASKS = []
__AFTER_APP_READY_RUN_TASKS = []
def register_as_period_task(crontab=None, interval=None):
"""
Warning: Task must be have not any args and kwargs
@@ -128,7 +140,7 @@ def register_as_period_task(crontab=None, interval=None):
# Because when this decorator run, the task was not created,
# So we can't use func.name
name = '{func.__module__}.{func.__name__}'.format(func=func)
if name not in __REGISTER_PERIODIC_TASKS:
if name not in get_register_period_tasks():
create_or_update_celery_periodic_tasks({
name: {
'task': name,
@@ -138,7 +150,7 @@
'enabled': True,
}
})
__REGISTER_PERIODIC_TASKS.append(name)
add_register_period_task(name)
@wraps(func)
def wrapper(*args, **kwargs):
@@ -151,13 +163,12 @@ def after_app_ready_start(func):
# Because when this decorator run, the task was not created,
# So we can't use func.name
name = '{func.__module__}.{func.__name__}'.format(func=func)
if name not in __AFTER_APP_READY_RUN_TASKS:
__AFTER_APP_READY_RUN_TASKS.append(name)
if name not in get_after_app_ready_tasks():
add_after_app_ready_task(name)
@wraps(func)
def decorate(*args, **kwargs):
return func(*args, **kwargs)
return decorate
@@ -165,50 +176,10 @@ def after_app_shutdown_clean(func):
# Because when this decorator run, the task was not created,
# So we can't use func.name
name = '{func.__module__}.{func.__name__}'.format(func=func)
if name not in __AFTER_APP_READY_RUN_TASKS:
__AFTER_APP_SHUTDOWN_CLEAN_TASKS.append(name)
if name not in get_after_app_shutdown_clean_tasks():
add_after_app_shutdown_clean_task(name)
@wraps(func)
def decorate(*args, **kwargs):
return func(*args, **kwargs)
return decorate
@worker_ready.connect
def on_app_ready(sender=None, headers=None, body=None, **kwargs):
if cache.get("CELERY_APP_READY", 0) == 1:
return
cache.set("CELERY_APP_READY", 1, 10)
logger.debug("App ready signal recv")
logger.debug("Start need start task: [{}]".format(
", ".join(__AFTER_APP_READY_RUN_TASKS))
)
for task in __AFTER_APP_READY_RUN_TASKS:
subtask(task).delay()
@worker_shutdown.connect
def after_app_shutdown(sender=None, headers=None, body=None, **kwargs):
if cache.get("CELERY_APP_SHUTDOWN", 0) == 1:
return
cache.set("CELERY_APP_SHUTDOWN", 1, 10)
from django_celery_beat.models import PeriodicTask
logger.debug("App shutdown signal recv")
logger.debug("Clean need cleaned period tasks: [{}]".format(
', '.join(__AFTER_APP_SHUTDOWN_CLEAN_TASKS))
)
PeriodicTask.objects.filter(name__in=__AFTER_APP_SHUTDOWN_CLEAN_TASKS).delete()
@task_prerun.connect
def pre_run_task_signal_handler(sender, task, *args, **kwargs):
print("Sender: {}".format(sender))
print("Task: {}".format(task))
@task_postrun.connect
def post_run_task_signal_handler(sender, task, *args, **kwargs):
print("Sender: {}".format(sender))
print("Task: {}".format(task))
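For reference, a minimal sketch (task name, body and interval are assumed) of how these decorators are stacked on a shared task; the assets and terminal task modules touched by this commit import them from ops.celery.utils for exactly this purpose.

from celery import shared_task
from ops.celery.utils import (register_as_period_task, after_app_ready_start,
                              after_app_shutdown_clean)

@shared_task
@register_as_period_task(interval=3600)   # recorded through add_register_period_task()
@after_app_ready_start                    # dispatched once the worker_ready signal fires
@after_app_shutdown_clean                 # its PeriodicTask row is deleted at worker shutdown
def refresh_some_cache():
    pass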

View File

@@ -15,7 +15,7 @@ from django_celery_beat.models import CrontabSchedule, IntervalSchedule, \
PeriodicTask
from common.utils import get_signer, get_logger
from common.celery import delete_celery_periodic_task, \
from .celery.utils import delete_celery_periodic_task, \
create_or_update_celery_periodic_tasks, \
disable_celery_periodic_task
from .ansible import AdHocRunner, AnsibleError
@@ -218,14 +218,12 @@ class AdHoc(models.Model):
hid = str(uuid.uuid4())
history = AdHocRunHistory(id=hid, adhoc=self, task=self.task)
time_start = time.time()
# f = open(history.log_path, 'w')
try:
date_start = timezone.now().strftime('%Y-%m-%d %H:%M:%S')
# f.write("{} {}\r\n\r\n".format(date_start, self.task.name))
date_start = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print("{} Start task: {}\r\n".format(date_start, self.task.name))
raw, summary = self._run_only()
# raw, summary = self._run_only(file_obj=f)
date_end = timezone.now().strftime('%Y-%m-%d %H:%M:%S')
# f.write("\r\n{} Task finish\r\n".format(date_end))
date_end = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print("\r\n{} Task finished".format(date_end))
history.is_finished = True
if summary.get('dark'):
history.is_success = False

View File

@@ -113,7 +113,8 @@ $(document).ready(function() {
};
var success = function(data) {
var task_id = data.task;
window.location = "{% url 'ops:adhoc-history-output' pk=DEFAULT_PK %}".replace("{{ DEFAULT_PK }}", task_id);
var url = '{% url "common:celery-task-log" pk=DEFAULT_PK %}'.replace("{{ DEFAULT_PK }}", task_id);
window.open(url, '', 'width=800,height=800')
};
APIUpdateAttr({
url: the_url,

View File

@@ -5,8 +5,6 @@ from django.core.cache import cache
from django.db.utils import ProgrammingError, OperationalError
from common.utils import get_logger
from common.celery import after_app_ready_start, register_as_period_task, \
after_app_shutdown_clean
from .const import ASSETS_CACHE_KEY, USERS_CACHE_KEY, SYSTEM_USER_CACHE_KEY
RUNNING = False

View File

@@ -6,7 +6,7 @@ import datetime
from celery import shared_task
from django.utils import timezone
from common.celery import register_as_period_task, after_app_ready_start, \
from ops.celery.utils import register_as_period_task, after_app_ready_start, \
after_app_shutdown_clean
from .models import Status, Session

4
jms
View File

@@ -155,7 +155,7 @@ def start_celery():
cmd = [
'celery', 'worker',
'-A', 'common',
'-A', 'ops',
'-l', LOG_LEVEL.lower(),
'--pidfile', pid_file,
'-c', str(WORKERS),
@@ -182,7 +182,7 @@ def start_beat():
scheduler = "django_celery_beat.schedulers:DatabaseScheduler"
cmd = [
'celery', 'beat',
'-A', 'common',
'-A', 'ops',
'--pidfile', pid_file,
'-l', LOG_LEVEL,
'--scheduler', scheduler,