import json
import os

from celery.result import AsyncResult
from django.conf import settings
from django.db import transaction
from django.db.models import Count
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.utils._os import safe_join
from django.utils.translation import gettext_lazy as _
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.views import APIView

from accounts.models import Account
from assets.const import Protocol
from assets.models import Asset
from common.const.http import POST
from common.permissions import IsValidUser
from jumpserver.settings import get_file_md5
from ops.celery import app
from ops.const import Types
from ops.models import Job, JobExecution
from ops.serializers.job import (
    JobSerializer, JobExecutionSerializer, FileSerializer, JobTaskStopSerializer
)
from ops.tasks import run_ops_job_execution
from ops.variables import JMS_JOB_VARIABLE_HELP
from orgs.mixins.api import OrgBulkModelViewSet
from orgs.utils import tmp_to_org, get_current_org
from perms.const import ActionChoices
from perms.models import PermNode
from perms.utils import UserPermAssetUtil
from perms.utils.asset_perm import PermAssetDetailUtil

__all__ = [
    'JobViewSet', 'JobExecutionViewSet', 'JobRunVariableHelpAPIView',
    'JobExecutionTaskDetail', 'UsernameHintsAPI'
]


def set_task_to_serializer_data(serializer, task_id):
    # DRF caches serializer output in `_data`; stash the task id there so the
    # response payload can carry it without re-serializing the instance.
    data = getattr(serializer, "_data", {})
    data["task_id"] = task_id
    setattr(serializer, "_data", data)
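
# Usage sketch: JobViewSet.run_job stores the execution id here, and the
# upload action later reads it back via serializer.data.get('task_id').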


def merge_nodes_and_assets(nodes, assets, user):
    """Expand permission-tree node ids into assets the user can reach and
    merge them into ``assets``, skipping duplicates."""
    if not nodes:
        return assets
    perm_util = UserPermAssetUtil(user=user)
    for node_id in nodes:
        if node_id == PermNode.FAVORITE_NODE_KEY:
            node_assets = perm_util.get_favorite_assets()
        elif node_id == PermNode.UNGROUPED_NODE_KEY:
            node_assets = perm_util.get_ungroup_assets()
        else:
            _, node_assets = perm_util.get_node_all_assets(node_id)
        assets.extend(node_assets.exclude(id__in=[asset.id for asset in assets]))
    return assets
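
# A minimal call sketch (ids are illustrative), mirroring UsernameHintsAPI:
#
#   assets = list(Asset.objects.filter(id__in=asset_ids))
#   assets = merge_nodes_and_assets(node_ids, assets, request.user)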


class JobViewSet(OrgBulkModelViewSet):
    serializer_class = JobSerializer
    search_fields = ('name', 'comment')
    model = Job

    def check_permissions(self, request):
        # File-upload jobs only need the standard model permissions.
        if self.action == 'upload' or request.data.get('type') == Types.upload_file:
            return super().check_permissions(request)
        # Adhoc and playbook jobs also require command execution to be
        # enabled globally.
        if not settings.SECURITY_COMMAND_EXECUTION:
            return self.permission_denied(request, "Command execution disabled")
        return super().check_permissions(request)

    def check_upload_permission(self, assets, account_name):
        protocols_required = {Protocol.ssh, Protocol.sftp, Protocol.winrm}
        error_msg_missing_protocol = _(
            "Asset ({asset}) must have at least one of the following protocols added: SSH, SFTP, or WinRM")
        error_msg_auth_missing_protocol = _("Asset ({asset}) authorization is missing SSH, SFTP, or WinRM protocol")
        error_msg_auth_missing_upload = _("Asset ({asset}) authorization lacks upload permissions")
        for asset in assets:
            # The asset itself must expose a protocol that supports file transfer.
            protocols = asset.protocols.values_list("name", flat=True)
            if not set(protocols).intersection(protocols_required):
                self.permission_denied(self.request, error_msg_missing_protocol.format(asset=asset.name))
            # The user's authorization must cover one of those protocols and
            # grant the upload action for the chosen account.
            util = PermAssetDetailUtil(self.request.user, asset)
            if not util.check_perm_protocols(protocols_required):
                self.permission_denied(self.request, error_msg_auth_missing_protocol.format(asset=asset.name))
            if not util.check_perm_actions(account_name, [ActionChoices.upload.value]):
                self.permission_denied(self.request, error_msg_auth_missing_upload.format(asset=asset.name))

    def get_queryset(self):
        queryset = super().get_queryset()
        queryset = queryset \
            .filter(creator=self.request.user) \
            .exclude(type=Types.upload_file)

        # Instant (adhoc) jobs are hidden from the list view; retrieve still
        # needs them so their status can be fetched.
        if self.action != 'retrieve':
            return queryset.filter(instant=False)
        return queryset

    def perform_create(self, serializer):
        run_after_save = serializer.validated_data.pop('run_after_save', False)
        node_ids = serializer.validated_data.pop('nodes', [])
        assets = serializer.validated_data.get('assets')
        assets = merge_nodes_and_assets(node_ids, assets, self.request.user)
        serializer.validated_data['assets'] = assets
        if serializer.validated_data.get('type') == Types.upload_file:
            account_name = serializer.validated_data.get('runas')
            self.check_upload_permission(assets, account_name)
        instance = serializer.save()

        if instance.instant or run_after_save:
            self.run_job(instance, serializer)

    def perform_update(self, serializer):
        run_after_save = serializer.validated_data.pop('run_after_save', False)
        instance = serializer.save()
        if run_after_save:
            self.run_job(instance, serializer)

    def run_job(self, job, serializer):
        execution = job.create_execution()
        execution.creator = self.request.user
        execution.save()

        set_task_to_serializer_data(serializer, execution.id)
        # Dispatch only after the transaction commits so the worker can see
        # the execution row; reusing the execution id as the Celery task id
        # lets clients poll status with the same identifier.
        transaction.on_commit(
            lambda: run_ops_job_execution.apply_async(
                (str(execution.id),), task_id=str(execution.id)
            )
        )
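
    # Polling sketch for the dispatched task (illustrative; `task_id` is the
    # execution id returned to the client):
    #
    #   from celery.result import AsyncResult
    #   state = AsyncResult(task_id, app=app).state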

    @staticmethod
    def get_duplicates_files(files):
        # Compare by name: request.FILES yields distinct UploadedFile objects
        # even for identical uploads, so identity comparison would never flag
        # a duplicate, and same-named files would overwrite each other on disk.
        seen = set()
        duplicates = set()
        for file in files:
            if file.name in seen:
                duplicates.add(file.name)
            else:
                seen.add(file.name)
        return list(duplicates)

    @staticmethod
    def get_exceeds_limit_files(files):
        # FILE_UPLOAD_SIZE_LIMIT_MB is configured in MB; compare in bytes.
        limit_bytes = settings.FILE_UPLOAD_SIZE_LIMIT_MB * 1024 * 1024
        return [file for file in files if file.size > limit_bytes]

    @action(methods=[POST], detail=False, serializer_class=FileSerializer,
            permission_classes=[IsValidUser, ], url_path='upload')
    def upload(self, request, *args, **kwargs):
        uploaded_files = request.FILES.getlist('files')
        serializer = self.get_serializer(data=request.data)

        if not serializer.is_valid():
            msg = 'Upload data invalid: {}'.format(serializer.errors)
            return Response({'error': msg}, status=400)

        same_files = self.get_duplicates_files(uploaded_files)
        if same_files:
            return Response({'error': _("Duplicate file exists")}, status=400)

        exceeds_limit_files = self.get_exceeds_limit_files(uploaded_files)
        if exceeds_limit_files:
            return Response(
                {'error': _("File size exceeds maximum limit. Please select a file smaller than {limit}MB").format(
                    limit=settings.FILE_UPLOAD_SIZE_LIMIT_MB)},
                status=400)

        job_id = request.data.get('job_id', '')
        job = get_object_or_404(Job, pk=job_id, creator=request.user)
        job_args = json.loads(job.args)
        src_path_info = []
        # safe_join rejects paths that escape SHARE_DIR, guarding against
        # traversal via the job id or file names.
        upload_file_dir = safe_join(settings.SHARE_DIR, 'job_upload_file', job_id)
        for uploaded_file in uploaded_files:
            filename = uploaded_file.name
            saved_path = safe_join(upload_file_dir, filename)
            os.makedirs(os.path.dirname(saved_path), exist_ok=True)
            with open(saved_path, 'wb+') as destination:
                for chunk in uploaded_file.chunks():
                    destination.write(chunk)
            src_path_info.append({'filename': filename, 'md5': get_file_md5(saved_path)})
        job_args['src_path_info'] = src_path_info
        job.args = json.dumps(job_args)
        job.save()
        self.run_job(job, serializer)
        return Response({'task_id': serializer.data.get('task_id')}, status=201)
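
    # Request sketch (the concrete route depends on how this viewset is
    # registered): POST .../jobs/upload/ with multipart fields 'job_id'
    # (an upload_file job owned by the caller) and one or more 'files';
    # success returns {'task_id': <execution id>} with status 201.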


class JobExecutionViewSet(OrgBulkModelViewSet):
    serializer_class = JobExecutionSerializer
    http_method_names = ('get', 'post', 'head', 'options',)
    model = JobExecution
    search_fields = ('material',)
    filterset_fields = ['status', 'job_id']

    @staticmethod
    def start_deploy(instance, serializer):
        run_ops_job_execution.apply_async((str(instance.id),), task_id=str(instance.id))

    def perform_create(self, serializer):
        instance = serializer.save()
        # Snapshot the job's version, material and type onto the execution
        # record before dispatching.
        instance.job_version = instance.job.version
        instance.material = instance.job.material
        instance.job_type = Types[instance.job.type].value
        instance.creator = self.request.user
        instance.save()

        set_task_to_serializer_data(serializer, instance.id)
        # Same commit-then-dispatch pattern as JobViewSet.run_job.
        transaction.on_commit(
            lambda: run_ops_job_execution.apply_async((str(instance.id),), task_id=str(instance.id))
        )

    def get_queryset(self):
        queryset = super().get_queryset()
        queryset = queryset.filter(creator=self.request.user)
        return queryset

    @action(methods=[POST], detail=False, serializer_class=JobTaskStopSerializer, permission_classes=[IsValidUser, ],
            url_path='stop')
    def stop(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        if not serializer.is_valid():
            return Response({'error': serializer.errors}, status=400)
        task_id = serializer.validated_data['task_id']
        try:
            instance = get_object_or_404(JobExecution, pk=task_id, creator=request.user)
        except Http404:
            return Response(
                {'error': _('The task is being created and cannot be interrupted. Please try again later.')},
                status=400
            )
        try:
            task = AsyncResult(task_id, app=app)
            inspect = app.control.inspect()

            for worker in inspect.registered().keys():
                if not worker.startswith('ansible'):
                    continue
                if task_id not in [at['id'] for at in inspect.active().get(worker, [])]:
                    # Still queued, not yet running on any ansible worker:
                    # revoke it instead of stopping a live run.
                    task.revoke(terminate=True)
                    instance.set_error('Job stop by "revoke task {}"'.format(task_id))
                    return Response({'task_id': task_id}, status=200)
        except Exception as e:
            instance.set_error(str(e))
            return Response({'error': f'Error while stopping the task {task_id}: {e}'}, status=400)

        # Already running on a worker: stop the execution itself.
        instance.stop()
        return Response({'task_id': task_id}, status=200)


class JobExecutionTaskDetail(APIView):
    rbac_perms = {
        'GET': ['ops.view_jobexecution'],
    }

    def get(self, request, **kwargs):
        org = get_current_org()
        task_id = str(kwargs.get('task_id'))

        with tmp_to_org(org):
            execution = get_object_or_404(JobExecution, pk=task_id, creator=request.user)

            return Response(data={
                'status': execution.status,
                'is_finished': execution.is_finished,
                'is_success': execution.is_success,
                'time_cost': execution.time_cost,
                'job_id': execution.job.id,
                'summary': execution.summary
            })


class JobRunVariableHelpAPIView(APIView):
    permission_classes = [IsValidUser]

    def get(self, request, **kwargs):
        return Response(data=JMS_JOB_VARIABLE_HELP)


class UsernameHintsAPI(APIView):
    permission_classes = [IsValidUser]

    def post(self, request, **kwargs):
        node_ids = request.data.get('nodes', None)
        asset_ids = request.data.get('assets', [])
        query = request.data.get('query', None)

        assets = list(Asset.objects.filter(id__in=asset_ids).all())
        assets = merge_nodes_and_assets(node_ids, assets, request.user)

        # Suggest the ten most common usernames across the selected assets,
        # excluding JumpServer's internal jms_/js_ accounts.
        top_accounts = Account.objects \
            .exclude(username__startswith='jms_') \
            .exclude(username__startswith='js_') \
            .filter(username__icontains=query) \
            .filter(asset__in=assets) \
            .values('username') \
            .annotate(total=Count('username')) \
            .order_by('-total', '-username')[:10]
        return Response(data=top_accounts)
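
# Response shape sketch (values illustrative):
#   [{'username': 'root', 'total': 12}, {'username': 'admin', 'total': 7}, ...]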