mirror of https://github.com/jumpserver/jumpserver
perf: update change status
parent 528c333a6d
commit fc3fc40341
@@ -9,14 +9,22 @@ from rest_framework.response import Response

from accounts import serializers
from accounts.const import AutomationTypes
-from accounts.models import CheckAccountAutomation, AccountRisk, RiskChoice, CheckAccountEngine
+from accounts.models import (
+    CheckAccountAutomation,
+    AccountRisk,
+    RiskChoice,
+    CheckAccountEngine,
+)
from common.api import JMSModelViewSet
+from common.utils import many_get
from orgs.mixins.api import OrgBulkModelViewSet
from .base import AutomationExecutionViewSet

__all__ = [
-    'CheckAccountAutomationViewSet', 'CheckAccountExecutionViewSet',
-    'AccountRiskViewSet', 'CheckAccountEngineViewSet',
+    "CheckAccountAutomationViewSet",
+    "CheckAccountExecutionViewSet",
+    "AccountRiskViewSet",
+    "CheckAccountEngineViewSet",
]

from ...risk_handlers import RiskHandler
@@ -24,7 +32,7 @@ from ...risk_handlers import RiskHandler

class CheckAccountAutomationViewSet(OrgBulkModelViewSet):
    model = CheckAccountAutomation
-    filterset_fields = ('name',)
+    filterset_fields = ("name",)
    search_fields = filterset_fields
    serializer_class = serializers.CheckAccountAutomationSerializer
@@ -36,7 +44,7 @@ class CheckAccountExecutionViewSet(AutomationExecutionViewSet):
        ("create", "accounts.add_checkaccountexecution"),
        ("report", "accounts.view_checkaccountsexecution"),
    )
-    ordering = ('-date_created',)
+    ordering = ("-date_created",)
    tp = AutomationTypes.check_account

    def get_queryset(self):
@@ -47,61 +55,86 @@ class CheckAccountExecutionViewSet(AutomationExecutionViewSet):

class AccountRiskViewSet(OrgBulkModelViewSet):
    model = AccountRisk
-    search_fields = ('username', 'asset')
-    filterset_fields = ('risk', 'status', 'asset')
+    search_fields = ("username", "asset")
+    filterset_fields = ("risk", "status", "asset")
    serializer_classes = {
-        'default': serializers.AccountRiskSerializer,
-        'assets': serializers.AssetRiskSerializer,
-        'handle': serializers.HandleRiskSerializer
+        "default": serializers.AccountRiskSerializer,
+        "assets": serializers.AssetRiskSerializer,
+        "handle": serializers.HandleRiskSerializer,
    }
-    ordering_fields = (
-        'asset', 'risk', 'status', 'username', 'date_created'
-    )
-    ordering = ('-asset', 'date_created')
+    ordering_fields = ("asset", "risk", "status", "username", "date_created")
+    ordering = ("status", "asset", "date_created")
    rbac_perms = {
-        'sync_accounts': 'assets.add_accountrisk',
-        'assets': 'accounts.view_accountrisk',
-        'handle': 'accounts.change_accountrisk'
+        "sync_accounts": "assets.add_accountrisk",
+        "assets": "accounts.view_accountrisk",
+        "handle": "accounts.change_accountrisk",
    }

    def update(self, request, *args, **kwargs):
-        raise MethodNotAllowed('PUT')
+        raise MethodNotAllowed("PUT")

    def create(self, request, *args, **kwargs):
-        raise MethodNotAllowed('POST')
+        raise MethodNotAllowed("POST")

-    @action(methods=['get'], detail=False, url_path='assets')
+    @action(methods=["get"], detail=False, url_path="assets")
    def assets(self, request, *args, **kwargs):
        annotations = {
-            f'{risk[0]}_count': Count('id', filter=Q(risk=risk[0]))
+            f"{risk[0]}_count": Count("id", filter=Q(risk=risk[0]))
            for risk in RiskChoice.choices
        }
        queryset = (
-            AccountRisk.objects
-            .select_related('asset', 'asset__platform')  # use select_related to optimize the asset and asset__platform lookups
-            .values('asset__id', 'asset__name', 'asset__address', 'asset__platform__name')  # select only the fields we need
-            .annotate(risk_total=Count('id'))  # total number of risks
+            AccountRisk.objects.select_related(
+                "asset", "asset__platform"
+            )  # use select_related to optimize the asset and asset__platform lookups
+            .values(
+                "asset__id", "asset__name", "asset__address", "asset__platform__name"
+            )  # select only the fields we need
+            .annotate(risk_total=Count("id"))  # total number of risks
            .annotate(**annotations)  # per-risk counts using the annotations defined above
        )
        return self.get_paginated_response_from_queryset(queryset)

-    @action(methods=['post'], detail=False, url_path='handle')
+    @action(methods=["post"], detail=False, url_path="handle")
    def handle(self, request, *args, **kwargs):
-        serializer = self.get_serializer(data=request.data)
-        serializer.is_valid(raise_exception=True)
+        s = self.get_serializer(data=request.data)
+        s.is_valid(raise_exception=True)

-        asset, username, act, risk = itemgetter('asset', 'username', 'action', 'risk')(serializer.validated_data)
-        handler = RiskHandler(asset=asset, username=username)
+        asset, username, act, risk = many_get(s.validated_data, ("asset", "username", "action", "risk"))
+        handler = RiskHandler(asset=asset, username=username, request=self.request)
        data = handler.handle(act, risk)
        if not data:
-            data = {'message': 'Success'}
+            data = {"message": "Success"}
        return Response(data)


class CheckAccountEngineViewSet(JMSModelViewSet):
-    search_fields = ('name',)
+    search_fields = ("name",)
    serializer_class = serializers.CheckAccountEngineSerializer

-    def get_queryset(self):
-        return CheckAccountEngine.objects.all()
+    @staticmethod
+    def init_if_need():
+        data = [
+            {
+                "id": "00000000-0000-0000-0000-000000000001",
+                "slug": "check_gathered_account",
+                "name": "检查发现的账号",
+                "comment": "基于自动发现的账号结果进行检查分析,检查 用户组、公钥、sudoers 等信息",
+            },
+            {
+                "id": "00000000-0000-0000-0000-000000000002",
+                "slug": "check_account_secret",
+                "name": "检查账号密码强弱",
+                "comment": "基于账号密码的安全性进行检查分析, 检查密码强度、泄露等信息",
+            },
+        ]
+
+        model_cls = CheckAccountEngine
+        if model_cls.objects.all().count() == 2:
+            return
+
+        for item in data:
+            model_cls.objects.create(**item)
+
+    def get_queryset(self):
+        self.init_if_need()
+        return CheckAccountEngine.objects.all()
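A small, self-contained sketch of the per-risk counter pattern used by the assets action above. The choice values below are invented stand-ins for RiskChoice.choices, and plain strings stand in for the Django Count/Q expressions; only the key-naming scheme is taken from the diff.

# Illustrative only: placeholder choices instead of RiskChoice.choices.
risk_choices = [("new_found", "New found"), ("password_expired", "Password expired")]

# Mirrors: {f"{risk[0]}_count": Count("id", filter=Q(risk=risk[0])) for risk in RiskChoice.choices}
annotations = {f"{value}_count": f"Count('id', filter=Q(risk='{value}'))" for value, _label in risk_choices}
print(sorted(annotations))  # ['new_found_count', 'password_expired_count']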
@@ -80,7 +80,7 @@ class GatheredAccountViewSet(OrgBulkModelViewSet):
        asset_id = request.data.get("asset_id")
        username = request.data.get("username")
        asset = get_object_or_404(Asset, pk=asset_id)
-        handler = RiskHandler(asset, username)
+        handler = RiskHandler(asset, username, request=self.request)
        handler.handle_delete_remote()
        return Response(status=status.HTTP_200_OK)
@@ -90,14 +90,14 @@ def check_account_secrets(accounts, assets):
        origin_risk = origin_risks_dict.get(key)

        if origin_risk:
-            origin_risk.details.append({"datetime": now})
+            origin_risk.details.append({"datetime": now, 'type': 'refind'})
            update_risk(origin_risk)
        else:
            create_risk({
                "asset": d["account"].asset,
                "username": d["account"].username,
                "risk": d["risk"],
-                "details": [{"datetime": now}],
+                "details": [{"datetime": now, 'type': 'init'}],
            })
    return summary, result
@@ -144,20 +144,21 @@ class AnalyseAccountRisk:
    def _update_risk(self, account):
        return account

-    def analyse_risk(self, asset, ori_account, d):
+    def analyse_risk(self, asset, ori_account, d, sys_found):
        if not self.check_risk:
            return

        basic = {"asset": asset, "username": d["username"]}
        if ori_account:
            self._analyse_item_changed(ori_account, d)
-        else:
+        elif not sys_found:
            self._create_risk(
                dict(
-                    **basic, risk="new_found", details=[{"datetime": self.now.isoformat()}]
+                    **basic,
+                    risk="new_found",
+                    details=[{"datetime": self.now.isoformat()}],
                )
            )

        self._analyse_datetime_changed(ori_account, d, asset, d["username"])
@@ -227,9 +228,8 @@ class GatherAccountsManager(AccountBasePlaybookManager):
        for asset_id, username in accounts:
            self.ori_asset_usernames[str(asset_id)].add(username)

-        ga_accounts = (
-            GatheredAccount.objects.filter(asset__in=assets)
-            .prefetch_related("asset")
+        ga_accounts = GatheredAccount.objects.filter(asset__in=assets).prefetch_related(
+            "asset"
        )
        for account in ga_accounts:
            self.ori_gathered_usernames[str(account.asset_id)].add(account.username)
@@ -345,6 +345,7 @@ class GatherAccountsManager(AccountBasePlaybookManager):
        risk_analyser = AnalyseAccountRisk(self.check_risk)
+
        for asset, accounts_data in self.asset_account_info.items():
            ori_users = self.ori_asset_usernames[str(asset.id)]
            with tmp_to_org(asset.org_id):
                gathered_accounts = []
                for d in accounts_data:
@@ -357,7 +358,8 @@ class GatherAccountsManager(AccountBasePlaybookManager):
                        self.create_gathered_account(d)
                    else:
                        self.update_gathered_account(ori_account, d)
-                    risk_analyser.analyse_risk(asset, ori_account, d)
+                    ori_found = username in ori_users
+                    risk_analyser.analyse_risk(asset, ori_account, d, ori_found)

        self.create_gathered_account.finish()
        self.update_gathered_account.finish()
@@ -5,7 +5,7 @@ from copy import deepcopy
from django.db.models import QuerySet

from accounts.const import AutomationTypes
-from accounts.models import Account
+from accounts.models import Account, GatheredAccount, AccountRisk
from common.utils import get_logger
from ..base.manager import AccountBasePlaybookManager
@@ -13,59 +13,72 @@ logger = get_logger(__name__)


class RemoveAccountManager(AccountBasePlaybookManager):
-    super_accounts = ['root', 'administrator']
+    super_accounts = ["root", "administrator"]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.host_account_mapper = dict()
        self.host_accounts = defaultdict(list)
-        snapshot_account = self.execution.snapshot.get('accounts', [])
+        snapshot_account = self.execution.snapshot.get("accounts", [])
        self.snapshot_asset_account_map = defaultdict(list)
        for account in snapshot_account:
-            self.snapshot_asset_account_map[str(account['asset'])].append(account)
+            self.snapshot_asset_account_map[str(account["asset"])].append(account)

    def prepare_runtime_dir(self):
        path = super().prepare_runtime_dir()
-        ansible_config_path = os.path.join(path, 'ansible.cfg')
+        ansible_config_path = os.path.join(path, "ansible.cfg")

-        with open(ansible_config_path, 'w') as f:
-            f.write('[ssh_connection]\n')
-            f.write('ssh_args = -o ControlMaster=no -o ControlPersist=no\n')
+        with open(ansible_config_path, "w") as f:
+            f.write("[ssh_connection]\n")
+            f.write("ssh_args = -o ControlMaster=no -o ControlPersist=no\n")
        return path

    @classmethod
    def method_type(cls):
        return AutomationTypes.remove_account

-    def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
-        if host.get('error'):
+    def host_callback(
+        self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs
+    ):
+        if host.get("error"):
            return host

        inventory_hosts = []
        accounts_to_remove = self.snapshot_asset_account_map.get(str(asset.id), [])

        for account in accounts_to_remove:
-            username = account.get('username')
+            username = account.get("username")
            if not username or username.lower() in self.super_accounts:
                print("Super account can not be remove: ", username)
                continue
            h = deepcopy(host)
-            h['name'] += '(' + username + ')'
-            self.host_account_mapper[h['name']] = account
-            h['account'] = {'username': username}
+            h["name"] += "(" + username + ")"
+            self.host_account_mapper[h["name"]] = account
+            h["account"] = {"username": username}
            inventory_hosts.append(h)
        return inventory_hosts

    def on_host_success(self, host, result):
-        tuple_asset_gather_account = self.host_account_mapper.get(host)
-        if not tuple_asset_gather_account:
+        super().on_host_success(host, result)
+        account = self.host_account_mapper.get(host)
+
+        if not account:
            return
-        asset, gather_account = tuple_asset_gather_account
+
        try:
            Account.objects.filter(
-                asset_id=asset.id,
-                username=gather_account.username
+                asset_id=account["asset"], username=account["username"]
            ).delete()
-            gather_account.delete()
+            GatheredAccount.objects.filter(
+                asset_id=account["asset"], username=account["username"]
+            ).delete()
+            risk = AccountRisk.objects.filter(
+                asset_id=account["asset"],
+                username=account["username"],
+                risk__in=["new_found"],
+            )
+            print("Account removed: ", account)
        except Exception as e:
-            print(f'\033[31m Delete account {gather_account.username} failed: {e} \033[0m\n')
+            logger.error(
+                f"Failed to delete account {account['username']} on asset {account['asset']}: {e}"
+            )
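For readers unfamiliar with the inventory shape, here is a self-contained sketch of the grouping and host-naming scheme used by host_callback above; the snapshot entries and the host name are invented examples.

from collections import defaultdict
from copy import deepcopy

# Invented data, shaped like execution.snapshot["accounts"].
snapshot_accounts = [
    {"asset": "a1", "username": "web"},
    {"asset": "a1", "username": "deploy"},
]

asset_account_map = defaultdict(list)
for account in snapshot_accounts:
    asset_account_map[str(account["asset"])].append(account)

host = {"name": "web-server-01"}
inventory_hosts = []
for account in asset_account_map["a1"]:
    h = deepcopy(host)
    h["name"] += "(" + account["username"] + ")"  # key later used by host_account_mapper
    h["account"] = {"username": account["username"]}
    inventory_hosts.append(h)

print([h["name"] for h in inventory_hosts])
# ['web-server-01(web)', 'web-server-01(deploy)']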
@@ -3,6 +3,7 @@ from itertools import islice
from django.db import models
from django.db.models import TextChoices
from django.utils.translation import gettext_lazy as _
+from django.utils import timezone

from common.const import ConfirmOrIgnore
from common.db.models import JMSBaseModel
@@ -68,6 +69,15 @@ class AccountRisk(JMSOrgBaseModel):
    def __str__(self):
        return f"{self.username}@{self.asset} - {self.risk}"

+    def set_status(self, status, user):
+        self.status = status
+        self.details.append({'date': timezone.now().isoformat(), 'message': f'{user.username} set status to {status}'})
+        self.save()
+
+    def update_details(self, message, user):
+        self.details.append({'date': timezone.now().isoformat(), 'message': f'{user.username} {message}'})
+        self.save(update_fields=['details'])
+
    @classmethod
    def gen_fake_data(cls, count=1000, batch_size=50):
        from assets.models import Asset
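The two new model methods append plain dict entries to the JSON details field. A minimal sketch of that entry shape, using the standard library instead of django.utils.timezone (the username and messages are examples):

from datetime import datetime, timezone

def audit_entry(username, message):
    # Same shape as the entries appended by set_status() / update_details().
    return {"date": datetime.now(timezone.utc).isoformat(), "message": f"{username} {message}"}

details = []
details.append(audit_entry("admin", "set status to confirmed"))
details.append(audit_entry("admin", "reviewed the risk"))
print(details[0]["message"])  # admin set status to confirmed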
@@ -1,6 +1,10 @@
from django.utils.translation import gettext_lazy as _

-from common.const import ConfirmOrIgnore
from accounts.models import GatheredAccount, AccountRisk, SecretType, AutomationExecution
+from django.utils import timezone
+
+from common.const import ConfirmOrIgnore

TYPE_CHOICES = [
    ("ignore", _("Ignore")),
@@ -14,21 +18,54 @@ TYPE_CHOICES = [


class RiskHandler:
-    def __init__(self, asset, username):
+    def __init__(self, asset, username, request=None, risk=''):
        self.asset = asset
        self.username = username
+        self.request = request
+        self.risk = risk

-    def handle(self, tp, risk=""):
+    def handle(self, tp, risk=''):
+        self.risk = risk
        attr = f"handle_{tp}"
        if hasattr(self, attr):
-            return getattr(self, attr)(risk=risk)
+            ret = getattr(self, attr)()
+            self.update_risk_if_need(tp)
+            return ret
        else:
            raise ValueError(f"Invalid risk type: {tp}")

-    def handle_ignore(self, risk=""):
+    def update_risk_if_need(self, tp):
+        r = self.get_risk()
+        if not r:
+            return
+        status = ConfirmOrIgnore.ignored if tp == 'ignore' else ConfirmOrIgnore.confirmed
+        r.details.append({
+            **self.process_detail,
+            'action': tp, 'status': status
+        })
+        r.status = status
+        r.save()
+
+    def get_risk(self):
+        r = AccountRisk.objects.filter(asset=self.asset, username=self.username)
+        if self.risk:
+            r = r.filter(risk=self.risk)
+        return r.first()
+
+    def handle_ignore(self):
        pass

-    def handle_add_account(self, risk=""):
+    def handle_review(self):
+        pass
+
+    @property
+    def process_detail(self):
+        return {
+            "datetime": timezone.now().isoformat(), "type": "process",
+            "processor": str(self.request.user)
+        }
+
+    def handle_add_account(self):
        data = {
            "username": self.username,
            "name": self.username,
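A self-contained sketch of the handle_{tp} dispatch that RiskHandler.handle() now follows, with the database lookup and the ConfirmOrIgnore statuses replaced by simple stand-ins:

class DemoRiskHandler:
    def handle(self, tp, risk=""):
        self.risk = risk
        attr = f"handle_{tp}"
        if hasattr(self, attr):
            ret = getattr(self, attr)()
            self.update_risk_if_need(tp)  # runs after every successful handler
            return ret
        raise ValueError(f"Invalid risk type: {tp}")

    def update_risk_if_need(self, tp):
        # The real handler appends a process detail to the AccountRisk row;
        # here we only record the resolved status.
        self.status = "ignored" if tp == "ignore" else "confirmed"

    def handle_ignore(self):
        pass

    def handle_add_account(self):
        return {"created": True}


h = DemoRiskHandler()
print(h.handle("add_account"), h.status)  # {'created': True} confirmed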
@@ -37,18 +74,14 @@ class RiskHandler:
        }
        self.asset.accounts.get_or_create(defaults=data, username=self.username)
        GatheredAccount.objects.filter(asset=self.asset, username=self.username).update(
-            present=True, status="confirmed"
-        )
-        (
-            AccountRisk.objects.filter(asset=self.asset, username=self.username)
-            .filter(risk__in=["new_found"])
-            .update(status="confirmed")
+            present=True, status=ConfirmOrIgnore.confirmed
        )
+        self.risk = 'new_found'

-    def handle_disable_remote(self, risk=""):
+    def handle_disable_remote(self):
        pass

-    def handle_delete_remote(self, risk=""):
+    def handle_delete_remote(self):
        asset = self.asset
        execution = AutomationExecution()
        execution.snapshot = {
@@ -59,13 +92,13 @@ class RiskHandler:
        }
        execution.save()
        execution.start()
-        return execution
+        return execution.summary

-    def handle_delete_both(self, risk=""):
+    def handle_delete_both(self):
        pass

-    def handle_change_password_add(self, risk=""):
+    def handle_change_password_add(self):
        pass

-    def handle_change_password(self, risk=""):
+    def handle_change_password(self):
        pass
@@ -11,6 +11,7 @@ from accounts.models import (
    CheckAccountEngine,
)
from assets.models import Asset
+from common.const import ConfirmOrIgnore
from common.serializers.fields import ObjectRelatedField, LabeledChoiceField
from common.utils import get_logger
from .base import BaseAutomationSerializer
@@ -34,6 +35,9 @@ class AccountRiskSerializer(serializers.ModelSerializer):
    risk = LabeledChoiceField(
        choices=RiskChoice.choices, required=False, read_only=True, label=_("Risk")
    )
+    status = LabeledChoiceField(
+        choices=ConfirmOrIgnore.choices, required=False, label=_("Status")
+    )

    class Meta:
        model = AccountRisk
@@ -15,6 +15,7 @@ from premailer import transform
from sshtunnel import SSHTunnelForwarder

from assets.automations.methods import platform_automation_methods
+from common.const import Status
from common.db.utils import safe_db_connection
from common.tasks import send_mail_async
from common.utils import get_logger, lazyproperty, is_openssh_format_key, ssh_pubkey_gen
@@ -97,13 +98,15 @@ class BaseManager:
        self.summary = defaultdict(int)
        self.result = defaultdict(list)
        self.duration = 0
+        self.status = 'success'

    def get_assets_group_by_platform(self):
        return self.execution.all_assets_group_by_platform()

    def pre_run(self):
        self.execution.date_start = timezone.now()
-        self.execution.save(update_fields=["date_start"])
+        self.execution.status = Status.running
+        self.execution.save(update_fields=["date_start", "status"])

    def update_execution(self):
        self.duration = int(time.time() - self.time_start)
@@ -111,7 +114,7 @@ class BaseManager:
        self.execution.duration = self.duration
        self.execution.summary = self.summary
        self.execution.result = self.result
-        self.execution.status = "success"
+        self.execution.status = self.status

        with safe_db_connection():
            self.execution.save()
@@ -161,8 +164,12 @@ class BaseManager:

    def run(self, *args, **kwargs):
        self.pre_run()
-        self.do_run(*args, **kwargs)
-        self.post_run()
+        try:
+            self.do_run(*args, **kwargs)
+        except:
+            self.status = 'error'
+        finally:
+            self.post_run()

    def do_run(self, *args, **kwargs):
        raise NotImplementedError
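A minimal sketch of the run() lifecycle introduced above: the manager starts optimistic, flips its status to "error" on an unhandled exception, and post_run() always persists whatever status ended up set. The class and messages below are illustrative, not the project's API.

class DemoManager:
    def __init__(self):
        self.status = "success"

    def pre_run(self):
        print("mark execution as running")

    def do_run(self):
        raise RuntimeError("boom")

    def post_run(self):
        print("persist status:", self.status)

    def run(self):
        self.pre_run()
        try:
            self.do_run()
        except Exception:  # the real code uses a bare except
            self.status = "error"
        finally:
            self.post_run()


DemoManager().run()  # mark execution as running / persist status: error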
@@ -365,6 +372,7 @@ class BasePlaybookManager(PlaybookPrepareMixin, BaseManager):
    def __init__(self, execution):
        super().__init__(execution)
        self.params = execution.snapshot.get("params", {})
+        self.host_success_callbacks = []

    def get_assets_group_by_platform(self):
        return self.execution.all_assets_group_by_platform()
@@ -451,6 +459,9 @@ class BasePlaybookManager(PlaybookPrepareMixin, BaseManager):
        self.summary["ok_assets"] += 1
        self.result["ok_assets"].append(host)

+        for cb in self.host_success_callbacks:
+            cb(host, result)
+
    def on_host_error(self, host, error, result):
        self.summary["fail_assets"] += 1
        self.result["fail_assets"].append((host, str(error)))
@@ -464,6 +475,11 @@ class BasePlaybookManager(PlaybookPrepareMixin, BaseManager):
        detail = result.get("failures", "") or result.get("dark", "")
        self.on_host_error(host, error, detail)

+    def post_run(self):
+        if self.summary['fail_assets']:
+            self.status = 'failed'
+        super().post_run()
+
    def on_runner_success(self, runner, cb):
        summary = cb.summary
        for state, hosts in summary.items():
@@ -0,0 +1,31 @@
+# Generated by Django 4.1.13 on 2024-12-02 11:30
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("assets", "0010_alter_automationexecution_duration"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="automationexecution",
+            name="status",
+            field=models.CharField(
+                choices=[
+                    ("ready", "Ready"),
+                    ("pending", "Pending"),
+                    ("running", "Running"),
+                    ("success", "Success"),
+                    ("failed", "Failed"),
+                    ("error", "Error"),
+                    ("canceled", "Canceled"),
+                ],
+                default="pending",
+                max_length=16,
+                verbose_name="Status",
+            ),
+        ),
+    ]
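The choices baked into this migration mirror the Status enum that the model and managers now import from common.const. A rough approximation of such a TextChoices class, assuming plain labels (the project's real definition lives elsewhere and may wrap labels in gettext_lazy):

from django.db.models import TextChoices

class Status(TextChoices):
    ready = "ready", "Ready"
    pending = "pending", "Pending"
    running = "running", "Running"
    success = "success", "Success"
    failed = "failed", "Failed"
    error = "error", "Error"
    canceled = "canceled", "Canceled"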
@@ -7,7 +7,7 @@ from django.utils.translation import gettext_lazy as _
from assets.models.asset import Asset
from assets.models.node import Node
from assets.tasks import execute_asset_automation_task
-from common.const.choices import Trigger
+from common.const.choices import Trigger, Status
from common.db.fields import EncryptJsonDictTextField
from ops.mixin import PeriodTaskModelMixin
from orgs.mixins.models import OrgModelMixin, JMSOrgBaseModel
@@ -16,9 +16,11 @@ from users.models import User

class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
    accounts = models.JSONField(default=list, verbose_name=_("Accounts"))
-    nodes = models.ManyToManyField('assets.Node', blank=True, verbose_name=_("Node"))
-    assets = models.ManyToManyField('assets.Asset', blank=True, verbose_name=_("Assets"))
-    type = models.CharField(max_length=16, verbose_name=_('Type'))
+    nodes = models.ManyToManyField("assets.Node", blank=True, verbose_name=_("Node"))
+    assets = models.ManyToManyField(
+        "assets.Asset", blank=True, verbose_name=_("Assets")
+    )
+    type = models.CharField(max_length=16, verbose_name=_("Type"))
    is_active = models.BooleanField(default=True, verbose_name=_("Is active"))
    params = models.JSONField(default=dict, verbose_name=_("Parameters"))
@@ -26,10 +28,10 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
        raise NotImplementedError

    def __str__(self):
-        return self.name + '@' + str(self.created_by)
+        return self.name + "@" + str(self.created_by)

    class Meta:
-        unique_together = [('org_id', 'name', 'type')]
+        unique_together = [("org_id", "name", "type")]
        verbose_name = _("Automation task")

    @classmethod
@@ -43,13 +45,13 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):

    def get_all_assets(self):
        nodes = self.nodes.all()
-        node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
-        direct_asset_ids = self.assets.all().values_list('id', flat=True)
+        node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list("id", flat=True)
+        direct_asset_ids = self.assets.all().values_list("id", flat=True)
        asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
        return Asset.objects.filter(id__in=asset_ids)

    def all_assets_group_by_platform(self):
-        assets = self.get_all_assets().prefetch_related('platform')
+        assets = self.get_all_assets().prefetch_related("platform")
        return assets.group_by_platform()

    @property
@@ -64,17 +66,17 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
        return name, task, args, kwargs

    def get_many_to_many_ids(self, field: str):
-        return [str(i) for i in getattr(self, field).all().values_list('id', flat=True)]
+        return [str(i) for i in getattr(self, field).all().values_list("id", flat=True)]

    def to_attr_json(self):
        return {
-            'name': self.name,
-            'type': self.type,
-            'comment': self.comment,
-            'accounts': self.accounts,
-            'org_id': str(self.org_id),
-            'nodes': self.get_many_to_many_ids('nodes'),
-            'assets': self.get_many_to_many_ids('assets'),
+            "name": self.name,
+            "type": self.type,
+            "comment": self.comment,
+            "accounts": self.accounts,
+            "org_id": str(self.org_id),
+            "nodes": self.get_many_to_many_ids("nodes"),
+            "assets": self.get_many_to_many_ids("assets"),
        }

    @property
@@ -96,7 +98,9 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
        eid = str(uuid.uuid4())

        execution = self.execution_model.objects.create(
-            id=eid, trigger=trigger, automation=self,
+            id=eid,
+            trigger=trigger,
+            automation=self,
            snapshot=self.to_attr_json(),
        )
        return execution.start()
@@ -111,37 +115,60 @@ class AssetBaseAutomation(BaseAutomation):
class AutomationExecution(OrgModelMixin):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    automation = models.ForeignKey(
-        'BaseAutomation', related_name='executions', on_delete=models.CASCADE,
-        verbose_name=_('Automation task'), null=True
+        "BaseAutomation",
+        related_name="executions",
+        on_delete=models.CASCADE,
+        verbose_name=_("Automation task"),
+        null=True,
    )
+    # pending, running, success, failed, terminated
+    status = models.CharField(
+        max_length=16, default=Status.pending, choices=Status.choices, verbose_name=_("Status")
+    )
+    date_created = models.DateTimeField(
+        auto_now_add=True, verbose_name=_("Date created")
+    )
+    date_start = models.DateTimeField(
+        null=True, verbose_name=_("Date start"), db_index=True
+    )
-    status = models.CharField(max_length=16, default='pending', verbose_name=_('Status'))
-    date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
-    date_start = models.DateTimeField(null=True, verbose_name=_('Date start'), db_index=True)
    date_finished = models.DateTimeField(null=True, verbose_name=_("Date finished"))
-    duration = models.IntegerField(default=0, verbose_name=_('Duration'))
+    duration = models.IntegerField(default=0, verbose_name=_("Duration"))
    snapshot = EncryptJsonDictTextField(
-        default=dict, blank=True, null=True, verbose_name=_('Automation snapshot')
+        default=dict, blank=True, null=True, verbose_name=_("Automation snapshot")
    )
    trigger = models.CharField(
-        max_length=128, default=Trigger.manual, choices=Trigger.choices,
-        verbose_name=_('Trigger mode')
+        max_length=128,
+        default=Trigger.manual,
+        choices=Trigger.choices,
+        verbose_name=_("Trigger mode"),
    )
-    summary = models.JSONField(default=dict, verbose_name=_('Summary'))
-    result = models.JSONField(default=dict, verbose_name=_('Result'))
+    summary = models.JSONField(default=dict, verbose_name=_("Summary"))
+    result = models.JSONField(default=dict, verbose_name=_("Result"))

    class Meta:
-        ordering = ('org_id', '-date_start',)
-        verbose_name = _('Automation task execution')
+        ordering = (
+            "org_id",
+            "-date_start",
+        )
+        verbose_name = _("Automation task execution")

    @property
    def is_finished(self):
        return bool(self.date_finished)

+    @property
+    def is_success(self):
+        return self.status == Status.success
+
    @property
    def manager_type(self):
-        return self.snapshot['type']
+        return self.snapshot["type"]

    def get_all_asset_ids(self):
-        node_ids = self.snapshot.get('nodes', [])
-        asset_ids = self.snapshot.get('assets', [])
+        node_ids = self.snapshot.get("nodes", [])
+        asset_ids = self.snapshot.get("assets", [])
        nodes = Node.objects.filter(id__in=node_ids)
-        node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
+        node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list("id", flat=True)
        asset_ids = set(list(asset_ids) + list(node_asset_ids))
        return asset_ids
@@ -150,12 +177,12 @@ class AutomationExecution(OrgModelMixin):
        return Asset.objects.filter(id__in=asset_ids)

    def all_assets_group_by_platform(self):
-        assets = self.get_all_assets().prefetch_related('platform')
+        assets = self.get_all_assets().prefetch_related("platform")
        return assets.group_by_platform()

    @property
    def recipients(self):
-        recipients = self.snapshot.get('recipients')
+        recipients = self.snapshot.get("recipients")
        if not recipients:
            return []
        users = User.objects.filter(id__in=recipients)
@@ -164,6 +191,7 @@ class AutomationExecution(OrgModelMixin):
    @property
    def manager(self):
        from assets.automations.endpoint import ExecutionManager
+
        return ExecutionManager(execution=self)

    def start(self):
@@ -433,3 +433,8 @@ def convert_html_to_markdown(html_str):
    markdown = markdown.replace('\n\n', '\n')
    markdown = markdown.replace('\n ', '\n')
    return markdown
+
+
+def many_get(d, keys, default=None):
+    res = [d.get(key, default) for key in keys]
+    return res
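Usage sketch for the new many_get helper; the helper is redefined inline so the snippet runs on its own, and the dict is an invented example. Unlike operator.itemgetter, which the old viewset code used and which raises KeyError for a missing key, many_get falls back to the given default:

def many_get(d, keys, default=None):
    return [d.get(key, default) for key in keys]

validated = {"asset": "asset-1", "username": "web", "action": "add_account"}
asset, username, act, risk = many_get(validated, ("asset", "username", "action", "risk"))
print(asset, username, act, risk)  # asset-1 web add_account None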
@@ -1243,7 +1243,7 @@
    "TestLdapLoginTitle": "Test ldap user login",
    "TestNodeAssetConnectivity": "Test assets connectivity of node",
    "TestPortErrorMsg": "Port error, please re-enter",
-    "TestSelected": "Test selected",
+    "TestSelected": "Verify selected",
    "TestSuccessMsg": "Test succeeded",
    "Thursday": "Thu",
    "Ticket": "Ticket",