mirror of https://github.com/jumpserver/jumpserver
perf: Optimize the gathered accounts report
parent 92f7209997
commit b87fb8bba0
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
#
from django.db import transaction
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from rest_framework import status
from rest_framework.decorators import action

@@ -86,8 +87,8 @@ class GatheredAccountViewSet(OrgBulkModelViewSet):
        }
        execution.save()
        execution.start()
        accounts = self.model.objects.filter(asset=asset).prefetch_related('asset', 'asset__platform')
        return self.get_paginated_response_from_queryset(accounts)
        report = execution.manager.gen_report()
        return HttpResponse(report)

    @action(methods=['post'], detail=False, url_path='sync-accounts')
    def sync_accounts(self, request, *args, **kwargs):
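
The hunk above replaces the paginated gathered-accounts response with a synchronously generated HTML report: the execution is saved, started inline, and the output of its manager's gen_report() is returned as an HttpResponse. Below is a minimal, illustrative sketch of that endpoint shape as a DRF action; build_execution() is a hypothetical helper and not part of this commit.

# Illustrative sketch only: a DRF action that runs an execution synchronously
# and returns the rendered HTML report, mirroring the change above.
from django.http import HttpResponse
from rest_framework.decorators import action
from rest_framework.viewsets import GenericViewSet


class ReportViewSetSketch(GenericViewSet):
    @action(methods=["get"], detail=True, url_path="report")
    def report(self, request, *args, **kwargs):
        execution = self.build_execution(request)  # hypothetical helper
        execution.save()
        execution.start()  # runs the gather-accounts playbook inline
        html = execution.manager.gen_report()  # manager renders its report template
        return HttpResponse(html)

    def build_execution(self, request):
        raise NotImplementedError  # placeholder: however the execution gets built

Returning an HttpResponse rather than a serialized queryset means the report is rendered server-side and the client only has to display the HTML.
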
@@ -19,25 +19,27 @@ logger = get_logger(__name__)

diff_items = [
    'authorized_keys', 'sudoers', 'groups',
    "authorized_keys",
    "sudoers",
    "groups",
]


def get_items_diff(ori_account, d):
    if hasattr(ori_account, '_diff'):
    if hasattr(ori_account, "_diff"):
        return ori_account._diff

    diff = {}
    for item in diff_items:
        ori = getattr(ori_account, item)
        new = d.get(item, '')
        new = d.get(item, "")

        if not ori:
            continue

        if isinstance(new, timezone.datetime):
            new = ori.strftime('%Y-%m-%d %H:%M:%S')
            ori = ori.strftime('%Y-%m-%d %H:%M:%S')
            new = ori.strftime("%Y-%m-%d %H:%M:%S")
            ori = ori.strftime("%Y-%m-%d %H:%M:%S")
        if new != ori:
            diff[item] = get_text_diff(ori, new)
@@ -48,9 +50,17 @@ def get_items_diff(ori_account, d):

class AnalyseAccountRisk:
    long_time = timezone.timedelta(days=90)
    datetime_check_items = [
        {'field': 'date_last_login', 'risk': 'zombie', 'delta': long_time},
        {'field': 'date_password_change', 'risk': 'long_time_password', 'delta': long_time},
        {'field': 'date_password_expired', 'risk': 'password_expired', 'delta': timezone.timedelta(seconds=1)}
        {"field": "date_last_login", "risk": "zombie", "delta": long_time},
        {
            "field": "date_password_change",
            "risk": "long_time_password",
            "delta": long_time,
        },
        {
            "field": "date_password_expired",
            "risk": "password_expired",
            "delta": timezone.timedelta(seconds=1),
        },
    ]

    def __init__(self, check_risk=True):
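
The reformatted datetime_check_items above is a data-driven risk table: each rule names an account date field, the risk label to record, and the maximum allowed age before that risk applies. Here is a standalone sketch of evaluating such rules, using the stdlib datetime module instead of django.utils.timezone; the function name is illustrative.

from datetime import datetime, timedelta, timezone

LONG_TIME = timedelta(days=90)
DATETIME_CHECK_ITEMS = [
    {"field": "date_last_login", "risk": "zombie", "delta": LONG_TIME},
    {"field": "date_password_change", "risk": "long_time_password", "delta": LONG_TIME},
    {"field": "date_password_expired", "risk": "password_expired", "delta": timedelta(seconds=1)},
]


def detect_datetime_risks(account_info):
    """Return the risk names whose date field is older than now - delta.

    `account_info` maps field names to timezone-aware datetimes (or None).
    """
    now = datetime.now(timezone.utc)
    risks = []
    for item in DATETIME_CHECK_ITEMS:
        date = account_info.get(item["field"])
        if date and date < now - item["delta"]:
            risks.append(item["risk"])
    return risks


print(detect_datetime_risks({"date_last_login": datetime(2000, 1, 1, tzinfo=timezone.utc)}))
# -> ['zombie']
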
@@ -66,20 +76,24 @@ class AnalyseAccountRisk:
        risks = []
        for k, v in diff.items():
            risks.append(dict(
                asset=ori_account.asset, username=ori_account.username,
                risk=k+'_changed', detail={'diff': v}
            ))
            risks.append(
                dict(
                    asset=ori_account.asset,
                    username=ori_account.username,
                    risk=k + "_changed",
                    detail={"diff": v},
                )
            )
        self.save_or_update_risks(risks)

    def _analyse_datetime_changed(self, ori_account, d, asset, username):
        basic = {'asset': asset, 'username': username}
        basic = {"asset": asset, "username": username}

        risks = []
        for item in self.datetime_check_items:
            field = item['field']
            risk = item['risk']
            delta = item['delta']
            field = item["field"]
            risk = item["risk"]
            delta = item["delta"]

            date = d.get(field)
            if not date:
@@ -91,20 +105,20 @@ class AnalyseAccountRisk:
            if date and date < timezone.now() - delta:
                risks.append(
                    dict(**basic, risk=risk, detail={'date': date.isoformat()})
                    dict(**basic, risk=risk, detail={"date": date.isoformat()})
                )

        self.save_or_update_risks(risks)

    def save_or_update_risks(self, risks):
        # Fetch these up front to avoid querying the database every time
        assets = {r['asset'] for r in risks}
        assets = {r["asset"] for r in risks}
        assets_risks = AccountRisk.objects.filter(asset__in=assets)
        assets_risks = {f"{r.asset_id}_{r.username}_{r.risk}": r for r in assets_risks}

        for d in risks:
            detail = d.pop('detail', {})
            detail['datetime'] = self.now.isoformat()
            detail = d.pop("detail", {})
            detail["datetime"] = self.now.isoformat()
            key = f"{d['asset'].id}_{d['username']}_{d['risk']}"
            found = assets_risks.get(key)
@@ -119,7 +133,7 @@ class AnalyseAccountRisk:
    def _create_risk(self, data):
        return AccountRisk(**data)

    @bulk_update_decorator(AccountRisk, update_fields=['details'])
    @bulk_update_decorator(AccountRisk, update_fields=["details"])
    def _update_risk(self, account):
        return account
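
_create_risk and _update_risk above lean on bulk_create_decorator / bulk_update_decorator to batch database writes instead of saving each AccountRisk row one by one. Those decorators are not part of this diff, so the sketch below is only a guess at the general pattern (collect the instances a function returns and flush them with Model.objects.bulk_create once a batch fills up); jumpserver's real implementation may differ.

from functools import wraps


def bulk_create_decorator(model, batch_size=100):
    """Hypothetical sketch: batch the objects returned by the decorated function."""
    pending = []

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            instance = func(*args, **kwargs)
            if instance is None:
                return None
            pending.append(instance)
            if len(pending) >= batch_size:
                model.objects.bulk_create(pending)  # one INSERT for the whole batch
                pending.clear()
            return instance

        def finish():
            # Flush the remainder; the manager sleeps briefly afterwards in the diff.
            if pending:
                model.objects.bulk_create(pending)
                pending.clear()

        wrapper.finish = finish
        return wrapper

    return decorator

Used as @bulk_create_decorator(GatheredAccount) on create_gathered_account(), each call would then stage a row for a batched insert instead of issuing its own query.
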
@@ -127,13 +141,17 @@ class AnalyseAccountRisk:
        if not self.check_risk:
            return

        basic = {'asset': asset, 'username': d['username']}
        basic = {"asset": asset, "username": d["username"]}
        if ori_account:
            self._analyse_item_changed(ori_account, d)
        else:
            self._create_risk(dict(**basic, risk='ghost', details=[{'datetime': self.now.isoformat()}]))
            self._create_risk(
                dict(
                    **basic, risk="ghost", details=[{"datetime": self.now.isoformat()}]
                )
            )

        self._analyse_datetime_changed(ori_account, d, asset, d['username'])
        self._analyse_datetime_changed(ori_account, d, asset, d["username"])


class GatherAccountsManager(AccountBasePlaybookManager):
@@ -145,8 +163,8 @@ class GatherAccountsManager(AccountBasePlaybookManager):
        self.ori_asset_usernames = defaultdict(set)
        self.ori_gathered_usernames = defaultdict(set)
        self.ori_gathered_accounts_mapper = dict()
        self.is_sync_account = self.execution.snapshot.get('is_sync_account')
        self.check_risk = self.execution.snapshot.get('check_risk', False)
        self.is_sync_account = self.execution.snapshot.get("is_sync_account")
        self.check_risk = self.execution.snapshot.get("check_risk", False)

    @classmethod
    def method_type(cls):
@@ -154,7 +172,7 @@ class GatherAccountsManager(AccountBasePlaybookManager):
    def host_callback(self, host, asset=None, **kwargs):
        super().host_callback(host, asset=asset, **kwargs)
        self.host_asset_mapper[host['name']] = asset
        self.host_asset_mapper[host["name"]] = asset
        return host

    def _filter_success_result(self, tp, result):
@@ -175,43 +193,39 @@ class GatherAccountsManager(AccountBasePlaybookManager):
        for username, info in result.items():
            self.asset_usernames_mapper[asset].add(username)

            d = {'asset': asset, 'username': username, 'remote_present': True, **info}
            d = {"asset": asset, "username": username, "remote_present": True, **info}
            accounts.append(d)
        self.asset_account_info[asset] = accounts

    def on_runner_failed(self, runner, e):
        print("Runner failed: ", e)
        raise e

    def on_host_error(self, host, error, result):
        print(f'\033[31m {host} error: {error} \033[0m\n')
        self.summary['error_assets'] += 1

    def on_host_success(self, host, result):
        info = self._get_nested_info(result, 'debug', 'res', 'info')
        super().on_host_success(host, result)
        info = self._get_nested_info(result, "debug", "res", "info")
        asset = self.host_asset_mapper.get(host)
        self.summary['success_assets'] += 1

        if asset and info:
            self._collect_asset_account_info(asset, info)
        else:
            print(f'\033[31m Not found {host} info \033[0m\n')
            print(f"\033[31m Not found {host} info \033[0m\n")

    def prefetch_origin_account_usernames(self):
        """
        Query these up front to avoid an SQL query for every account
        :return:
        """
        assets = self.asset_usernames_mapper.keys()
        accounts = Account.objects.filter(asset__in=assets).values_list('asset', 'username')
        accounts = Account.objects.filter(asset__in=assets).values_list(
            "asset", "username"
        )

        for asset, username in accounts:
            self.ori_asset_usernames[asset].add(username)

        ga_accounts = GatheredAccount.objects.filter(asset__in=assets).prefetch_related('asset')
        ga_accounts = GatheredAccount.objects.filter(asset__in=assets).prefetch_related(
            "asset"
        )
        for account in ga_accounts:
            self.ori_gathered_usernames[account.asset].add(account.username)
            key = '{}_{}'.format(account.asset_id, account.username)
            key = "{}_{}".format(account.asset_id, account.username)
            self.ori_gathered_accounts_mapper[key] = account

    def update_gather_accounts_status(self, asset):
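
prefetch_origin_account_usernames is the heart of this perf change: it loads the existing Account and GatheredAccount rows for all touched assets up front and builds in-memory lookups, so the later per-account loop never goes back to the database. A small sketch of the same lookup-building idea with plain objects (function and variable names are illustrative):

from collections import defaultdict


def build_lookups(gathered_accounts):
    """`gathered_accounts` is any iterable of objects with .asset, .asset_id and .username."""
    usernames_by_asset = defaultdict(set)  # asset -> {username, ...}
    account_by_key = {}                    # "<asset_id>_<username>" -> account object
    for account in gathered_accounts:
        usernames_by_asset[account.asset].add(account.username)
        account_by_key["{}_{}".format(account.asset_id, account.username)] = account
    return usernames_by_asset, account_by_key

# Later, "does this (asset, username) already exist?" becomes a dict lookup,
# e.g. account_by_key.get("{}_{}".format(asset.id, username)), instead of a query.
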
@@ -225,25 +239,45 @@ class GatherAccountsManager(AccountBasePlaybookManager):
        ori_users = self.ori_asset_usernames[asset]
        ori_ga_users = self.ori_gathered_usernames[asset]

        queryset = (GatheredAccount.objects
                    .filter(asset=asset)
                    .exclude(status=ConfirmOrIgnore.ignored))
        queryset = GatheredAccount.objects.filter(asset=asset).exclude(
            status=ConfirmOrIgnore.ignored
        )

        # Accounts that exist on the remote host but not among the gathered accounts
        # Newly created, no need to handle the status
        new_found_users = remote_users - ori_ga_users
        if new_found_users:
            self.summary["new_accounts"] += len(new_found_users)
            for username in new_found_users:
                self.result["new_accounts"].append(
                    {
                        "asset": str(asset),
                        "username": username,
                    }
                )

        # Accounts that were gathered before but are now missing on the remote host
        # Set remote_present=False and mark them as pending
        # Accounts that no longer exist on the remote asset are marked pending and need an administrator to intervene
        lost_users = ori_ga_users - remote_users
        if lost_users:
            queryset.filter(username__in=lost_users).update(status='', remote_present=False)
            queryset.filter(username__in=lost_users).update(
                status="", remote_present=False
            )
            self.summary["lost_accounts"] += len(lost_users)
            for username in lost_users:
                self.result["lost_accounts"].append(
                    {
                        "asset": str(asset),
                        "username": username,
                    }
                )

        # Gathered accounts that are not in the account list; the account may have been
        # deleted even though its status is already confirmed
        # Reset the status to pending so an administrator can confirm it
        ga_added_users = ori_ga_users - ori_users
        if ga_added_users:
            queryset.filter(username__in=ga_added_users).update(status='')
            queryset.filter(username__in=ga_added_users).update(status="")

        # Gathered accounts that are fewer than the account list
        # This probably needs no comparison; that is just the original state
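
update_gather_accounts_status above classifies accounts purely with set arithmetic between what was found remotely, what already exists on the asset, and what was gathered previously. A self-contained sketch of that classification (the input names mirror remote_users, ori_users and ori_ga_users from the diff; the function itself is illustrative):

def classify_accounts(remote_users, ori_users, ori_ga_users):
    return {
        # On the remote host but never gathered before: create, nothing to review.
        "new_found": remote_users - ori_ga_users,
        # Gathered before but gone from the remote host: flag for the administrator.
        "lost": ori_ga_users - remote_users,
        # Gathered but not in the asset's account list: reset status to pending.
        "ga_added": ori_ga_users - ori_users,
    }


print(classify_accounts({"root", "dev"}, {"root"}, {"root", "olduser"}))
# -> {'new_found': {'dev'}, 'lost': {'olduser'}, 'ga_added': {'olduser'}}
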
@@ -255,22 +289,33 @@ class GatherAccountsManager(AccountBasePlaybookManager):
        # Normal case, nothing to do: remote accounts are created as gathered accounts,
        # and the gathered accounts are compared afterwards

        # Still handle the status here as well: the account already exists, so at this point its status should be confirmed
        (queryset.filter(username__in=ori_users)
         .exclude(status=ConfirmOrIgnore.confirmed)
         .update(status=ConfirmOrIgnore.confirmed))

        (
            queryset.filter(username__in=ori_users)
            .exclude(status=ConfirmOrIgnore.confirmed)
            .update(status=ConfirmOrIgnore.confirmed)
        )

        # Accounts that exist on the remote host: mark them as present
        queryset.filter(username__in=remote_users, remote_present=False).update(remote_present=True)
        (
            queryset.filter(username__in=remote_users, remote_present=False).update(
                remote_present=True
            )
        )

        # Accounts that are not on the asset: mark them as present
        queryset.exclude(username__in=ori_users).filter(present=False).update(present=True)
        (
            queryset.exclude(username__in=ori_users)
            .filter(present=False)
            .update(present=True)
        )

    @bulk_create_decorator(GatheredAccount)
    def create_gathered_account(self, d):
        gathered_account = GatheredAccount()
        ga = GatheredAccount()
        for k, v in d.items():
            setattr(gathered_account, k, v)
        return gathered_account
            setattr(ga, k, v)

        return ga

    @bulk_update_decorator(GatheredAccount, update_fields=diff_items)
    def update_gathered_account(self, ori_account, d):
@@ -287,11 +332,13 @@ class GatherAccountsManager(AccountBasePlaybookManager):
        risk_analyser = AnalyseAccountRisk(self.check_risk)

        for asset, accounts_data in self.asset_account_info.items():
            with (tmp_to_org(asset.org_id)):
            with tmp_to_org(asset.org_id):
                gathered_accounts = []
                for d in accounts_data:
                    username = d['username']
                    ori_account = self.ori_gathered_accounts_mapper.get('{}_{}'.format(asset.id, username))
                    username = d["username"]
                    ori_account = self.ori_gathered_accounts_mapper.get(
                        "{}_{}".format(asset.id, username)
                    )

                    if not ori_account:
                        self.create_gathered_account(d)
@@ -304,50 +351,5 @@ class GatherAccountsManager(AccountBasePlaybookManager):
        # Because of the bulk create / bulk update, sleep briefly here to wait for the data to sync
        time.sleep(0.5)

    def send_report_if_need(self):
        pass

    def generate_send_users_and_change_info(self):
        recipients = self.execution.recipients
        if not self.asset_usernames_mapper or not recipients:
            return None, None

        users = recipients
        asset_ids = self.asset_usernames_mapper.keys()
        assets = Asset.objects.filter(id__in=asset_ids).prefetch_related('accounts')
        gather_accounts = GatheredAccount.objects.filter(asset_id__in=asset_ids, remote_present=True)

        asset_id_map = {str(asset.id): asset for asset in assets}
        asset_id_username = list(assets.values_list('id', 'accounts__username'))
        asset_id_username.extend(list(gather_accounts.values_list('asset_id', 'username')))

        system_asset_usernames_mapper = defaultdict(set)
        for asset_id, username in asset_id_username:
            system_asset_usernames_mapper[str(asset_id)].add(username)

        change_info = defaultdict(dict)
        for asset_id, usernames in self.asset_usernames_mapper.items():
            system_usernames = system_asset_usernames_mapper.get(asset_id)
            if not system_usernames:
                continue

            add_usernames = usernames - system_usernames
            remove_usernames = system_usernames - usernames

            if not add_usernames and not remove_usernames:
                continue

            change_info[str(asset_id_map[asset_id])] = {
                'add_usernames': add_usernames,
                'remove_usernames': remove_usernames
            }

        return users, dict(change_info)

    def send_email_if_need(self):
        users, change_info = self.generate_send_users_and_change_info()
        if not users or not change_info:
            return

        for user in users:
            GatherAccountChangeMsg(user, change_info).publish_async()
    def get_report_template(self):
        return "accounts/gather_account_report.html"
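
With the old email-change helpers removed here, the manager now only declares get_report_template(), pointing at accounts/gather_account_report.html; presumably the base manager's gen_report() renders that template with the collected summary and result dictionaries. A hedged sketch of such a render step (assumes a configured Django environment; the real gen_report() is not shown in this diff):

from django.template.loader import render_to_string


def gen_report_sketch(template_name, summary, result):
    # Render the report template with the counters collected during the run.
    context = {"summary": summary, "result": result}
    return render_to_string(template_name, context)

# e.g. gen_report_sketch("accounts/gather_account_report.html",
#                        {"new_accounts": 2, "lost_accounts": 0},
#                        {"new_accounts": [], "lost_accounts": []})
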
@@ -28,11 +28,11 @@
            <tr>
                <td>{% trans 'Assets count' %}: </td>
                <td>{{ summary.assets }}</td>
                <td>{{ summary.total_assets }}</td>
            </tr>
            <tr>
                <td>{% trans 'Asset success count' %}: </td>
                <td>{{ summary.success_assets }}</td>
                <td>{{ summary.ok_assets }}</td>
            </tr>
            <tr>
                <td>{% trans 'Asset failed count' %}: </td>
@@ -40,32 +40,24 @@
            </tr>
            <tr>
                <td>{% trans 'Asset not support count' %}: </td>
                <td>{{ summary.na_assets }}</td>
                <td>{{ summary.error_assets }}</td>
            </tr>

            <tr>
                <td>{% trans 'Account new found count' %}: </td>
                <td>{{ summary.new_accounts }}</td>
            </tr>

            <tr>
                <td>{% trans 'Sudo changed count' %}:</td>
                <td> <span> {{ summary.sudo_changed }}</span></td>
            </tr>
            <tr>
                <td>{% trans 'Groups changed count' %}:</td>
                <td> <span> {{ summary.groups_changed }}</span></td>
            </tr>
            <tr>
                <td>{% trans 'Authorized key changed count' %}:</td>
                <td> <span> {{ summary.authorized_key_changed }}</span></td>
                <td>{% trans 'Account lost count' %}: </td>
                <td>{{ summary.lost_accounts }}</td>
            </tr>
        </tbody>
    </table>
</div>

<div class='result'>
    <p>{% trans 'New found accounts' %}:</p>
    <p>{% trans 'New found accounts' %}: {{ summary.new_accounts }}</p>
    {% if summary.new_accounts %}
    <table style="">
        <thead>
        <tr>
@@ -84,10 +76,36 @@
        {% endfor %}
        </tbody>
    </table>
    {% endif %}
</div>
<div class='result'>
    <p>{% trans 'Lost accounts' %}: {{ summary.lost_accounts }}</p>
    {% if summary.lost_accounts %}
    <table style="">
        <thead>
        <tr>
            <th>{% trans 'No.' %}</th>
            <th>{% trans 'Asset' %}</th>
            <th>{% trans 'Username' %}</th>
        </tr>
        </thead>
        <tbody>
        {% for account in result.lost_accounts %}
        <tr>
            <td>{{ forloop.counter }}</td>
            <td>{{ account.asset }}</td>
            <td>{{ account.username }}</td>
        </tr>
        {% endfor %}
        </tbody>
    </table>
    {% endif %}
</div>


<div class='result'>
    <p>{% trans 'New found risk' %}:</p>
    <p>{% trans 'New found risks' %}: {{ summary.new_risks }}</p>
    {% if summary.new_risks %}
    <table style="">
        <thead>
        <tr>
@@ -108,6 +126,7 @@
        {% endfor %}
        </tbody>
    </table>
    {% endif %}
</div>

<style>
@@ -116,7 +135,7 @@
        border-collapse: collapse;
        max-width: 100%;
        text-align: left;
        margin-top: 20px;
        margin-top: 10px;
        padding: 20px;
    }
@@ -137,6 +156,10 @@
        font-size: 12px;
    }

    .result {
        margin-top: 20px;
    }

    .result tr :first-child {
        width: 10%;
    }
@@ -117,7 +117,11 @@ class BaseManager:
        self.execution.save()

    def print_summary(self):
        pass
        content = "\nSummary: \n"
        for k, v in self.summary.items():
            content += f"\t - {k}: {v}\n"
        content += "\t - Using: {}s\n".format(self.duration)
        print(content)

    def get_report_template(self):
        raise NotImplementedError
@@ -136,6 +140,7 @@ class BaseManager:
        recipients = self.execution.recipients
        if not recipients:
            return
        print("Send report to: ", ",".join(recipients))

        report = self.gen_report()
        report = transform(report)
@@ -167,26 +172,18 @@ class BaseManager:
        return json.dumps(data, indent=4, sort_keys=True)


class PlaybookUtil:
class PlaybookPrepareMixin:
    bulk_size = 100
    ansible_account_policy = "privileged_first"
    ansible_account_prefer = "root,Administrator"

    def __init__(self, assets, playbook_dir, inventory_path):
        self.assets = assets
        self.playbook_dir = playbook_dir
        self.inventory_path = inventory_path
    summary: dict
    result: dict
    params: dict
    execution = None


class BasePlaybookManager(BaseManager):
    bulk_size = 100
    ansible_account_policy = "privileged_first"
    ansible_account_prefer = "root,Administrator"

    def __init__(self, execution):
        super().__init__(execution)
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # example: {'gather_fact_windows': {'id': 'gather_fact_windows', 'name': '', 'method': 'gather_fact', ...} }
        self.method_id_meta_mapper = {
            method["id"]: method
@@ -196,8 +193,10 @@ class BasePlaybookManager(BaseManager):
        # Group by execution method; operations such as change-password and push on different assets may use different execution methods
        # Then group by execution method and again by bulk_size to generate different playbooks
        self.playbooks = []
        params = self.execution.snapshot.get("params")
        self.params = params or {}

    @classmethod
    def method_type(cls):
        raise NotImplementedError

    def get_params(self, automation, method_type):
        method_attr = "{}_method".format(method_type)
@@ -219,13 +218,6 @@ class BasePlaybookManager(BaseManager):
    def platform_automation_methods(self):
        return platform_automation_methods

    @classmethod
    def method_type(cls):
        raise NotImplementedError

    def get_assets_group_by_platform(self):
        return self.execution.all_assets_group_by_platform()

    def prepare_runtime_dir(self):
        ansible_dir = settings.ANSIBLE_DIR
        task_name = self.execution.snapshot["name"]
@@ -241,13 +233,11 @@ class BasePlaybookManager(BaseManager):
        os.makedirs(path, exist_ok=True, mode=0o755)
        return path

    @lazyproperty
    def runtime_dir(self):
        path = self.prepare_runtime_dir()
        if settings.DEBUG_DEV:
            msg = "Ansible runtime dir: {}".format(path)
            print(msg)
        return path
    def host_callback(self, host, automation=None, **kwargs):
        method_type = self.__class__.method_type()
        host = self.convert_cert_to_file(host, kwargs.get("path_dir"))
        host["params"] = self.get_params(automation, method_type)
        return host

    @staticmethod
    def write_cert_to_file(filename, content):
@@ -274,15 +264,6 @@ class BasePlaybookManager(BaseManager):
                host["jms_asset"]["secret_info"][f] = result
        return host

    def on_host_method_not_enabled(self, host, **kwargs):
        host["error"] = _("{} disabled".format(self.__class__.method_type()))

    def host_callback(self, host, automation=None, **kwargs):
        method_type = self.__class__.method_type()
        host = self.convert_cert_to_file(host, kwargs.get("path_dir"))
        host["params"] = self.get_params(automation, method_type)
        return host

    @staticmethod
    def generate_public_key(private_key):
        return ssh_pubkey_gen(private_key=private_key, hostname=gethostname())
@@ -314,6 +295,14 @@ class BasePlaybookManager(BaseManager):
        )
        inventory.write_to_file(inventory_path)

    @lazyproperty
    def runtime_dir(self):
        path = self.prepare_runtime_dir()
        if settings.DEBUG_DEV:
            msg = "Ansible runtime dir: {}".format(path)
            print(msg)
        return path

    @staticmethod
    def generate_playbook(method, sub_playbook_dir):
        method_playbook_dir_path = method["dir"]
@@ -329,17 +318,6 @@ class BasePlaybookManager(BaseManager):
            yaml.safe_dump(plays, f)
        return sub_playbook_path

    def on_assets_not_ansible_enabled(self, assets):
        for asset in assets:
            print("\t{}".format(asset))

    def on_assets_not_method_enabled(self, assets, method_id):
        for asset in assets:
            print("\t{}".format(asset))

    def on_playbook_not_found(self, assets):
        pass

    def check_automation_enabled(self, platform, assets):
        if not platform.automation or not platform.automation.ansible_enabled:
            print(_(" - Platform {} ansible disabled").format(platform.name))
@@ -360,6 +338,67 @@ class BasePlaybookManager(BaseManager):
        if not method_enabled:
            self.on_assets_not_method_enabled(assets, method_type)
            return False
        return True

    def on_assets_not_ansible_enabled(self, assets):
        self.summary["error_assets"] += len(assets)
        self.result["error_assets"].extend([str(asset) for asset in assets])
        for asset in assets:
            print("\t{}".format(asset))

    def on_assets_not_method_enabled(self, assets, method_type):
        self.summary["error_assets"] += len(assets)
        self.result["error_assets"].extend([str(asset) for asset in assets])
        for asset in assets:
            print("\t{}".format(asset))

    def on_playbook_not_found(self, assets):
        print("Playbook generate failed")


class BasePlaybookManager(PlaybookPrepareMixin, BaseManager):
    bulk_size = 100
    ansible_account_policy = "privileged_first"
    ansible_account_prefer = "root,Administrator"

    def __init__(self, execution):
        super().__init__(execution)
        self.params = execution.snapshot.get("params", {})

    def get_assets_group_by_platform(self):
        return self.execution.all_assets_group_by_platform()

    @classmethod
    def method_type(cls):
        raise NotImplementedError

    def get_runners_by_platform(self, platform, _assets, _index):
        sub_dir = "{}_{}".format(platform.name, _index)
        playbook_dir = os.path.join(self.runtime_dir, sub_dir)
        inventory_path = os.path.join(self.runtime_dir, sub_dir, "hosts.json")

        method_id = getattr(
            platform.automation,
            "{}_method".format(self.__class__.method_type()),
        )
        method = self.method_id_meta_mapper.get(method_id)

        protocol = method.get("protocol")
        self.generate_inventory(_assets, inventory_path, protocol)
        playbook_path = self.generate_playbook(method, playbook_dir)

        if not playbook_path:
            self.on_playbook_not_found(_assets)
            return None, None

        runner = SuperPlaybookRunner(
            inventory_path,
            playbook_path,
            self.runtime_dir,
            callback=PlaybookCallback(),
        )
        return runner, inventory_path

    def get_runners(self):
        assets_group_by_platform = self.get_assets_group_by_platform()
@@ -369,10 +408,13 @@ class BasePlaybookManager(BaseManager):
        runners = []
        for platform, assets in assets_group_by_platform.items():
            self.summary["total_assets"] += len(assets)
            if not assets:
                print("No assets for platform: {}".format(platform.name))
                continue

            if not self.check_automation_enabled(platform, assets):
                print("Platform {} ansible disabled".format(platform.name))
                continue

            # Execute in batches so a single task does not get too large
@@ -381,45 +423,38 @@ class BasePlaybookManager(BaseManager):
                for i in range(0, len(assets), self.bulk_size)
            ]
            for i, _assets in enumerate(assets_bulked, start=1):
                sub_dir = "{}_{}".format(platform.name, i)
                playbook_dir = os.path.join(self.runtime_dir, sub_dir)
                inventory_path = os.path.join(self.runtime_dir, sub_dir, "hosts.json")

                # method_id = getattr(
                #     platform.automation,
                #     "{}_method".format(self.__class__.method_type()),
                # )
                method = self.method_id_meta_mapper.get(method_id)

                protocol = method.get("protocol")
                self.generate_inventory(_assets, inventory_path, protocol)
                playbook_path = self.generate_playbook(method, playbook_dir)

                if not playbook_path:
                    self.on_playbook_not_found(_assets)
                    continue

                runer = SuperPlaybookRunner(
                    inventory_path,
                    playbook_path,
                    self.runtime_dir,
                    callback=PlaybookCallback(),
                runner, inventory_path = self.get_runners_by_platform(
                    platform, _assets, i
                )

                if not runner or not inventory_path:
                    continue

                with open(inventory_path, "r") as f:
                    inventory_data = json.load(f)
                    if not inventory_data["all"].get("hosts"):
                        continue

                runners.append(runer)
                runners.append(
                    (
                        runner,
                        {
                            "assets": _assets,
                            "inventory": inventory_path,
                            "platform": platform,
                        },
                    )
                )
        return runners

    def on_host_success(self, host, result):
        pass
        self.summary["ok_assets"] += 1
        self.result["ok_assets"].append(host)

    def on_host_error(self, host, error, result):
        if settings.DEBUG_DEV:
            print("host error: {} -> {}".format(host, error))
        self.summary["fail_assets"] += 1
        self.result["fail_assets"].append((host, str(error)))
        print(f"\033[31m {host} error: {error} \033[0m\n")

    def _on_host_success(self, host, result, hosts):
        self.on_host_success(host, result.get("ok", ""))
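
The loop above splits each platform's assets into bulk_size chunks so that a single playbook run never gets too large. The chunking itself is plain slicing; a standalone sketch:

def chunked(items, size=100):
    # Yield consecutive slices of at most `size` items.
    for start in range(0, len(items), size):
        yield items[start:start + size]


print(list(chunked(list(range(7)), size=3)))  # -> [[0, 1, 2], [3, 4, 5], [6]]
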
@@ -445,7 +480,11 @@ class BasePlaybookManager(BaseManager):
            result = cb.host_results.get(host)
            handler(host, result, hosts)

    def on_runner_failed(self, runner, e):
    def on_runner_failed(self, runner, e, assets=None, **kwargs):
        self.summary["fail_assets"] += len(assets)
        self.result["fail_assets"].extend(
            [(str(asset), str(e)[:10]) for asset in assets]
        )
        print("Runner failed: {} {}".format(e, self))

    def delete_runtime_dir(self):
@@ -467,9 +506,11 @@ class BasePlaybookManager(BaseManager):
        else:
            print(_(">>> No tasks need to be executed"), end="\n")

        for i, runner in enumerate(runners, start=1):
        for i, runner_info in enumerate(runners, start=1):
            if len(runners) > 1:
                print(_(">>> Begin executing batch {index} of tasks").format(index=i))

            runner, info = runner_info
            ssh_tunnel = SSHTunnelManager()
            ssh_tunnel.local_gateway_prepare(runner)
@@ -478,7 +519,7 @@ class BasePlaybookManager(BaseManager):
                cb = runner.run(**kwargs)
                self.on_runner_success(runner, cb)
            except Exception as e:
                self.on_runner_failed(runner, e)
                self.on_runner_failed(runner, e, **info)
            finally:
                ssh_tunnel.local_gateway_clean(runner)
                print("\n")
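
get_runners() now yields (runner, info) tuples, where info carries the batch's assets, inventory path and platform, and on_runner_failed() uses it to charge every asset in a failed batch to the fail_assets counters. A small sketch of that bookkeeping pattern, detached from Ansible (class and field names are illustrative):

from collections import defaultdict


class RunBookkeeping:
    def __init__(self):
        self.summary = defaultdict(int)
        self.result = defaultdict(list)

    def on_runner_failed(self, error, assets=None, **kwargs):
        assets = assets or []
        # A whole batch failed: count and record every asset it contained.
        self.summary["fail_assets"] += len(assets)
        self.result["fail_assets"].extend([(str(a), str(error)[:10]) for a in assets])


book = RunBookkeeping()
book.on_runner_failed(RuntimeError("ssh timeout"), assets=["web-01", "web-02"])
print(dict(book.summary))  # -> {'fail_assets': 2}
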
@@ -74,8 +74,6 @@ def sorted_methods(methods):

BASE_DIR = os.path.dirname(os.path.abspath(__file__))
platform_automation_methods = get_platform_automation_methods(BASE_DIR)
print("platform_automation_methods: ")
print(json.dumps(platform_automation_methods, indent=4))

if __name__ == '__main__':
    print(json.dumps(platform_automation_methods, indent=4))