import hashlib
import json
import os
import shutil
import time
from collections import defaultdict
from socket import gethostname

import yaml
from django.conf import settings
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.translation import gettext as _
from premailer import transform
from sshtunnel import SSHTunnelForwarder

from assets.automations.methods import platform_automation_methods
from common.db.utils import safe_db_connection
from common.tasks import send_mail_async
from common.utils import get_logger, lazyproperty, is_openssh_format_key, ssh_pubkey_gen
from ops.ansible import JMSInventory, DefaultCallback, SuperPlaybookRunner
from ops.ansible.interface import interface

logger = get_logger(__name__)


class SSHTunnelManager:
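    """Open local SSH tunnels (sshtunnel.SSHTunnelForwarder) for inventory
    hosts that reach their asset through a gateway, and rewrite the
    inventory so Ansible connects via the forwarded local port."""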

    def __init__(self, *args, **kwargs):
        self.gateway_servers = dict()

    @staticmethod
    def file_to_json(path):
        with open(path, "r") as f:
            d = json.load(f)
        return d

    @staticmethod
    def json_to_file(path, data):
        with open(path, "w") as f:
            json.dump(data, f, indent=4, sort_keys=True)

    def local_gateway_prepare(self, runner):
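        """For every host in the runner's inventory that defines a gateway,
        start an SSH tunnel to the asset and point ansible_host/ansible_port
        at the tunnel's local end; hosts whose gateway cannot be reached are
        removed from the inventory."""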
        info = self.file_to_json(runner.inventory)
        servers, not_valid = [], []
        for k, host in info["all"]["hosts"].items():
            jms_asset, jms_gateway = host.get("jms_asset"), host.get("jms_gateway")
            if not jms_gateway:
                continue
            try:
                server = SSHTunnelForwarder(
                    (jms_gateway["address"], jms_gateway["port"]),
                    ssh_username=jms_gateway["username"],
                    ssh_password=jms_gateway["secret"],
                    ssh_pkey=jms_gateway["private_key_path"],
                    remote_bind_address=(jms_asset["address"], jms_asset["port"]),
                )
                server.start()
            except Exception as e:
                err_msg = "Gateway is not active: %s" % jms_asset.get("name", "")
                print(f"\033[31m {err_msg} reason: {e} \033[0m\n")
                not_valid.append(k)
            else:
                local_bind_port = server.local_bind_port
                host["ansible_host"] = jms_asset["address"] = host["login_host"] = (
                    interface.get_gateway_proxy_host()
                )
                host["ansible_port"] = jms_asset["port"] = host["login_port"] = (
                    local_bind_port
                )
                servers.append(server)

        # If the gateway could not be connected, skip the remaining tasks for that asset
        for a in set(not_valid):
            info["all"]["hosts"].pop(a)
        self.json_to_file(runner.inventory, info)
        self.gateway_servers[runner.id] = servers

    def local_gateway_clean(self, runner):
        servers = self.gateway_servers.get(runner.id, [])
        for s in servers:
            try:
                s.stop()
            except Exception:
                pass


class PlaybookCallback(DefaultCallback):
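    """Result callback handed to SuperPlaybookRunner; DefaultCallback
    collects the per-host results and summary consumed later by
    on_runner_success()."""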

    def playbook_on_stats(self, event_data, **kwargs):
        super().playbook_on_stats(event_data, **kwargs)


class BaseManager:
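    """Common lifecycle for an automation execution: record start/finish
    times, accumulate a summary and per-asset results, print the summary
    and optionally mail an HTML report to the execution's recipients."""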

    def __init__(self, execution):
        self.execution = execution
        self.time_start = time.time()
        self.summary = defaultdict(int)
        self.result = defaultdict(list)
        self.duration = 0

    def get_assets_group_by_platform(self):
        return self.execution.all_assets_group_by_platform()

    def pre_run(self):
        self.execution.date_start = timezone.now()
        self.execution.save(update_fields=["date_start"])

    def update_execution(self):
        self.duration = int(time.time() - self.time_start)
        self.execution.date_finished = timezone.now()
        self.execution.duration = self.duration
        self.execution.summary = self.summary
        self.execution.result = self.result
        self.execution.status = "success"

        with safe_db_connection():
            self.execution.save()

    def print_summary(self):
        content = "\nSummary: \n"
        for k, v in self.summary.items():
            content += f"\t - {k}: {v}\n"
        content += "\t - Using: {}s\n".format(self.duration)
        print(content)

    def get_report_template(self):
        raise NotImplementedError

    def get_report_subject(self):
        return f"Automation {self.execution.id} finished"

    def get_report_context(self):
        return {
            "execution": self.execution,
            "summary": self.execution.summary,
            "result": self.execution.result,
        }

    def send_report_if_need(self):
        recipients = self.execution.recipients
        if not recipients:
            return
        print("Send report to: ", ",".join(recipients))

        report = self.gen_report()
        report = transform(report)
        subject = self.get_report_subject()
        emails = [r.email for r in recipients if r.email]
        send_mail_async(subject, report, emails, html_message=report)

    def gen_report(self):
        template_path = self.get_report_template()
        context = self.get_report_context()
        data = render_to_string(template_path, context)
        return data

    def post_run(self):
        self.update_execution()
        self.print_summary()
        self.send_report_if_need()

    def run(self, *args, **kwargs):
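        """Template method: pre_run() stamps the start time, do_run() does
        the actual work (implemented by subclasses), post_run() persists the
        execution, prints the summary and sends the report if needed."""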
        self.pre_run()
        self.do_run(*args, **kwargs)
        self.post_run()

    def do_run(self, *args, **kwargs):
        raise NotImplementedError

    @staticmethod
    def json_dumps(data):
        return json.dumps(data, indent=4, sort_keys=True)


class PlaybookPrepareMixin:
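    """Helpers that turn platform automation metadata into runnable Ansible
    material: per-method params, runtime directories, inventories, playbooks,
    certificate files and private-key files."""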

    bulk_size = 100
    ansible_account_policy = "privileged_first"
    ansible_account_prefer = "root,Administrator"

    summary: dict
    result: dict
    params: dict
    execution = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # example: {'gather_fact_windows': {'id': 'gather_fact_windows', 'name': '', 'method': 'gather_fact', ...} }
        self.method_id_meta_mapper = {
            method["id"]: method
            for method in self.platform_automation_methods
            if method["method"] == self.__class__.method_type()
        }
        # Group assets by execution method: password change, push and similar
        # operations may use different methods for different assets.
        # Assets are grouped by method first, then split by bulk_size,
        # and a separate playbook is generated for each group.
        self.playbooks = []

    @classmethod
    def method_type(cls):
        raise NotImplementedError

    def get_params(self, automation, method_type):
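        """Resolve the params for the automation's method: prefer the params
        from the execution snapshot, fall back to the ones stored on the
        automation, then validate them through the method's params
        serializer (an empty dict if the method has no serializer)."""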
        method_attr = "{}_method".format(method_type)
        method_params = "{}_params".format(method_type)
        method_id = getattr(automation, method_attr)
        automation_params = getattr(automation, method_params)
        serializer = self.method_id_meta_mapper[method_id]["params_serializer"]

        if serializer is None:
            return {}

        data = self.params.get(method_id)
        if not data:
            data = automation_params.get(method_id, {})
        params = serializer(data).data
        return params

    @property
    def platform_automation_methods(self):
        return platform_automation_methods

    def prepare_runtime_dir(self):
        ansible_dir = settings.ANSIBLE_DIR
        task_name = self.execution.snapshot["name"]
        dir_name = "{}_{}".format(task_name.replace(" ", "_"), self.execution.id)
        path = os.path.join(
            ansible_dir,
            "automations",
            self.execution.snapshot["type"],
            dir_name,
            timezone.now().strftime("%Y%m%d_%H%M%S"),
        )
        if not os.path.exists(path):
            os.makedirs(path, exist_ok=True, mode=0o755)
        return path

    def host_callback(self, host, automation=None, **kwargs):
        method_type = self.__class__.method_type()
        host = self.convert_cert_to_file(host, kwargs.get("path_dir"))
        host["params"] = self.get_params(automation, method_type)
        return host

    @staticmethod
    def write_cert_to_file(filename, content):
        with open(filename, "w") as f:
            f.write(content)
        return filename

    def convert_cert_to_file(self, host, path_dir):
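        """If the asset's secret_info carries inline certificate material
        (ca_cert, client_key, client_cert), write each one to a file under
        <path_dir>/certs and replace the inline value with the file path."""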
        if not path_dir:
            return host

        specific = host.get("jms_asset", {}).get("secret_info", {})
        cert_fields = ("ca_cert", "client_key", "client_cert")
        filtered = list(filter(lambda x: specific.get(x), cert_fields))
        if not filtered:
            return host

        cert_dir = os.path.join(path_dir, "certs")
        if not os.path.exists(cert_dir):
            os.makedirs(cert_dir, 0o700, True)

        for f in filtered:
            result = self.write_cert_to_file(os.path.join(cert_dir, f), specific.get(f))
            host["jms_asset"]["secret_info"][f] = result
        return host

    @staticmethod
    def generate_public_key(private_key):
        return ssh_pubkey_gen(private_key=private_key, hostname=gethostname())

    @staticmethod
    def generate_private_key_path(secret, path_dir):
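        """Write the secret to a hidden file named after its MD5 digest under
        path_dir with mode 0o400, appending a trailing newline for
        OpenSSH-format keys, and return the file path."""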
        key_name = "." + hashlib.md5(secret.encode("utf-8")).hexdigest()
        key_path = os.path.join(path_dir, key_name)

        if not os.path.exists(key_path):
            # https://github.com/ansible/ansible-runner/issues/544
            # ssh requires OpenSSH format keys to have a full ending newline.
            # It does not require this for old-style PEM keys.
            with open(key_path, "w") as f:
                f.write(secret)
                if is_openssh_format_key(secret.encode("utf-8")):
                    f.write("\n")
            os.chmod(key_path, 0o400)
        return key_path

    def generate_inventory(self, platformed_assets, inventory_path, protocol):
        inventory = JMSInventory(
            assets=platformed_assets,
            account_prefer=self.ansible_account_prefer,
            account_policy=self.ansible_account_policy,
            host_callback=self.host_callback,
            task_type=self.__class__.method_type(),
            protocol=protocol,
        )
        inventory.write_to_file(inventory_path)

    @lazyproperty
    def runtime_dir(self):
        path = self.prepare_runtime_dir()
        if settings.DEBUG_DEV:
            msg = "Ansible runtime dir: {}".format(path)
            print(msg)
        return path

    @staticmethod
    def generate_playbook(method, sub_playbook_dir):
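        """Copy the method's playbook directory to <sub_playbook_dir>/project,
        rewrite every play to target hosts "all", and return the path of the
        copied main.yml."""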
        method_playbook_dir_path = method["dir"]
        sub_playbook_path = os.path.join(sub_playbook_dir, "project", "main.yml")
        shutil.copytree(method_playbook_dir_path, os.path.dirname(sub_playbook_path))

        with open(sub_playbook_path, "r") as f:
            plays = yaml.safe_load(f)
        for play in plays:
            play["hosts"] = "all"

        with open(sub_playbook_path, "w") as f:
            yaml.safe_dump(plays, f)
        return sub_playbook_path

    def check_automation_enabled(self, platform, assets):
        if not platform.automation or not platform.automation.ansible_enabled:
            print(_(" - Platform {} ansible disabled").format(platform.name))
            self.on_assets_not_ansible_enabled(assets)
            return False

        automation = platform.automation

        method_type = self.__class__.method_type()
        enabled_attr = "{}_enabled".format(method_type)
        method_attr = "{}_method".format(method_type)

        method_enabled = (
            automation
            and getattr(automation, enabled_attr)
            and getattr(automation, method_attr)
            and getattr(automation, method_attr) in self.method_id_meta_mapper
        )

        if not method_enabled:
            self.on_assets_not_method_enabled(assets, method_type)
            return False
        return True

    def on_assets_not_ansible_enabled(self, assets):
        self.summary["error_assets"] += len(assets)
        self.result["error_assets"].extend([str(asset) for asset in assets])
        for asset in assets:
            print("\t{}".format(asset))

    def on_assets_not_method_enabled(self, assets, method_type):
        self.summary["error_assets"] += len(assets)
        self.result["error_assets"].extend([str(asset) for asset in assets])
        for asset in assets:
            print("\t{}".format(asset))

    def on_playbook_not_found(self, assets):
        print("Playbook generate failed")


class BasePlaybookManager(PlaybookPrepareMixin, BaseManager):
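    """Run a platform automation by generating one inventory/playbook pair per
    platform (and per bulk of assets), executing each with SuperPlaybookRunner
    and recording per-host success or failure.

    A minimal subclass sketch (hypothetical method id):

        class ChangeSecretManager(BasePlaybookManager):
            @classmethod
            def method_type(cls):
                return "change_secret"
    """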

    bulk_size = 100
    ansible_account_policy = "privileged_first"
    ansible_account_prefer = "root,Administrator"

    def __init__(self, execution):
        super().__init__(execution)
        self.params = execution.snapshot.get("params", {})

    def get_assets_group_by_platform(self):
        return self.execution.all_assets_group_by_platform()

    @classmethod
    def method_type(cls):
        raise NotImplementedError

    def get_runners_by_platform(self, platform, _assets, _index):
        sub_dir = "{}_{}".format(platform.name, _index)
        playbook_dir = os.path.join(self.runtime_dir, sub_dir)
        inventory_path = os.path.join(self.runtime_dir, sub_dir, "hosts.json")

        method_id = getattr(
            platform.automation,
            "{}_method".format(self.__class__.method_type()),
        )
        method = self.method_id_meta_mapper.get(method_id)

        protocol = method.get("protocol")
        self.generate_inventory(_assets, inventory_path, protocol)
        playbook_path = self.generate_playbook(method, playbook_dir)

        if not playbook_path:
            self.on_playbook_not_found(_assets)
            return None, None

        runner = SuperPlaybookRunner(
            inventory_path,
            playbook_path,
            self.runtime_dir,
            callback=PlaybookCallback(),
        )
        return runner, inventory_path

    def get_runners(self):
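        """Build the list of (runner, info) pairs to execute: group assets by
        platform, skip platforms without Ansible automation enabled, split
        large groups into bulks of bulk_size, and drop bulks whose generated
        inventory contains no hosts."""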
        assets_group_by_platform = self.get_assets_group_by_platform()
        if settings.DEBUG_DEV:
            msg = "Assets group by platform: {}".format(dict(assets_group_by_platform))
            print(msg)

        runners = []
        for platform, assets in assets_group_by_platform.items():
            self.summary["total_assets"] += len(assets)
            if not assets:
                print("No assets for platform: {}".format(platform.name))
                continue

            if not self.check_automation_enabled(platform, assets):
                print("Platform {} ansible disabled".format(platform.name))
                continue

            # Split the assets into bulks so a single task does not get too large
            assets_bulked = [
                assets[i : i + self.bulk_size]
                for i in range(0, len(assets), self.bulk_size)
            ]
            for i, _assets in enumerate(assets_bulked, start=1):
                runner, inventory_path = self.get_runners_by_platform(
                    platform, _assets, i
                )

                if not runner or not inventory_path:
                    continue

                with open(inventory_path, "r") as f:
                    inventory_data = json.load(f)
                if not inventory_data["all"].get("hosts"):
                    continue

                runners.append(
                    (
                        runner,
                        {
                            "assets": _assets,
                            "inventory": inventory_path,
                            "platform": platform,
                        },
                    )
                )
        return runners

    def on_host_success(self, host, result):
        self.summary["ok_assets"] += 1
        self.result["ok_assets"].append(host)

    def on_host_error(self, host, error, result):
        self.summary["fail_assets"] += 1
        self.result["fail_assets"].append((host, str(error)))
        print(f"\033[31m {host} error: {error} \033[0m\n")

    def _on_host_success(self, host, result, hosts):
        self.on_host_success(host, result.get("ok", ""))

    def _on_host_error(self, host, result, hosts):
        error = hosts.get(host, "")
        detail = result.get("failures", "") or result.get("dark", "")
        self.on_host_error(host, error, detail)

    def on_runner_success(self, runner, cb):
        summary = cb.summary
        for state, hosts in summary.items():
            # On error, hosts is a dict; when ok, it is a list
            if state == "ok":
                handler = self._on_host_success
            elif state == "skipped":
                continue
            else:
                handler = self._on_host_error

            for host in hosts:
                result = cb.host_results.get(host)
                handler(host, result, hosts)

    def on_runner_failed(self, runner, e, assets=None, **kwargs):
        self.summary["fail_assets"] += len(assets)
        self.result["fail_assets"].extend(
            [(str(asset), str(e)[:10]) for asset in assets]
        )
        print("Runner failed: {} {}".format(e, self))

    def delete_runtime_dir(self):
        if settings.DEBUG_DEV:
            return
        shutil.rmtree(self.runtime_dir, ignore_errors=True)

    def do_run(self, *args, **kwargs):
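        """Execute every prepared runner in sequence, opening the required SSH
        gateway tunnels before each run and always cleaning them up afterwards;
        a runner failure is recorded but does not stop the remaining batches."""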
        print(_(">>> Task preparation phase"), end="\n")
        runners = self.get_runners()
        if len(runners) > 1:
            print(
                _(">>> Executing tasks in batches, total {runner_count}").format(
                    runner_count=len(runners)
                )
            )
        elif len(runners) == 1:
            print(_(">>> Start executing tasks"))
        else:
            print(_(">>> No tasks need to be executed"), end="\n")

        for i, runner_info in enumerate(runners, start=1):
            if len(runners) > 1:
                print(_(">>> Begin executing batch {index} of tasks").format(index=i))

            runner, info = runner_info
            ssh_tunnel = SSHTunnelManager()
            ssh_tunnel.local_gateway_prepare(runner)

            try:
                kwargs.update({"clean_workspace": False})
                cb = runner.run(**kwargs)
                self.on_runner_success(runner, cb)
            except Exception as e:
                self.on_runner_failed(runner, e, **info)
            finally:
                ssh_tunnel.local_gateway_clean(runner)
                print("\n")