mirror of https://github.com/jumpserver/jumpserver
perf: gather asset info and test asset connectivity
parent 1c9f754e27
commit 0e5ebfad1c
@@ -12,7 +12,8 @@ from orgs.mixins import generics
 from assets import serializers
 from assets.models import Asset, Gateway
 from assets.tasks import (
-    update_assets_hardware_info_manual, test_assets_connectivity_manual,
+    test_assets_connectivity_manual,
+    update_assets_hardware_info_manual,
 )
 from assets.filters import NodeFilterBackend, LabelFilterBackend, IpInFilterBackend
 from ..mixin import NodeFilterMixin
@@ -78,12 +79,10 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):

 class AssetsTaskMixin:
     def perform_assets_task(self, serializer):
         data = serializer.validated_data
-        action = data['action']
         assets = data.get('assets', [])
-        if action == "refresh":
+        if data['action'] == "refresh":
             task = update_assets_hardware_info_manual.delay(assets)
         else:
-            # action == 'test':
             task = test_assets_connectivity_manual.delay(assets)
         return task

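The mixin expects the serializer to validate an `action` choice and an optional `assets` list. A minimal sketch of such a serializer follows; the field types are assumptions (the real project binds `assets` to Asset primary keys), only the key names mirror the `validated_data` access above.

from rest_framework import serializers

class AssetsTaskSerializer(serializers.Serializer):
    # Hypothetical sketch; field names mirror the validated_data keys used in perform_assets_task.
    action = serializers.ChoiceField(choices=[("refresh", "refresh"), ("test", "test")])
    assets = serializers.ListField(child=serializers.UUIDField(), required=False, default=list)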
@@ -4,11 +4,11 @@ from collections import namedtuple, defaultdict
 from django.core.exceptions import PermissionDenied

 from rest_framework import status
+from rest_framework.generics import get_object_or_404
 from rest_framework.serializers import ValidationError
 from rest_framework.response import Response
 from rest_framework.decorators import action
 from django.utils.translation import ugettext_lazy as _
-from django.shortcuts import get_object_or_404, Http404
 from django.db.models.signals import m2m_changed

 from common.const.http import POST
@@ -16,7 +16,7 @@ from common.exceptions import SomeoneIsDoingThis
 from common.const.signals import PRE_REMOVE, POST_REMOVE
 from common.mixins.api import SuggestionMixin
 from assets.models import Asset
-from common.utils import get_logger, get_object_or_none
+from common.utils import get_logger
 from common.tree import TreeNodeSerializer
 from orgs.mixins.api import OrgBulkModelViewSet
 from orgs.mixins import generics
@@ -339,7 +339,7 @@ class NodeTaskCreateApi(generics.CreateAPIView):

     def get_object(self):
         node_id = self.kwargs.get('pk')
-        node = get_object_or_none(self.model, id=node_id)
+        node = get_object_or_404(self.model, id=node_id)
         return node

     @staticmethod
@@ -361,8 +361,6 @@ class NodeTaskCreateApi(generics.CreateAPIView):
             task = self.refresh_nodes_cache()
             self.set_serializer_data(serializer, task)
             return
-        if node is None:
-            raise Http404()
         if action == "refresh":
             task = update_node_assets_hardware_info_manual.delay(node)
         else:
@@ -1,5 +1,5 @@
 from common.utils import get_logger
-from assets.const import AutomationTypes
+from assets.const import AutomationTypes, Connectivity
 from ..base.manager import BasePlaybookManager

 logger = get_logger(__name__)
@@ -8,13 +8,27 @@ logger = get_logger(__name__)
 class PingManager(BasePlaybookManager):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.host_asset_mapper = {}
+        self.host_asset_and_account_mapper = {}

     @classmethod
     def method_type(cls):
         return AutomationTypes.ping

-    def host_callback(self, host, asset=None, **kwargs):
-        super().host_callback(host, asset=asset, **kwargs)
-        self.host_asset_mapper[host['name']] = asset
+    def host_callback(self, host, asset=None, account=None, **kwargs):
+        super().host_callback(host, asset=asset, account=account, **kwargs)
+        self.host_asset_and_account_mapper[host['name']] = (asset, account)
         return host
+
+    def on_host_success(self, host, result):
+        asset, account = self.host_asset_and_account_mapper.get(host)
+        asset.set_connectivity(Connectivity.ok)
+        if not account:
+            return
+        account.set_connectivity(Connectivity.ok)
+
+    def on_host_error(self, host, error, result):
+        asset, account = self.host_asset_and_account_mapper.get(host)
+        asset.set_connectivity(Connectivity.failed)
+        if not account:
+            return
+        account.set_connectivity(Connectivity.failed)
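For orientation, a rough sketch of how a runner could feed per-host results into these two callbacks; the host keys are the inventory names stored in host_asset_and_account_mapper, and the result payload shapes below are assumptions, not the real Ansible callback format.

def report_ping_results(manager, ok, failed):
    # Assumed shapes: ok = {host_name: result_dict}, failed = {host_name: (error, result_dict)}
    for name, result in ok.items():
        manager.on_host_success(name, result)       # marks asset (and account) Connectivity.ok
    for name, (error, result) in failed.items():
        manager.on_host_error(name, error, result)  # marks asset (and account) Connectivity.failed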
@@ -26,4 +26,16 @@ class Migration(migrations.Migration):
             name='type',
             field=models.CharField(choices=[('ping', 'Ping'), ('gather_facts', 'Gather facts'), ('push_account', 'Create account'), ('change_secret', 'Change secret'), ('verify_account', 'Verify account'), ('gather_accounts', 'Gather accounts')], max_length=16, verbose_name='Type'),
         ),
+        migrations.CreateModel(
+            name='PingAutomation',
+            fields=[
+                ('baseautomation_ptr',
+                 models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
+                                      primary_key=True, serialize=False, to='assets.baseautomation')),
+            ],
+            options={
+                'verbose_name': 'Ping asset',
+            },
+            bases=('assets.baseautomation',),
+        ),
     ]
@@ -4,3 +4,4 @@ from .push_account import *
 from .gather_facts import *
 from .gather_accounts import *
 from .verify_account import *
+from .ping import *
@@ -28,6 +28,15 @@ class BaseAutomation(CommonModelMixin, PeriodTaskModelMixin, OrgModelMixin):
     def __str__(self):
         return self.name + '@' + str(self.created_by)

+    @classmethod
+    def generate_unique_name(cls, name):
+        while True:
+            name = name + str(uuid.uuid4())[:8]
+            try:
+                cls.objects.get(name=name)
+            except cls.DoesNotExist:
+                return name
+
     def get_all_assets(self):
         nodes = self.nodes.all()
         node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
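Each loop iteration appends an 8-character uuid4 fragment to the candidate name and returns as soon as no row with that name exists, so callers get a collision-free task name. A usage sketch, with an illustrative result:

# Usage sketch: derive a unique automation name from a translatable prefix.
task_name = PingAutomation.generate_unique_name("Test assets connectivity. ")
# e.g. "Test assets connectivity. 1a2b3c4d"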
@@ -0,0 +1,15 @@
+from django.utils.translation import ugettext_lazy as _
+
+from assets.const import AutomationTypes
+from .base import BaseAutomation
+
+__all__ = ['PingAutomation']
+
+
+class PingAutomation(BaseAutomation):
+    def save(self, *args, **kwargs):
+        self.type = AutomationTypes.ping
+        super().save(*args, **kwargs)
+
+    class Meta:
+        verbose_name = _("Ping asset")
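Because save() pins the automation type, callers never set it themselves. A brief sketch, assuming the usual Django model flow (the name below is hypothetical):

automation = PingAutomation(name="ping-demo")
automation.save()
assert automation.type == AutomationTypes.ping  # save() forces the ping type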
@@ -19,14 +19,12 @@ logger = get_logger(__file__)

 def update_asset_hardware_info_on_created(asset):
     logger.debug("Update asset `{}` hardware info".format(asset))
-    # Todo:
-    # update_assets_hardware_info_util.delay([asset])
+    update_assets_hardware_info_util.delay([asset])


 def test_asset_conn_on_created(asset):
     logger.debug("Test asset `{}` connectivity".format(asset))
-    # Todo:
-    # test_asset_connectivity_util.delay([asset])
+    test_asset_connectivity_util.delay([asset])


 @receiver(pre_save, sender=Node)
@@ -1,124 +1,50 @@
 # ~*~ coding: utf-8 ~*~
-from itertools import groupby
-from collections import defaultdict
 from celery import shared_task
 from django.utils.translation import gettext_noop

 from common.utils import get_logger
 from orgs.utils import org_aware_func, tmp_to_root_org
-from ..models import Asset, Connectivity, Account, Node
-from . import const
-from .utils import clean_ansible_task_hosts, group_asset_by_platform


 logger = get_logger(__file__)
 __all__ = [
-    'test_asset_connectivity_util', 'test_asset_connectivity_manual',
-    'test_node_assets_connectivity_manual', 'test_assets_connectivity_manual',
+    'test_asset_connectivity_util',
+    'test_assets_connectivity_manual',
+    'test_node_assets_connectivity_manual',
 ]


-# Todo: there may still be a problem here
-def set_assets_accounts_connectivity(assets, results_summary):
-    asset_ids_ok = set()
-    asset_ids_failed = set()
-
-    asset_hostnames_ok = results_summary.get('contacted', {}).keys()
-
-    for asset in assets:
-        if asset.name in asset_hostnames_ok:
-            asset_ids_ok.add(asset.id)
-        else:
-            asset_ids_failed.add(asset.id)
-
-    Asset.bulk_set_connectivity(asset_ids_ok, Connectivity.ok)
-    Asset.bulk_set_connectivity(asset_ids_failed, Connectivity.failed)
-
-    accounts_ok = Account.objects.filter(asset_id__in=asset_ids_ok,)
-    accounts_failed = Account.objects.filter(asset_id__in=asset_ids_faile)
-
-    Account.bulk_set_connectivity(accounts_ok, Connectivity.ok)
-    Account.bulk_set_connectivity(accounts_failed, Connectivity.failed)
-
-
 @org_aware_func('assets')
 def test_asset_connectivity_util(assets, task_name=None):
-    from ops.utils import update_or_create_ansible_task
+    from assets.models import PingAutomation

     if task_name is None:
         task_name = gettext_noop("Test assets connectivity. ")

-    hosts = clean_ansible_task_hosts(assets)
-    if not hosts:
-        return {}
-    platform_hosts_map = {}
-    hosts_sorted = sorted(hosts, key=group_asset_by_platform)
-    platform_hosts = groupby(hosts_sorted, key=group_asset_by_platform)
-    for i in platform_hosts:
-        platform_hosts_map[i[0]] = list(i[1])
-
-    platform_tasks_map = {
-        "unixlike": const.PING_UNIXLIKE_TASKS,
-        "windows": const.PING_WINDOWS_TASKS
+    task_name = PingAutomation.generate_unique_name(task_name)
+    data = {
+        'name': task_name,
+        'comment': ', '.join([str(i) for i in assets])
     }
-    results_summary = dict(
-        contacted=defaultdict(dict), dark=defaultdict(dict), success=True
-    )
-    for platform, _hosts in platform_hosts_map.items():
-        if not _hosts:
-            continue
-        logger.debug("System user not has special auth")
-        tasks = platform_tasks_map.get(platform)
-        task, created = update_or_create_ansible_task(
-            task_name=task_name, hosts=_hosts, tasks=tasks,
-            pattern='all', options=const.TASK_OPTIONS, run_as_admin=True,
-        )
-        raw, summary = task.run()
-        success = summary.get('success', False)
-        contacted = summary.get('contacted', {})
-        dark = summary.get('dark', {})
-
-        results_summary['success'] &= success
-        results_summary['contacted'].update(contacted)
-        results_summary['dark'].update(dark)
-        continue
-    set_assets_accounts_connectivity(assets, results_summary)
-    return results_summary
-
-
-@shared_task(queue="ansible")
-def test_asset_connectivity_manual(asset_id):
-    asset = Asset.objects.filter(id=asset_id).first()
-    if not asset:
-        return
-    task_name = gettext_noop("Test assets connectivity: ") + str(asset)
-    summary = test_asset_connectivity_util([asset], task_name=task_name)
-
-    if summary.get('dark'):
-        return False, summary['dark']
-    else:
-        return True, ""
+    instance = PingAutomation.objects.create(**data)
+    instance.assets.add(*assets)
+    instance.execute()


 @shared_task(queue="ansible")
 def test_assets_connectivity_manual(asset_ids):
+    from assets.models import Asset
     with tmp_to_root_org():
         assets = Asset.objects.filter(id__in=asset_ids)
-        task_name = gettext_noop("Test assets connectivity: ") + str([asset.name for asset in assets])
-        summary = test_asset_connectivity_util(assets, task_name=task_name)
-
-        if summary.get('dark'):
-            return False, summary['dark']
-        else:
-            return True, ""
+        task_name = gettext_noop("Test assets connectivity: ")
+        test_asset_connectivity_util(assets, task_name=task_name)


 @shared_task(queue="ansible")
 def test_node_assets_connectivity_manual(node_id):
+    from assets.models import Node
     with tmp_to_root_org():
         node = Node.objects.get(id=node_id)

-        task_name = gettext_noop("Test if the assets under the node are connectable: ") + node.name
+        task_name = gettext_noop("Test if the assets under the node are connectable: ")
         assets = node.get_all_assets()
-        result = test_asset_connectivity_util(assets, task_name=task_name)
-        return result
+        test_asset_connectivity_util(assets, task_name=task_name)
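As wired up in the API mixin earlier in this diff, the manual task is enqueued with a list of asset ids and resolves them under the root org before delegating to the util. A hedged enqueue sketch (ids assumed to be asset primary keys):

# Sketch: how a view or other enqueue site might call the task.
task = test_assets_connectivity_manual.delay([str(asset.id) for asset in assets])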
@@ -1,149 +1,50 @@
 # -*- coding: utf-8 -*-
 #
-import json
-import re

 from celery import shared_task
-from django.utils.translation import ugettext as _, gettext_noop
+from django.utils.translation import gettext_noop

-from common.utils import (
-    capacity_convert, sum_capacity, get_logger
-)
+from common.utils import get_logger
 from orgs.utils import org_aware_func, tmp_to_root_org
-from . import const
-from ..models import Asset, Node
-from .utils import clean_ansible_task_hosts


 logger = get_logger(__file__)
-disk_pattern = re.compile(r'^hd|sd|xvd|vd|nv')
 __all__ = [
-    'update_assets_hardware_info_util', 'update_asset_hardware_info_manual',
-    'update_assets_hardware_info_period', 'update_node_assets_hardware_info_manual',
+    'update_assets_hardware_info_util',
+    'update_node_assets_hardware_info_manual',
     'update_assets_hardware_info_manual',
 ]


-def set_assets_hardware_info(assets, result, **kwargs):
-    """
-    Using ops task run result, to update asset info
-
-    be a celery task also
-    :param assets:
-    :param result:
-    :param kwargs: {task_name: ""}
-    :return:
-    """
-    result_raw = result[0]
-    assets_updated = []
-    success_result = result_raw.get('ok', {})
-
-    for asset in assets:
-        hostname = asset.name
-        info = success_result.get(hostname, {})
-        info = info.get('setup', {}).get('ansible_facts', {})
-        if not info:
-            logger.error(_("Get asset info failed: {}").format(hostname))
-            continue
-        ___vendor = info.get('ansible_system_vendor', 'Unknown')
-        ___model = info.get('ansible_product_name', 'Unknown')
-        ___sn = info.get('ansible_product_serial', 'Unknown')
-
-        for ___cpu_model in info.get('ansible_processor', []):
-            if ___cpu_model.endswith('GHz') or ___cpu_model.startswith("Intel"):
-                break
-        else:
-            ___cpu_model = 'Unknown'
-        ___cpu_model = ___cpu_model[:48]
-        ___cpu_count = info.get('ansible_processor_count', 0)
-        ___cpu_cores = info.get('ansible_processor_cores', None) or \
-            len(info.get('ansible_processor', []))
-        ___cpu_vcpus = info.get('ansible_processor_vcpus', 0)
-        ___memory = '%s %s' % capacity_convert(
-            '{} MB'.format(info.get('ansible_memtotal_mb'))
-        )
-        disk_info = {}
-        for dev, dev_info in info.get('ansible_devices', {}).items():
-            if disk_pattern.match(dev) and dev_info['removable'] == '0':
-                disk_info[dev] = dev_info['size']
-        ___disk_total = '%.1f %s' % sum_capacity(disk_info.values())
-        ___disk_info = json.dumps(disk_info)
-
-        # ___platform = info.get('ansible_system', 'Unknown')
-        ___os = info.get('ansible_distribution', 'Unknown')
-        ___os_version = info.get('ansible_distribution_version', 'Unknown')
-        ___os_arch = info.get('ansible_architecture', 'Unknown')
-        ___hostname_raw = info.get('ansible_hostname', 'Unknown')
-
-        for k, v in locals().items():
-            if k.startswith('___'):
-                setattr(asset, k.strip('_'), v)
-        asset.save()
-        assets_updated.append(asset)
-    return assets_updated
-
-
 @org_aware_func('assets')
 def update_assets_hardware_info_util(assets, task_name=None):
-    """
-    Using ansible api to update asset hardware info
-    :param asset_ids: asset seq
-    :param task_name: task_name running
-    :return: result summary ['contacted': {}, 'dark': {}]
-    """
-
-    from ops.utils import update_or_create_ansible_task
+    from assets.models import GatherFactsAutomation
     if task_name is None:
         task_name = gettext_noop("Update some assets hardware info. ")
-    tasks = const.UPDATE_ASSETS_HARDWARE_TASKS
-    hosts = clean_ansible_task_hosts(assets)
-    if not hosts:
-        return {}
-    task, created = update_or_create_ansible_task(
-        task_name, hosts=hosts, tasks=tasks,
-        pattern='all', options=const.TASK_OPTIONS,
-        run_as_admin=True,
-    )
-    result = task.run()
-    set_assets_hardware_info(assets, result)
-    return True
-
-
-@shared_task(queue="ansible")
-def update_asset_hardware_info_manual(asset_id):
-    with tmp_to_root_org():
-        asset = Asset.objects.filter(id=asset_id).first()
-        if not asset:
-            return
-        task_name = gettext_noop("Update asset hardware info: ") + str(asset.name)
-        update_assets_hardware_info_util([asset], task_name=task_name)
+
+    task_name = GatherFactsAutomation.generate_unique_name(task_name)
+    data = {
+        'name': task_name,
+        'comment': ', '.join([str(i) for i in assets])
+    }
+    instance = GatherFactsAutomation.objects.create(**data)
+    instance.assets.add(*assets)
+    instance.execute()


 @shared_task(queue="ansible")
 def update_assets_hardware_info_manual(asset_ids):
-    task_name = gettext_noop("Update assets hardware info: ") + str([asset.name for asset in assets])
-    update_assets_hardware_info_util(asset_ids, task_name=task_name)
-
-
-@shared_task(queue="ansible")
-def update_assets_hardware_info_period():
-    """
-    Update asset hardware period task
-    :return:
-    """
-    if not const.PERIOD_TASK_ENABLED:
-        logger.debug("Period task disabled, update assets hardware info pass")
-        return
+    from assets.models import Asset
+    with tmp_to_root_org():
+        assets = Asset.objects.filter(id__in=asset_ids)
+        task_name = gettext_noop("Update assets hardware info: ")
+        update_assets_hardware_info_util(assets, task_name=task_name)


 @shared_task(queue="ansible")
 def update_node_assets_hardware_info_manual(node_id):
+    from assets.models import Node
     with tmp_to_root_org():
-        node = Node.objects.filter(id=node_id).first()
-        if not node:
-            return
-
-        task_name = gettext_noop("Update node asset hardware information: ") + str(node.name)
+        node = Node.objects.get(id=node_id)
+        task_name = gettext_noop("Update node asset hardware information: ")
         assets = node.get_all_assets()
-        result = update_assets_hardware_info_util(assets, task_name=task_name)
-        return result
+        update_assets_hardware_info_util(assets, task_name=task_name)