mirror of https://github.com/jumpserver/jumpserver
parent 956367cfed
commit 697b3fb860
@@ -10,7 +10,7 @@ from common.permissions import UserConfirmation
 from authentication.const import ConfirmType
 from assets.models import Account
 from assets.filters import AccountFilterSet
-from assets.tasks.account_connectivity import test_accounts_connectivity_manual
+from assets.tasks import verify_accounts_connectivity
 from assets import serializers

 __all__ = ['AccountViewSet', 'AccountSecretsViewSet', 'AccountTaskCreateAPI', 'AccountHistoriesSecretAPI']
@@ -32,7 +32,9 @@ class AccountViewSet(OrgBulkModelViewSet):
     @action(methods=['post'], detail=True, url_path='verify')
     def verify_account(self, request, *args, **kwargs):
         account = super().get_object()
-        task = test_accounts_connectivity_manual.delay([account.id])
+        account_ids = [account.id]
+        asset_ids = [account.asset_id]
+        task = verify_accounts_connectivity.delay(account_ids, asset_ids)
         return Response(data={'task': task.id})


@@ -80,8 +82,10 @@ class AccountTaskCreateAPI(CreateAPIView):
         return queryset

     def perform_create(self, serializer):
-        account_ids = self.get_accounts().values_list('id', flat=True)
-        task = test_accounts_connectivity_manual.delay(account_ids)
+        accounts = self.get_accounts()
+        account_ids = accounts.values_list('id', flat=True)
+        asset_ids = [account.asset_id for account in accounts]
+        task = verify_accounts_connectivity.delay(account_ids, asset_ids)
         data = getattr(serializer, '_data', {})
         data["task"] = task.id
         setattr(serializer, '_data', data)
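Note: both call sites above replace the single-argument test_accounts_connectivity_manual.delay(account_ids) with the two-argument verify_accounts_connectivity.delay(account_ids, asset_ids). A minimal sketch of the new call shape, assuming the ids are plain UUID collections as in the views here:

    # Illustrative only: both arguments are collections of primary keys, not model instances.
    account_ids = [account.id]
    asset_ids = [account.asset_id]
    task = verify_accounts_connectivity.delay(account_ids, asset_ids)
    payload = {'task': task.id}  # the Celery task id the API hands back for polling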
@@ -12,6 +12,8 @@ from orgs.mixins import generics
 from assets import serializers
 from assets.models import Asset, Gateway
 from assets.tasks import (
+    push_accounts_to_assets,
+    verify_accounts_connectivity,
     test_assets_connectivity_manual,
     update_assets_hardware_info_manual,
 )
@@ -110,9 +112,9 @@ class AssetTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView):
         action = request.data.get('action')
         action_perm_require = {
             'refresh': 'assets.refresh_assethardwareinfo',
-            'push_system_user': 'assets.push_assetsystemuser',
+            'push_account': 'assets.push_assetsystemuser',
             'test': 'assets.test_assetconnectivity',
-            'test_system_user': 'assets.test_assetconnectivity'
+            'test_account': 'assets.test_assetconnectivity'
         }
         perm_required = action_perm_require.get(action)
         has = self.request.user.has_perm(perm_required)
@@ -120,20 +122,23 @@ class AssetTaskCreateApi(AssetsTaskMixin, generics.CreateAPIView):
         if not has:
             self.permission_denied(request)

-    def perform_asset_task(self, serializer):
+    @staticmethod
+    def perform_asset_task(serializer):
         data = serializer.validated_data
-        action = data['action']
-        if action not in ['push_system_user', 'test_system_user']:
+        if data['action'] not in ['push_system_user', 'test_system_user']:
             return

         asset = data['asset']
-        system_users = data.get('system_users')
-        if not system_users:
-            system_users = asset.get_all_system_users()
-        if action == 'push_system_user':
-            task = push_system_users_a_asset.delay(system_users, asset=asset)
-        elif action == 'test_system_user':
-            task = test_system_users_connectivity_a_asset.delay(system_users, asset=asset)
+        accounts = data.get('accounts')
+        if not accounts:
+            accounts = asset.accounts.all()
+
+        asset_ids = [asset.id]
+        account_ids = accounts.values_list('id', flat=True)
+        if action == 'push_account':
+            task = push_accounts_to_assets.delay(account_ids, asset_ids)
+        elif action == 'test_account':
+            task = verify_accounts_connectivity.delay(account_ids, asset_ids)
         else:
             task = None
         return task
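Note: the reworked perform_asset_task dispatches on the renamed actions 'push_account' and 'test_account'. A hedged sketch of the request body it expects, using the field names from AssetTaskSerializer further down; the exact URL and id values are assumptions, not part of this diff:

    # Hypothetical POST body handled by AssetTaskCreateApi; ids are placeholders.
    payload = {
        'action': 'test_account',        # or 'push_account' / 'refresh' / 'test'
        'asset': '<asset-uuid>',
        'accounts': ['<account-uuid>'],  # optional; falls back to asset.accounts.all()
    }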
@@ -38,4 +38,13 @@ class Migration(migrations.Migration):
             },
             bases=('assets.baseautomation',),
         ),
+        migrations.AlterModelOptions(
+            name='asset',
+            options={'ordering': ['name'],
+                     'permissions': [('refresh_assethardwareinfo', 'Can refresh asset hardware info'),
+                                     ('test_assetconnectivity', 'Can test asset connectivity'),
+                                     ('push_assetaccount', 'Can push account to asset'),
+                                     ('match_asset', 'Can match asset'), ('add_assettonode', 'Add asset to node'),
+                                     ('move_assettonode', 'Move asset to node')], 'verbose_name': 'Asset'},
+        ),
     ]
@@ -229,7 +229,7 @@ class Asset(NodesRelationMixin, AbsConnectivity, JMSOrgBaseModel):
         permissions = [
             ('refresh_assethardwareinfo', _('Can refresh asset hardware info')),
             ('test_assetconnectivity', _('Can test asset connectivity')),
-            ('push_assetsystemuser', _('Can push system user to asset')),
+            ('push_assetaccount', _('Can push account to asset')),
             ('match_asset', _('Can match asset')),
             ('add_assettonode', _('Add asset to node')),
             ('move_assettonode', _('Move asset to node')),
@@ -206,3 +206,6 @@ class AssetTaskSerializer(AssetsTaskSerializer):
     asset = serializers.PrimaryKeyRelatedField(
         queryset=Asset.objects, required=False, allow_empty=True, many=False
     )
+    accounts = serializers.PrimaryKeyRelatedField(
+        queryset=Account.objects, required=False, allow_empty=True, many=True
+    )
@@ -1,12 +1,11 @@
 # -*- coding: utf-8 -*-
 #
+from .ping import *
 from .utils import *
 from .common import *
 from .backup import *
 from .automation import *
+from .gather_facts import *
 from .nodes_amount import *
-from .gather_asset_users import *
-from .asset_connectivity import *
-from .account_connectivity import *
-from .gather_asset_hardware_info import *
+from .push_account import *
+from .verify_account import *
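Note: with the package init reshuffled above, the two new task modules are re-exported from the package root, which is what the earlier API hunk imports against; a one-line sketch:

    # Assuming the star re-exports above, both tasks resolve from the package root.
    from assets.tasks import push_accounts_to_assets, verify_accounts_connectivity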
@@ -1,110 +0,0 @@
-# ~*~ coding: utf-8 ~*~
-
-from celery import shared_task
-from django.utils.translation import ugettext as _, gettext_noop
-
-from common.utils import get_logger
-from orgs.utils import org_aware_func
-from ..models import Connectivity, Account
-from . import const
-from .utils import check_asset_can_run_ansible
-
-
-logger = get_logger(__file__)
-
-
-__all__ = [
-    'test_account_connectivity_util', 'test_accounts_connectivity_manual',
-    'get_test_account_connectivity_tasks', 'test_user_connectivity',
-    'run_adhoc',
-]
-
-
-def get_test_account_connectivity_tasks(asset):
-    if asset.is_unixlike():
-        tasks = const.PING_UNIXLIKE_TASKS
-    elif asset.is_windows():
-        tasks = const.PING_WINDOWS_TASKS
-    else:
-        msg = _(
-            "The asset {} system platform {} does not "
-            "support run Ansible tasks".format(asset.name, asset.platform)
-        )
-        logger.info(msg)
-        tasks = []
-    return tasks
-
-
-def run_adhoc(task_name, tasks, inventory):
-    """
-    :param task_name
-    :param tasks
-    :param inventory
-    """
-    from ops.ansible.runner import AdHocRunner
-    runner = AdHocRunner(inventory, options=const.TASK_OPTIONS)
-    result = runner.run(tasks, 'all', task_name)
-    return result.results_raw, result.results_summary
-
-
-def test_user_connectivity(task_name, asset, username, password=None, private_key=None):
-    """
-    :param task_name
-    :param asset
-    :param username
-    :param password
-    :param private_key
-    """
-    from ops.inventory import JMSCustomInventory
-
-    tasks = get_test_account_connectivity_tasks(asset)
-    if not tasks:
-        logger.debug("No tasks ")
-        return {}, {}
-    inventory = JMSCustomInventory(
-        assets=[asset], username=username, password=password,
-        private_key=private_key
-    )
-    raw, summary = run_adhoc(
-        task_name=task_name, tasks=tasks, inventory=inventory
-    )
-    return raw, summary
-
-
-@org_aware_func("account")
-def test_account_connectivity_util(account, task_name):
-    """
-    :param account: <Account> object
-    :param task_name:
-    :return:
-    """
-    if not check_asset_can_run_ansible(account.asset):
-        return
-
-    account.load_auth()
-    try:
-        raw, summary = test_user_connectivity(
-            task_name=task_name, asset=account.asset,
-            username=account.username, password=account.password,
-            private_key=account.private_key_file
-        )
-    except Exception as e:
-        logger.warn("Failed run adhoc {}, {}".format(task_name, e))
-        return
-
-    if summary.get('success'):
-        account.set_connectivity(Connectivity.ok)
-    else:
-        account.set_connectivity(Connectivity.failed)
-
-
-@shared_task(queue="ansible")
-def test_accounts_connectivity_manual(account_ids):
-    """
-    :param accounts: <Account> objects
-    """
-    accounts = Account.objects.filter(id__in=account_ids)
-    for account in accounts:
-        task_name = gettext_noop("Test account connectivity: ") + str(account)
-        test_account_connectivity_util(account, task_name)
-        print(".\n")
@@ -6,7 +6,7 @@ from common.utils import get_logger, get_object_or_none
 logger = get_logger(__file__)


-@shared_task
+@shared_task(queue='ansible')
 def execute_automation(pid, trigger, mode):
     with tmp_to_root_org():
         instance = get_object_or_none(mode, pk=pid)
@@ -1,150 +0,0 @@
-# ~*~ coding: utf-8 ~*~
-
-import re
-from collections import defaultdict
-
-from celery import shared_task
-from django.utils.translation import gettext_noop
-from django.utils import timezone
-
-from orgs.utils import tmp_to_org, org_aware_func, tmp_to_root_org
-from common.utils import get_logger
-from ..models import GatheredUser, Node
-from .utils import clean_ansible_task_hosts
-from . import const
-
-__all__ = ['gather_asset_users', 'gather_nodes_asset_users']
-logger = get_logger(__name__)
-space = re.compile('\s+')
-ignore_login_shell = re.compile(r'nologin$|sync$|shutdown$|halt$')
-
-
-def parse_linux_result_to_users(result):
-    users = defaultdict(dict)
-    users_result = result.get('gather host users', {})\
-        .get('ansible_facts', {})\
-        .get('getent_passwd')
-    if not isinstance(users_result, dict):
-        users_result = {}
-    for username, attr in users_result.items():
-        if ignore_login_shell.search(attr[-1]):
-            continue
-        users[username] = {}
-    last_login_result = result.get('get last login', {}).get('stdout_lines', [])
-    for line in last_login_result:
-        data = line.split('@')
-        if len(data) != 3:
-            continue
-        username, ip, dt = data
-        dt += ' +0800'
-        date = timezone.datetime.strptime(dt, '%b %d %H:%M:%S %Y %z')
-        users[username] = {"ip": ip, "date": date}
-    return users
-
-
-def parse_windows_result_to_users(result):
-    task_result = []
-    for task_name, raw in result.items():
-        res = raw.get('stdout_lines', {})
-        if res:
-            task_result = res
-            break
-    if not task_result:
-        return []
-
-    users = {}
-
-    for i in range(4):
-        task_result.pop(0)
-    for i in range(2):
-        task_result.pop()
-
-    for line in task_result:
-        username_list = space.split(line)
-        # such as: ['Admini', 'appadm', 'DefaultAccount', '']
-        for username in username_list:
-            if not username:
-                continue
-            users[username] = {}
-    return users
-
-
-def add_asset_users(assets, results):
-    assets_map = {a.name: a for a in assets}
-    parser_map = {
-        'linux': parse_linux_result_to_users,
-        'windows': parse_windows_result_to_users
-    }
-
-    assets_users_map = {}
-
-    for platform, platform_results in results.items():
-        for hostname, res in platform_results.items():
-            parse = parser_map.get(platform)
-            users = parse(res)
-            logger.debug('Gathered host users: {} {}'.format(hostname, users))
-            asset = assets_map.get(hostname)
-            if not asset:
-                continue
-            assets_users_map[asset] = users
-
-    for asset, users in assets_users_map.items():
-        with tmp_to_org(asset.org_id):
-            GatheredUser.objects.filter(asset=asset, present=True)\
-                .update(present=False)
-            for username, data in users.items():
-                defaults = {'asset': asset, 'username': username, 'present': True}
-                if data.get("ip"):
-                    defaults["ip_last_login"] = data["address"][:32]
-                if data.get("date"):
-                    defaults["date_last_login"] = data["date"]
-                GatheredUser.objects.update_or_create(
-                    defaults=defaults, asset=asset, username=username,
-                )
-
-
-@org_aware_func("assets")
-def gather_asset_users(assets, task_name=None):
-    from ops.utils import update_or_create_ansible_task
-    if task_name is None:
-        task_name = gettext_noop("Gather assets users")
-    assets = clean_ansible_task_hosts(assets)
-    if not assets:
-        return
-    hosts_category = {
-        'linux': {
-            'hosts': [],
-            'tasks': const.GATHER_ASSET_USERS_TASKS
-        },
-        'windows': {
-            'hosts': [],
-            'tasks': const.GATHER_ASSET_USERS_TASKS_WINDOWS
-        }
-    }
-    for asset in assets:
-        hosts_list = hosts_category['windows']['hosts'] if asset.is_windows() \
-            else hosts_category['linux']['hosts']
-        hosts_list.append(asset)
-
-    results = {'linux': defaultdict(dict), 'windows': defaultdict(dict)}
-    for k, value in hosts_category.items():
-        if not value['hosts']:
-            continue
-        _task_name = '{}: {}'.format(task_name, k)
-        task, created = update_or_create_ansible_task(
-            task_name=_task_name, hosts=value['hosts'], tasks=value['tasks'],
-            pattern='all', options=const.TASK_OPTIONS,
-            run_as_admin=True,
-        )
-        raw, summary = task.run()
-        results[k].update(raw['ok'])
-    add_asset_users(assets, results)
-
-
-@shared_task(queue="ansible")
-def gather_nodes_asset_users(nodes_key):
-    nodes = Node.objects.filter(key__in=nodes_key)
-    assets = Node.get_nodes_all_assets(*nodes)
-    assets_groups_by_100 = [assets[i:i+100] for i in range(0, len(assets), 100)]
-    for _assets in assets_groups_by_100:
-        gather_asset_users(_assets)
@@ -15,18 +15,28 @@ __all__ = [


 @org_aware_func('assets')
-def update_assets_hardware_info_util(assets, task_name=None):
+def update_assets_hardware_info_util(assets=None, nodes=None, task_name=None):
     from assets.models import GatherFactsAutomation
+    if not assets and not nodes:
+        logger.info("No assets or nodes to update hardware info")
+        return
+
     if task_name is None:
         task_name = gettext_noop("Update some assets hardware info. ")

     task_name = GatherFactsAutomation.generate_unique_name(task_name)
-    data = {
-        'name': task_name,
-        'comment': ', '.join([str(i) for i in assets])
-    }
+    comment = ''
+    if assets:
+        comment += 'asset:' + ', '.join([str(i) for i in assets]) + '\n'
+    if nodes:
+        comment += 'node:' + ', '.join([str(i) for i in nodes])
+
+    data = {'name': task_name, 'comment': comment}
     instance = GatherFactsAutomation.objects.create(**data)
-    instance.assets.add(*assets)
+    if assets:
+        instance.assets.add(*assets)
+    if nodes:
+        instance.nodes.add(*nodes)
     instance.execute()


@@ -36,7 +46,7 @@ def update_assets_hardware_info_manual(asset_ids):
     with tmp_to_root_org():
         assets = Asset.objects.filter(id__in=asset_ids)
         task_name = gettext_noop("Update assets hardware info: ")
-        update_assets_hardware_info_util(assets, task_name=task_name)
+        update_assets_hardware_info_util(assets=assets, task_name=task_name)


 @shared_task(queue="ansible")
@@ -46,5 +56,4 @@ def update_node_assets_hardware_info_manual(node_id):
         node = Node.objects.get(id=node_id)

         task_name = gettext_noop("Update node asset hardware information: ")
-        assets = node.get_all_assets()
-        update_assets_hardware_info_util(assets, task_name=task_name)
+        update_assets_hardware_info_util(nodes=[node], task_name=task_name)
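Note: the two manual tasks above exercise both keyword forms of the refactored util. A small usage sketch of the calling convention, with placeholder values; per the hunk above, at least one of assets/nodes must be given or the util logs a message and returns:

    # Hypothetical callers; assets and nodes are querysets/lists of model instances.
    update_assets_hardware_info_util(assets=assets, task_name=task_name)
    update_assets_hardware_info_util(nodes=[node], task_name=task_name)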
@@ -17,7 +17,7 @@ __all__ = [
 def test_asset_connectivity_util(assets, task_name=None):
     from assets.models import PingAutomation
     if task_name is None:
-        task_name = gettext_noop("Test assets connectivity. ")
+        task_name = gettext_noop("Test assets connectivity ")

     task_name = PingAutomation.generate_unique_name(task_name)
     data = {
@@ -35,7 +35,7 @@ def test_assets_connectivity_manual(asset_ids):
     with tmp_to_root_org():
         assets = Asset.objects.filter(id__in=asset_ids)

-        task_name = gettext_noop("Test assets connectivity: ")
+        task_name = gettext_noop("Test assets connectivity ")
        test_asset_connectivity_util(assets, task_name=task_name)


@@ -45,6 +45,6 @@ def test_node_assets_connectivity_manual(node_id):
     with tmp_to_root_org():
         node = Node.objects.get(id=node_id)

-        task_name = gettext_noop("Test if the assets under the node are connectable: ")
+        task_name = gettext_noop("Test if the assets under the node are connectable ")
         assets = node.get_all_assets()
         test_asset_connectivity_util(assets, task_name=task_name)
@@ -0,0 +1,37 @@
+from celery import shared_task
+from django.utils.translation import gettext_noop
+
+from common.utils import get_logger
+from orgs.utils import org_aware_func, tmp_to_root_org
+
+logger = get_logger(__file__)
+__all__ = [
+    'push_accounts_to_assets',
+]
+
+
+@org_aware_func("assets")
+def push_accounts_to_assets_util(accounts, assets, task_name):
+    from assets.models import PushAccountAutomation
+    task_name = PushAccountAutomation.generate_unique_name(task_name)
+    account_usernames = list(accounts.values_list('username', flat=True))
+
+    data = {
+        'name': task_name,
+        'accounts': account_usernames,
+        'comment': ', '.join([str(i) for i in assets])
+    }
+    instance = PushAccountAutomation.objects.create(**data)
+    instance.assets.add(*assets)
+    instance.execute()
+
+
+@shared_task(queue="ansible")
+def push_accounts_to_assets(account_ids, asset_ids):
+    from assets.models import Asset, Account
+    with tmp_to_root_org():
+        assets = Asset.objects.get(id=asset_ids)
+        accounts = Account.objects.get(id=account_ids)
+
+        task_name = gettext_noop("Push accounts to assets")
+        return push_accounts_to_assets_util(accounts, assets, task_name)
@@ -0,0 +1,37 @@
+from celery import shared_task
+from django.utils.translation import gettext_noop
+
+from common.utils import get_logger
+from orgs.utils import org_aware_func, tmp_to_root_org
+
+logger = get_logger(__name__)
+__all__ = [
+    'verify_accounts_connectivity'
+]
+
+
+@org_aware_func("assets")
+def verify_accounts_connectivity_util(accounts, assets, task_name):
+    from assets.models import VerifyAccountAutomation
+    task_name = VerifyAccountAutomation.generate_unique_name(task_name)
+    account_usernames = list(accounts.values_list('username', flat=True))
+
+    data = {
+        'name': task_name,
+        'accounts': account_usernames,
+        'comment': ', '.join([str(i) for i in assets])
+    }
+    instance = VerifyAccountAutomation.objects.create(**data)
+    instance.assets.add(*assets)
+    instance.execute()
+
+
+@shared_task(queue="ansible")
+def verify_accounts_connectivity(account_ids, asset_ids):
+    from assets.models import Asset, Account
+    with tmp_to_root_org():
+        assets = Asset.objects.get(id=asset_ids)
+        accounts = Account.objects.get(id=account_ids)
+
+        task_name = gettext_noop("Verify accounts connectivity")
+        return verify_accounts_connectivity_util(accounts, assets, task_name)
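Note: both new task modules follow the same shape: an *_util helper that creates and runs an automation record, wrapped by a Celery task taking id collections. A hedged sketch of how the views above hand work to them, assuming a worker is listening on the ansible queue:

    # Illustrative only; the ids normally come from the API views shown earlier.
    account_ids = [account.id]
    asset_ids = [account.asset_id]
    push_accounts_to_assets.delay(account_ids, asset_ids)
    result = verify_accounts_connectivity.delay(account_ids, asset_ids)
    print(result.id)  # task id returned to the client for status polling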
@@ -172,15 +172,3 @@ def hello_random():
 def hello_callback(result):
     print(result)
     print("Hello callback")
-
-
-@shared_task
-def execute_automation_strategy(pid, trigger):
-    from .models import AutomationStrategy
-    with tmp_to_root_org():
-        instance = get_object_or_none(AutomationStrategy, pk=pid)
-        if not instance:
-            logger.error("No automation plan found: {}".format(pid))
-            return
-        with tmp_to_org(instance.org):
-            instance.execute(trigger)